diff --git a/.gitattributes b/.gitattributes index dfe07704..2ade174b 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,2 +1,5 @@ # Auto detect text files and perform LF normalization * text=auto +*.mp4 filter=lfs diff=lfs merge=lfs -text +*.wav filter=lfs diff=lfs merge=lfs -text +*.webm filter=lfs diff=lfs merge=lfs -text diff --git a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/CmdLine.cpython-311.pyc b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/CmdLine.cpython-311.pyc index 897de6c3..996f9f71 100644 Binary files a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/CmdLine.cpython-311.pyc and b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/CmdLine.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/DebugFlags.cpython-311.pyc b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/DebugFlags.cpython-311.pyc index 941ee9d0..6134c915 100644 Binary files a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/DebugFlags.cpython-311.pyc and b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/DebugFlags.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Errors.cpython-311.pyc b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Errors.cpython-311.pyc index ca361c8f..49d6dd7d 100644 Binary files a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Errors.cpython-311.pyc and b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Errors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Future.cpython-311.pyc b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Future.cpython-311.pyc index f422b275..8caee058 100644 Binary files a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Future.cpython-311.pyc and b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Future.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Lexicon.cpython-311.pyc 
b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Lexicon.cpython-311.pyc index 481ee376..3f3fa6da 100644 Binary files a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Lexicon.cpython-311.pyc and b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Lexicon.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Main.cpython-311.pyc b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Main.cpython-311.pyc index 0f71e3ac..c28fabce 100644 Binary files a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Main.cpython-311.pyc and b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Main.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Naming.cpython-311.pyc b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Naming.cpython-311.pyc index f54680c6..fa2b6595 100644 Binary files a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Naming.cpython-311.pyc and b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Naming.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Options.cpython-311.pyc b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Options.cpython-311.pyc index e839bc49..ab759700 100644 Binary files a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Options.cpython-311.pyc and b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Options.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/PyrexTypes.cpython-311.pyc b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/PyrexTypes.cpython-311.pyc index 25f76862..da5f3462 100644 Binary files a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/PyrexTypes.cpython-311.pyc and b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/PyrexTypes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/StringEncoding.cpython-311.pyc b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/StringEncoding.cpython-311.pyc index 
600a244e..8f041e8f 100644 Binary files a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/StringEncoding.cpython-311.pyc and b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/StringEncoding.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Symtab.cpython-311.pyc b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Symtab.cpython-311.pyc index d04aff78..491f179d 100644 Binary files a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Symtab.cpython-311.pyc and b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/Symtab.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/TypeSlots.cpython-311.pyc b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/TypeSlots.cpython-311.pyc index 3cec81a1..4347612a 100644 Binary files a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/TypeSlots.cpython-311.pyc and b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/TypeSlots.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/__init__.cpython-311.pyc index fdd15f66..40a0dee4 100644 Binary files a/.venv/Lib/site-packages/Cython/Compiler/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/Cython/Compiler/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/Cython/Distutils/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/Cython/Distutils/__pycache__/__init__.cpython-311.pyc index 5e422499..1f1f634b 100644 Binary files a/.venv/Lib/site-packages/Cython/Distutils/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/Cython/Distutils/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/Cython/Distutils/__pycache__/build_ext.cpython-311.pyc b/.venv/Lib/site-packages/Cython/Distutils/__pycache__/build_ext.cpython-311.pyc index 4c3317a0..09b9e10f 100644 Binary files 
a/.venv/Lib/site-packages/Cython/Distutils/__pycache__/build_ext.cpython-311.pyc and b/.venv/Lib/site-packages/Cython/Distutils/__pycache__/build_ext.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/Cython/Distutils/__pycache__/extension.cpython-311.pyc b/.venv/Lib/site-packages/Cython/Distutils/__pycache__/extension.cpython-311.pyc index eae23455..1253e64c 100644 Binary files a/.venv/Lib/site-packages/Cython/Distutils/__pycache__/extension.cpython-311.pyc and b/.venv/Lib/site-packages/Cython/Distutils/__pycache__/extension.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/Cython/Plex/__pycache__/Errors.cpython-311.pyc b/.venv/Lib/site-packages/Cython/Plex/__pycache__/Errors.cpython-311.pyc index b4304739..18f2a600 100644 Binary files a/.venv/Lib/site-packages/Cython/Plex/__pycache__/Errors.cpython-311.pyc and b/.venv/Lib/site-packages/Cython/Plex/__pycache__/Errors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/Cython/Plex/__pycache__/Lexicons.cpython-311.pyc b/.venv/Lib/site-packages/Cython/Plex/__pycache__/Lexicons.cpython-311.pyc index 325c818c..7db01b96 100644 Binary files a/.venv/Lib/site-packages/Cython/Plex/__pycache__/Lexicons.cpython-311.pyc and b/.venv/Lib/site-packages/Cython/Plex/__pycache__/Lexicons.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/Cython/Plex/__pycache__/Regexps.cpython-311.pyc b/.venv/Lib/site-packages/Cython/Plex/__pycache__/Regexps.cpython-311.pyc index 7565a776..bfa8da30 100644 Binary files a/.venv/Lib/site-packages/Cython/Plex/__pycache__/Regexps.cpython-311.pyc and b/.venv/Lib/site-packages/Cython/Plex/__pycache__/Regexps.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/Cython/Plex/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/Cython/Plex/__pycache__/__init__.cpython-311.pyc index 234a2a50..c2863252 100644 Binary files a/.venv/Lib/site-packages/Cython/Plex/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/Cython/Plex/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/Cython/__pycache__/Shadow.cpython-311.pyc b/.venv/Lib/site-packages/Cython/__pycache__/Shadow.cpython-311.pyc index 7cc1388a..566eec41 100644 Binary files a/.venv/Lib/site-packages/Cython/__pycache__/Shadow.cpython-311.pyc and b/.venv/Lib/site-packages/Cython/__pycache__/Shadow.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/Cython/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/Cython/__pycache__/__init__.cpython-311.pyc index 57c07974..600d39de 100644 Binary files a/.venv/Lib/site-packages/Cython/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/Cython/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/PIL/__pycache__/ExifTags.cpython-311.pyc b/.venv/Lib/site-packages/PIL/__pycache__/ExifTags.cpython-311.pyc index 60cdae4e..53f297f7 100644 Binary files a/.venv/Lib/site-packages/PIL/__pycache__/ExifTags.cpython-311.pyc and b/.venv/Lib/site-packages/PIL/__pycache__/ExifTags.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/PIL/__pycache__/GimpGradientFile.cpython-311.pyc b/.venv/Lib/site-packages/PIL/__pycache__/GimpGradientFile.cpython-311.pyc index 12d9bcd9..51fc219d 100644 Binary files a/.venv/Lib/site-packages/PIL/__pycache__/GimpGradientFile.cpython-311.pyc and b/.venv/Lib/site-packages/PIL/__pycache__/GimpGradientFile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/PIL/__pycache__/GimpPaletteFile.cpython-311.pyc b/.venv/Lib/site-packages/PIL/__pycache__/GimpPaletteFile.cpython-311.pyc index 4972f26c..1de21167 100644 Binary files a/.venv/Lib/site-packages/PIL/__pycache__/GimpPaletteFile.cpython-311.pyc and b/.venv/Lib/site-packages/PIL/__pycache__/GimpPaletteFile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/PIL/__pycache__/Image.cpython-311.pyc b/.venv/Lib/site-packages/PIL/__pycache__/Image.cpython-311.pyc index 51f8c26f..1e6cc7fe 
100644 Binary files a/.venv/Lib/site-packages/PIL/__pycache__/Image.cpython-311.pyc and b/.venv/Lib/site-packages/PIL/__pycache__/Image.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/PIL/__pycache__/ImageChops.cpython-311.pyc b/.venv/Lib/site-packages/PIL/__pycache__/ImageChops.cpython-311.pyc index 9be72877..be2d474b 100644 Binary files a/.venv/Lib/site-packages/PIL/__pycache__/ImageChops.cpython-311.pyc and b/.venv/Lib/site-packages/PIL/__pycache__/ImageChops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/PIL/__pycache__/ImageColor.cpython-311.pyc b/.venv/Lib/site-packages/PIL/__pycache__/ImageColor.cpython-311.pyc index 12e0cee8..be7fe396 100644 Binary files a/.venv/Lib/site-packages/PIL/__pycache__/ImageColor.cpython-311.pyc and b/.venv/Lib/site-packages/PIL/__pycache__/ImageColor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/PIL/__pycache__/ImageFile.cpython-311.pyc b/.venv/Lib/site-packages/PIL/__pycache__/ImageFile.cpython-311.pyc index 8cd20016..f8ab60a5 100644 Binary files a/.venv/Lib/site-packages/PIL/__pycache__/ImageFile.cpython-311.pyc and b/.venv/Lib/site-packages/PIL/__pycache__/ImageFile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/PIL/__pycache__/ImageMode.cpython-311.pyc b/.venv/Lib/site-packages/PIL/__pycache__/ImageMode.cpython-311.pyc index 6e5f3eaa..cbd38baf 100644 Binary files a/.venv/Lib/site-packages/PIL/__pycache__/ImageMode.cpython-311.pyc and b/.venv/Lib/site-packages/PIL/__pycache__/ImageMode.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/PIL/__pycache__/ImagePalette.cpython-311.pyc b/.venv/Lib/site-packages/PIL/__pycache__/ImagePalette.cpython-311.pyc index 7cbfc464..b4e32947 100644 Binary files a/.venv/Lib/site-packages/PIL/__pycache__/ImagePalette.cpython-311.pyc and b/.venv/Lib/site-packages/PIL/__pycache__/ImagePalette.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/PIL/__pycache__/ImageSequence.cpython-311.pyc 
b/.venv/Lib/site-packages/PIL/__pycache__/ImageSequence.cpython-311.pyc index 938daf2e..8f6f70d4 100644 Binary files a/.venv/Lib/site-packages/PIL/__pycache__/ImageSequence.cpython-311.pyc and b/.venv/Lib/site-packages/PIL/__pycache__/ImageSequence.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/PIL/__pycache__/PaletteFile.cpython-311.pyc b/.venv/Lib/site-packages/PIL/__pycache__/PaletteFile.cpython-311.pyc index 37a8a41a..9ac2e35c 100644 Binary files a/.venv/Lib/site-packages/PIL/__pycache__/PaletteFile.cpython-311.pyc and b/.venv/Lib/site-packages/PIL/__pycache__/PaletteFile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/PIL/__pycache__/PngImagePlugin.cpython-311.pyc b/.venv/Lib/site-packages/PIL/__pycache__/PngImagePlugin.cpython-311.pyc index 5c6451ea..86672c4f 100644 Binary files a/.venv/Lib/site-packages/PIL/__pycache__/PngImagePlugin.cpython-311.pyc and b/.venv/Lib/site-packages/PIL/__pycache__/PngImagePlugin.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/PIL/__pycache__/TiffTags.cpython-311.pyc b/.venv/Lib/site-packages/PIL/__pycache__/TiffTags.cpython-311.pyc index 722c7a2d..220a73bf 100644 Binary files a/.venv/Lib/site-packages/PIL/__pycache__/TiffTags.cpython-311.pyc and b/.venv/Lib/site-packages/PIL/__pycache__/TiffTags.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/PIL/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/PIL/__pycache__/__init__.cpython-311.pyc index 85a71c6f..052f7c49 100644 Binary files a/.venv/Lib/site-packages/PIL/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/PIL/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/PIL/__pycache__/_binary.cpython-311.pyc b/.venv/Lib/site-packages/PIL/__pycache__/_binary.cpython-311.pyc index 04ef5629..b1b2f59f 100644 Binary files a/.venv/Lib/site-packages/PIL/__pycache__/_binary.cpython-311.pyc and b/.venv/Lib/site-packages/PIL/__pycache__/_binary.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/PIL/__pycache__/_deprecate.cpython-311.pyc b/.venv/Lib/site-packages/PIL/__pycache__/_deprecate.cpython-311.pyc index a4a69905..5312d7d5 100644 Binary files a/.venv/Lib/site-packages/PIL/__pycache__/_deprecate.cpython-311.pyc and b/.venv/Lib/site-packages/PIL/__pycache__/_deprecate.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/PIL/__pycache__/_typing.cpython-311.pyc b/.venv/Lib/site-packages/PIL/__pycache__/_typing.cpython-311.pyc index ee2a73f4..378048bd 100644 Binary files a/.venv/Lib/site-packages/PIL/__pycache__/_typing.cpython-311.pyc and b/.venv/Lib/site-packages/PIL/__pycache__/_typing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/PIL/__pycache__/_util.cpython-311.pyc b/.venv/Lib/site-packages/PIL/__pycache__/_util.cpython-311.pyc index f4d16ce8..8a2f61e6 100644 Binary files a/.venv/Lib/site-packages/PIL/__pycache__/_util.cpython-311.pyc and b/.venv/Lib/site-packages/PIL/__pycache__/_util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/PIL/__pycache__/_version.cpython-311.pyc b/.venv/Lib/site-packages/PIL/__pycache__/_version.cpython-311.pyc index 89b0471d..6d472caf 100644 Binary files a/.venv/Lib/site-packages/PIL/__pycache__/_version.cpython-311.pyc and b/.venv/Lib/site-packages/PIL/__pycache__/_version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/__pycache__/__init__.cpython-311.pyc index 06630f62..da583eb2 100644 Binary files a/.venv/Lib/site-packages/TTS/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/__pycache__/api.cpython-311.pyc b/.venv/Lib/site-packages/TTS/__pycache__/api.cpython-311.pyc index a8ab76d8..484a6e15 100644 Binary files a/.venv/Lib/site-packages/TTS/__pycache__/api.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/__pycache__/api.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/TTS/__pycache__/model.cpython-311.pyc b/.venv/Lib/site-packages/TTS/__pycache__/model.cpython-311.pyc index 974a909e..3e205f5f 100644 Binary files a/.venv/Lib/site-packages/TTS/__pycache__/model.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/__pycache__/model.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/config/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/config/__pycache__/__init__.cpython-311.pyc index 03c51ac7..b48fe7a8 100644 Binary files a/.venv/Lib/site-packages/TTS/config/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/config/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/config/__pycache__/shared_configs.cpython-311.pyc b/.venv/Lib/site-packages/TTS/config/__pycache__/shared_configs.cpython-311.pyc index c0d4a08c..cd3ede0c 100644 Binary files a/.venv/Lib/site-packages/TTS/config/__pycache__/shared_configs.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/config/__pycache__/shared_configs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/encoder/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/encoder/__pycache__/__init__.cpython-311.pyc index af65193b..d8f0713b 100644 Binary files a/.venv/Lib/site-packages/TTS/encoder/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/encoder/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/encoder/__pycache__/losses.cpython-311.pyc b/.venv/Lib/site-packages/TTS/encoder/__pycache__/losses.cpython-311.pyc index 259de2e9..5b140306 100644 Binary files a/.venv/Lib/site-packages/TTS/encoder/__pycache__/losses.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/encoder/__pycache__/losses.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/encoder/models/__pycache__/base_encoder.cpython-311.pyc b/.venv/Lib/site-packages/TTS/encoder/models/__pycache__/base_encoder.cpython-311.pyc index a7edd95d..9da4622a 
100644 Binary files a/.venv/Lib/site-packages/TTS/encoder/models/__pycache__/base_encoder.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/encoder/models/__pycache__/base_encoder.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/encoder/models/__pycache__/lstm.cpython-311.pyc b/.venv/Lib/site-packages/TTS/encoder/models/__pycache__/lstm.cpython-311.pyc index 935fbfac..50dd82b3 100644 Binary files a/.venv/Lib/site-packages/TTS/encoder/models/__pycache__/lstm.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/encoder/models/__pycache__/lstm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/encoder/models/__pycache__/resnet.cpython-311.pyc b/.venv/Lib/site-packages/TTS/encoder/models/__pycache__/resnet.cpython-311.pyc index cd623aea..0516de90 100644 Binary files a/.venv/Lib/site-packages/TTS/encoder/models/__pycache__/resnet.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/encoder/models/__pycache__/resnet.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/encoder/utils/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/encoder/utils/__pycache__/__init__.cpython-311.pyc index e6d0ceab..69d7716d 100644 Binary files a/.venv/Lib/site-packages/TTS/encoder/utils/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/encoder/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/encoder/utils/__pycache__/generic_utils.cpython-311.pyc b/.venv/Lib/site-packages/TTS/encoder/utils/__pycache__/generic_utils.cpython-311.pyc index 1c6b9e10..b093d5cc 100644 Binary files a/.venv/Lib/site-packages/TTS/encoder/utils/__pycache__/generic_utils.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/encoder/utils/__pycache__/generic_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/__pycache__/__init__.cpython-311.pyc index ff65a58f..e97a34e6 100644 Binary files 
a/.venv/Lib/site-packages/TTS/tts/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/configs/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/configs/__pycache__/__init__.cpython-311.pyc index 9e701a71..6b4cf979 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/configs/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/configs/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/configs/__pycache__/shared_configs.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/configs/__pycache__/shared_configs.cpython-311.pyc index a87db917..a6f3b2dc 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/configs/__pycache__/shared_configs.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/configs/__pycache__/shared_configs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/configs/__pycache__/vits_config.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/configs/__pycache__/vits_config.cpython-311.pyc index 18c3fcf7..d892fca4 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/configs/__pycache__/vits_config.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/configs/__pycache__/vits_config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/configs/__pycache__/xtts_config.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/configs/__pycache__/xtts_config.cpython-311.pyc index 277f78fb..10bcf1c8 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/configs/__pycache__/xtts_config.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/configs/__pycache__/xtts_config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/datasets/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/datasets/__pycache__/__init__.cpython-311.pyc index c13f0eb5..2c1edb78 100644 Binary files 
a/.venv/Lib/site-packages/TTS/tts/datasets/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/datasets/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/datasets/__pycache__/dataset.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/datasets/__pycache__/dataset.cpython-311.pyc index edad9744..15da39b6 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/datasets/__pycache__/dataset.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/datasets/__pycache__/dataset.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/datasets/__pycache__/formatters.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/datasets/__pycache__/formatters.cpython-311.pyc index 1bbf1b79..a3af5c02 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/datasets/__pycache__/formatters.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/datasets/__pycache__/formatters.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/layers/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/layers/__pycache__/__init__.cpython-311.pyc index 82d63b0d..19010581 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/layers/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/layers/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/layers/__pycache__/losses.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/layers/__pycache__/losses.cpython-311.pyc index dd03b4fd..8e6044ab 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/layers/__pycache__/losses.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/layers/__pycache__/losses.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/layers/generic/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/layers/generic/__pycache__/__init__.cpython-311.pyc index 3c062234..84804e76 100644 Binary files 
a/.venv/Lib/site-packages/TTS/tts/layers/generic/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/layers/generic/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/layers/generic/__pycache__/normalization.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/layers/generic/__pycache__/normalization.cpython-311.pyc index fe8a8749..b4a5c3fc 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/layers/generic/__pycache__/normalization.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/layers/generic/__pycache__/normalization.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/layers/generic/__pycache__/wavenet.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/layers/generic/__pycache__/wavenet.cpython-311.pyc index 8ea838fd..65423c80 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/layers/generic/__pycache__/wavenet.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/layers/generic/__pycache__/wavenet.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/layers/glow_tts/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/layers/glow_tts/__pycache__/__init__.cpython-311.pyc index d8388a99..7d71ba39 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/layers/glow_tts/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/layers/glow_tts/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/layers/glow_tts/__pycache__/duration_predictor.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/layers/glow_tts/__pycache__/duration_predictor.cpython-311.pyc index f89995e2..340b4d6a 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/layers/glow_tts/__pycache__/duration_predictor.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/layers/glow_tts/__pycache__/duration_predictor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/layers/glow_tts/__pycache__/glow.cpython-311.pyc 
b/.venv/Lib/site-packages/TTS/tts/layers/glow_tts/__pycache__/glow.cpython-311.pyc index cd61fe12..18e8f5eb 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/layers/glow_tts/__pycache__/glow.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/layers/glow_tts/__pycache__/glow.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/layers/glow_tts/__pycache__/transformer.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/layers/glow_tts/__pycache__/transformer.cpython-311.pyc index ae16bee3..1bac6f38 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/layers/glow_tts/__pycache__/transformer.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/layers/glow_tts/__pycache__/transformer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/layers/vits/__pycache__/discriminator.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/layers/vits/__pycache__/discriminator.cpython-311.pyc index 4f6dad95..1f878080 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/layers/vits/__pycache__/discriminator.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/layers/vits/__pycache__/discriminator.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/layers/vits/__pycache__/networks.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/layers/vits/__pycache__/networks.cpython-311.pyc index 280c3927..6fa0d640 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/layers/vits/__pycache__/networks.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/layers/vits/__pycache__/networks.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/layers/vits/__pycache__/stochastic_duration_predictor.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/layers/vits/__pycache__/stochastic_duration_predictor.cpython-311.pyc index 6048de3b..d72ec597 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/layers/vits/__pycache__/stochastic_duration_predictor.cpython-311.pyc and 
b/.venv/Lib/site-packages/TTS/tts/layers/vits/__pycache__/stochastic_duration_predictor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/layers/vits/__pycache__/transforms.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/layers/vits/__pycache__/transforms.cpython-311.pyc index 16d9ea96..65789b55 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/layers/vits/__pycache__/transforms.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/layers/vits/__pycache__/transforms.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/gpt.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/gpt.cpython-311.pyc index 2cb93d6c..17f0b963 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/gpt.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/gpt.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/gpt_inference.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/gpt_inference.cpython-311.pyc index fb0e1997..44489048 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/gpt_inference.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/gpt_inference.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/hifigan_decoder.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/hifigan_decoder.cpython-311.pyc index df3a0f72..14aba931 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/hifigan_decoder.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/hifigan_decoder.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/latent_encoder.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/latent_encoder.cpython-311.pyc index 042ad868..2bae23f2 100644 Binary files 
a/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/latent_encoder.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/latent_encoder.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/perceiver_encoder.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/perceiver_encoder.cpython-311.pyc index 026713d0..6c72d989 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/perceiver_encoder.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/perceiver_encoder.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/stream_generator.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/stream_generator.cpython-311.pyc index 23e4469c..7c33844d 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/stream_generator.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/stream_generator.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/tokenizer.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/tokenizer.cpython-311.pyc index a04f05c2..112ab851 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/tokenizer.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/tokenizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/xtts_manager.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/xtts_manager.cpython-311.pyc index 4df6335b..8832519f 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/xtts_manager.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/xtts_manager.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/zh_num2words.cpython-311.pyc 
b/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/zh_num2words.cpython-311.pyc index a7cb74b3..c2e38b9f 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/zh_num2words.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/layers/xtts/__pycache__/zh_num2words.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/models/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/models/__pycache__/__init__.cpython-311.pyc index 04a2035f..00ed06f3 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/models/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/models/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/models/__pycache__/base_tts.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/models/__pycache__/base_tts.cpython-311.pyc index 7093bb7a..8587246d 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/models/__pycache__/base_tts.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/models/__pycache__/base_tts.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/models/__pycache__/vits.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/models/__pycache__/vits.cpython-311.pyc index 114d44d5..749b56ac 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/models/__pycache__/vits.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/models/__pycache__/vits.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/models/__pycache__/xtts.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/models/__pycache__/xtts.cpython-311.pyc index a20f9a90..91bd064c 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/models/__pycache__/xtts.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/models/__pycache__/xtts.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/__init__.cpython-311.pyc index 23b7743b..8ac44acb 100644 
Binary files a/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/data.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/data.cpython-311.pyc index e73e87c5..2a8479a7 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/data.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/data.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/fairseq.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/fairseq.cpython-311.pyc index 7b4a1cc3..a076423c 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/fairseq.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/fairseq.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/helpers.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/helpers.cpython-311.pyc index 229a26ec..d9961834 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/helpers.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/helpers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/languages.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/languages.cpython-311.pyc index 695838c1..1bb1d71e 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/languages.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/languages.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/managers.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/managers.cpython-311.pyc index 9e6ca73a..b11aa045 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/managers.cpython-311.pyc and 
b/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/managers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/speakers.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/speakers.cpython-311.pyc index 09f17680..d37c1dd8 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/speakers.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/speakers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/ssim.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/ssim.cpython-311.pyc index d14380a3..8bed478c 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/ssim.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/ssim.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/synthesis.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/synthesis.cpython-311.pyc index b819b8c4..6bd0a816 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/synthesis.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/synthesis.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/visual.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/visual.cpython-311.pyc index a63040c6..1e3f7361 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/visual.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/__pycache__/visual.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/monotonic_align/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/monotonic_align/__pycache__/__init__.cpython-311.pyc index 0259f058..5f504a59 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/monotonic_align/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/monotonic_align/__pycache__/__init__.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/__pycache__/__init__.cpython-311.pyc index cc48cc0b..9752de6e 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/__pycache__/characters.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/__pycache__/characters.cpython-311.pyc index a09c88d2..2517caa1 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/__pycache__/characters.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/__pycache__/characters.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/__pycache__/cleaners.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/__pycache__/cleaners.cpython-311.pyc index 44660162..9c762f72 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/__pycache__/cleaners.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/__pycache__/cleaners.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/__pycache__/punctuation.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/__pycache__/punctuation.cpython-311.pyc index 63904044..7c16151f 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/__pycache__/punctuation.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/__pycache__/punctuation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/__pycache__/tokenizer.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/__pycache__/tokenizer.cpython-311.pyc index 99d82775..bce78ccb 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/__pycache__/tokenizer.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/__pycache__/tokenizer.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/bangla/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/bangla/__pycache__/__init__.cpython-311.pyc index f6c1b5ab..ef233fca 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/bangla/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/bangla/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/bangla/__pycache__/phonemizer.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/bangla/__pycache__/phonemizer.cpython-311.pyc index 1d6ad2e3..09baae58 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/bangla/__pycache__/phonemizer.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/bangla/__pycache__/phonemizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/belarusian/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/belarusian/__pycache__/__init__.cpython-311.pyc index 1514f53d..f0106fd6 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/belarusian/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/belarusian/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/belarusian/__pycache__/phonemizer.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/belarusian/__pycache__/phonemizer.cpython-311.pyc index 30a3f533..50319eee 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/belarusian/__pycache__/phonemizer.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/belarusian/__pycache__/phonemizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/chinese_mandarin/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/chinese_mandarin/__pycache__/__init__.cpython-311.pyc index 6b1a248e..aabd68c1 100644 Binary 
files a/.venv/Lib/site-packages/TTS/tts/utils/text/chinese_mandarin/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/chinese_mandarin/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/chinese_mandarin/__pycache__/numbers.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/chinese_mandarin/__pycache__/numbers.cpython-311.pyc index cb768935..dcd5936a 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/chinese_mandarin/__pycache__/numbers.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/chinese_mandarin/__pycache__/numbers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/chinese_mandarin/__pycache__/phonemizer.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/chinese_mandarin/__pycache__/phonemizer.cpython-311.pyc index 027097bf..a12e3f19 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/chinese_mandarin/__pycache__/phonemizer.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/chinese_mandarin/__pycache__/phonemizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/chinese_mandarin/__pycache__/pinyinToPhonemes.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/chinese_mandarin/__pycache__/pinyinToPhonemes.cpython-311.pyc index c48ae311..dd4b8a56 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/chinese_mandarin/__pycache__/pinyinToPhonemes.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/chinese_mandarin/__pycache__/pinyinToPhonemes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/english/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/english/__pycache__/__init__.cpython-311.pyc index c93126cd..ed6d2e87 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/english/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/TTS/tts/utils/text/english/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/english/__pycache__/abbreviations.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/english/__pycache__/abbreviations.cpython-311.pyc index 92caeb4f..29bb64fe 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/english/__pycache__/abbreviations.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/english/__pycache__/abbreviations.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/english/__pycache__/number_norm.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/english/__pycache__/number_norm.cpython-311.pyc index d754efc1..79332f12 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/english/__pycache__/number_norm.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/english/__pycache__/number_norm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/english/__pycache__/time_norm.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/english/__pycache__/time_norm.cpython-311.pyc index eedab4af..63c875cd 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/english/__pycache__/time_norm.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/english/__pycache__/time_norm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/french/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/french/__pycache__/__init__.cpython-311.pyc index 22d50d6a..f5b95e1c 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/french/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/french/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/french/__pycache__/abbreviations.cpython-311.pyc 
b/.venv/Lib/site-packages/TTS/tts/utils/text/french/__pycache__/abbreviations.cpython-311.pyc index 619a2552..fcdfe251 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/french/__pycache__/abbreviations.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/french/__pycache__/abbreviations.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/japanese/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/japanese/__pycache__/__init__.cpython-311.pyc index 5c4d2baf..0a0def8f 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/japanese/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/japanese/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/japanese/__pycache__/phonemizer.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/japanese/__pycache__/phonemizer.cpython-311.pyc index f875487a..4a7848d3 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/japanese/__pycache__/phonemizer.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/japanese/__pycache__/phonemizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/korean/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/korean/__pycache__/__init__.cpython-311.pyc index a2a79fba..f7f19349 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/korean/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/korean/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/korean/__pycache__/ko_dictionary.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/korean/__pycache__/ko_dictionary.cpython-311.pyc index 7c96ffda..fa962c84 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/korean/__pycache__/ko_dictionary.cpython-311.pyc and 
b/.venv/Lib/site-packages/TTS/tts/utils/text/korean/__pycache__/ko_dictionary.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/korean/__pycache__/korean.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/korean/__pycache__/korean.cpython-311.pyc index 5a1353a2..7b60a184 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/korean/__pycache__/korean.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/korean/__pycache__/korean.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/korean/__pycache__/phonemizer.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/korean/__pycache__/phonemizer.cpython-311.pyc index 2e091a8c..5f158f0d 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/korean/__pycache__/phonemizer.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/korean/__pycache__/phonemizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/__init__.cpython-311.pyc index 0a1bbd97..1c597787 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/bangla_phonemizer.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/bangla_phonemizer.cpython-311.pyc index cf1ab045..b397245e 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/bangla_phonemizer.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/bangla_phonemizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/base.cpython-311.pyc 
b/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/base.cpython-311.pyc index 75612bdc..53ec25b4 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/base.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/belarusian_phonemizer.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/belarusian_phonemizer.cpython-311.pyc index 5b3c09dc..ea39ddad 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/belarusian_phonemizer.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/belarusian_phonemizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/espeak_wrapper.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/espeak_wrapper.cpython-311.pyc index 2b3b74dc..ac02235a 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/espeak_wrapper.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/espeak_wrapper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/gruut_wrapper.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/gruut_wrapper.cpython-311.pyc index 715a3d35..9af914ba 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/gruut_wrapper.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/gruut_wrapper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/ja_jp_phonemizer.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/ja_jp_phonemizer.cpython-311.pyc index 1d55cf46..2dd0dd47 100644 
Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/ja_jp_phonemizer.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/ja_jp_phonemizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/ko_kr_phonemizer.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/ko_kr_phonemizer.cpython-311.pyc index 75d5a56f..950ca321 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/ko_kr_phonemizer.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/ko_kr_phonemizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/multi_phonemizer.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/multi_phonemizer.cpython-311.pyc index fed5f1e4..50453c14 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/multi_phonemizer.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/multi_phonemizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/zh_cn_phonemizer.cpython-311.pyc b/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/zh_cn_phonemizer.cpython-311.pyc index 05145802..c9a42a3f 100644 Binary files a/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/zh_cn_phonemizer.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/tts/utils/text/phonemizers/__pycache__/zh_cn_phonemizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/utils/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/utils/__pycache__/__init__.cpython-311.pyc index 5ae49919..2f2b86e7 100644 Binary files a/.venv/Lib/site-packages/TTS/utils/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/TTS/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/utils/__pycache__/generic_utils.cpython-311.pyc b/.venv/Lib/site-packages/TTS/utils/__pycache__/generic_utils.cpython-311.pyc index e58ec202..2483183a 100644 Binary files a/.venv/Lib/site-packages/TTS/utils/__pycache__/generic_utils.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/utils/__pycache__/generic_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/utils/__pycache__/io.cpython-311.pyc b/.venv/Lib/site-packages/TTS/utils/__pycache__/io.cpython-311.pyc index 98803dc8..7540030e 100644 Binary files a/.venv/Lib/site-packages/TTS/utils/__pycache__/io.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/utils/__pycache__/io.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/utils/__pycache__/manage.cpython-311.pyc b/.venv/Lib/site-packages/TTS/utils/__pycache__/manage.cpython-311.pyc index c42169cd..576a0979 100644 Binary files a/.venv/Lib/site-packages/TTS/utils/__pycache__/manage.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/utils/__pycache__/manage.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/utils/__pycache__/samplers.cpython-311.pyc b/.venv/Lib/site-packages/TTS/utils/__pycache__/samplers.cpython-311.pyc index 73b5bfd7..b7da615a 100644 Binary files a/.venv/Lib/site-packages/TTS/utils/__pycache__/samplers.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/utils/__pycache__/samplers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/utils/__pycache__/synthesizer.cpython-311.pyc b/.venv/Lib/site-packages/TTS/utils/__pycache__/synthesizer.cpython-311.pyc index 41f2f0c9..39967025 100644 Binary files a/.venv/Lib/site-packages/TTS/utils/__pycache__/synthesizer.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/utils/__pycache__/synthesizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/utils/audio/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/TTS/utils/audio/__pycache__/__init__.cpython-311.pyc index 6f63df5b..53f6aa33 100644 Binary files a/.venv/Lib/site-packages/TTS/utils/audio/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/utils/audio/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/utils/audio/__pycache__/numpy_transforms.cpython-311.pyc b/.venv/Lib/site-packages/TTS/utils/audio/__pycache__/numpy_transforms.cpython-311.pyc index 53b07617..1244c9f1 100644 Binary files a/.venv/Lib/site-packages/TTS/utils/audio/__pycache__/numpy_transforms.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/utils/audio/__pycache__/numpy_transforms.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/utils/audio/__pycache__/processor.cpython-311.pyc b/.venv/Lib/site-packages/TTS/utils/audio/__pycache__/processor.cpython-311.pyc index 616d39fb..9eb87c8d 100644 Binary files a/.venv/Lib/site-packages/TTS/utils/audio/__pycache__/processor.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/utils/audio/__pycache__/processor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/utils/audio/__pycache__/torch_transforms.cpython-311.pyc b/.venv/Lib/site-packages/TTS/utils/audio/__pycache__/torch_transforms.cpython-311.pyc index 34ae468a..cb69f407 100644 Binary files a/.venv/Lib/site-packages/TTS/utils/audio/__pycache__/torch_transforms.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/utils/audio/__pycache__/torch_transforms.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vc/configs/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vc/configs/__pycache__/__init__.cpython-311.pyc index b40557ea..f49ae09d 100644 Binary files a/.venv/Lib/site-packages/TTS/vc/configs/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vc/configs/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vc/models/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/TTS/vc/models/__pycache__/__init__.cpython-311.pyc index d1f3d79a..39d8ea42 100644 Binary files a/.venv/Lib/site-packages/TTS/vc/models/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vc/models/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/__pycache__/__init__.cpython-311.pyc index d64780d4..5b801826 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/__init__.cpython-311.pyc index 7343443b..219747a6 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/fullband_melgan_config.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/fullband_melgan_config.cpython-311.pyc index 02574435..01901de2 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/fullband_melgan_config.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/fullband_melgan_config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/hifigan_config.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/hifigan_config.cpython-311.pyc index a180b1e6..436c480b 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/hifigan_config.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/hifigan_config.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/melgan_config.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/melgan_config.cpython-311.pyc index bbfb9047..cdc526f8 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/melgan_config.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/melgan_config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/multiband_melgan_config.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/multiband_melgan_config.cpython-311.pyc index 54c8249b..5395d446 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/multiband_melgan_config.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/multiband_melgan_config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/parallel_wavegan_config.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/parallel_wavegan_config.cpython-311.pyc index 52eafde4..8aea508b 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/parallel_wavegan_config.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/parallel_wavegan_config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/shared_configs.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/shared_configs.cpython-311.pyc index 0dad09b9..88f5ddfd 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/shared_configs.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/shared_configs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/univnet_config.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/univnet_config.cpython-311.pyc index 04dce242..46d4bd32 100644 Binary files 
a/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/univnet_config.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/univnet_config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/wavegrad_config.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/wavegrad_config.cpython-311.pyc index f143f964..c83d6ba6 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/wavegrad_config.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/wavegrad_config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/wavernn_config.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/wavernn_config.cpython-311.pyc index 7f6a8450..d9f2ae4d 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/wavernn_config.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/configs/__pycache__/wavernn_config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/datasets/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/datasets/__pycache__/__init__.cpython-311.pyc index 108d09ec..a13835c0 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/datasets/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/datasets/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/datasets/__pycache__/gan_dataset.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/datasets/__pycache__/gan_dataset.cpython-311.pyc index e93ffaa6..b4e3fb5b 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/datasets/__pycache__/gan_dataset.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/datasets/__pycache__/gan_dataset.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/datasets/__pycache__/preprocess.cpython-311.pyc 
b/.venv/Lib/site-packages/TTS/vocoder/datasets/__pycache__/preprocess.cpython-311.pyc index e0639c70..d0486035 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/datasets/__pycache__/preprocess.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/datasets/__pycache__/preprocess.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/datasets/__pycache__/wavegrad_dataset.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/datasets/__pycache__/wavegrad_dataset.cpython-311.pyc index 13f5b3b3..20c675e0 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/datasets/__pycache__/wavegrad_dataset.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/datasets/__pycache__/wavegrad_dataset.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/datasets/__pycache__/wavernn_dataset.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/datasets/__pycache__/wavernn_dataset.cpython-311.pyc index 9b70eae8..2c6f16fc 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/datasets/__pycache__/wavernn_dataset.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/datasets/__pycache__/wavernn_dataset.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/layers/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/layers/__pycache__/__init__.cpython-311.pyc index 5bb30491..d37937f4 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/layers/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/layers/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/layers/__pycache__/losses.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/layers/__pycache__/losses.cpython-311.pyc index 6c407fdb..c9835202 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/layers/__pycache__/losses.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/layers/__pycache__/losses.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/TTS/vocoder/layers/__pycache__/wavegrad.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/layers/__pycache__/wavegrad.cpython-311.pyc index 33ee242a..de946a16 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/layers/__pycache__/wavegrad.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/layers/__pycache__/wavegrad.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/__init__.cpython-311.pyc index cbf0ae37..a2985e09 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/base_vocoder.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/base_vocoder.cpython-311.pyc index 42e23a9c..42a7ff76 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/base_vocoder.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/base_vocoder.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/hifigan_discriminator.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/hifigan_discriminator.cpython-311.pyc index a11e2c0a..6807aaac 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/hifigan_discriminator.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/hifigan_discriminator.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/hifigan_generator.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/hifigan_generator.cpython-311.pyc index 5085e0c6..855bdd6e 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/hifigan_generator.cpython-311.pyc and 
b/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/hifigan_generator.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/wavegrad.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/wavegrad.cpython-311.pyc index 28c80281..c877ef9e 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/wavegrad.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/wavegrad.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/wavernn.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/wavernn.cpython-311.pyc index 20d5e8aa..adcee684 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/wavernn.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/models/__pycache__/wavernn.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/utils/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/utils/__pycache__/__init__.cpython-311.pyc index 0d9845a9..9a43bd75 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/utils/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/utils/__pycache__/distribution.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/utils/__pycache__/distribution.cpython-311.pyc index f8ef2027..cb5a591a 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/utils/__pycache__/distribution.cpython-311.pyc and b/.venv/Lib/site-packages/TTS/vocoder/utils/__pycache__/distribution.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/TTS/vocoder/utils/__pycache__/generic_utils.cpython-311.pyc b/.venv/Lib/site-packages/TTS/vocoder/utils/__pycache__/generic_utils.cpython-311.pyc index 4625d269..e3d31e1b 100644 Binary files a/.venv/Lib/site-packages/TTS/vocoder/utils/__pycache__/generic_utils.cpython-311.pyc and 
b/.venv/Lib/site-packages/TTS/vocoder/utils/__pycache__/generic_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/__pycache__/_soundfile.cpython-311.pyc b/.venv/Lib/site-packages/__pycache__/_soundfile.cpython-311.pyc index 72c27a6e..93df3365 100644 Binary files a/.venv/Lib/site-packages/__pycache__/_soundfile.cpython-311.pyc and b/.venv/Lib/site-packages/__pycache__/_soundfile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/__pycache__/cython.cpython-311.pyc b/.venv/Lib/site-packages/__pycache__/cython.cpython-311.pyc index a566091a..91e4dc01 100644 Binary files a/.venv/Lib/site-packages/__pycache__/cython.cpython-311.pyc and b/.venv/Lib/site-packages/__pycache__/cython.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/__pycache__/decorator.cpython-311.pyc b/.venv/Lib/site-packages/__pycache__/decorator.cpython-311.pyc index f7e0afea..fb74fb10 100644 Binary files a/.venv/Lib/site-packages/__pycache__/decorator.cpython-311.pyc and b/.venv/Lib/site-packages/__pycache__/decorator.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/__pycache__/six.cpython-311.pyc b/.venv/Lib/site-packages/__pycache__/six.cpython-311.pyc index 9f6f0092..7b8e7ec6 100644 Binary files a/.venv/Lib/site-packages/__pycache__/six.cpython-311.pyc and b/.venv/Lib/site-packages/__pycache__/six.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/__pycache__/soundfile.cpython-311.pyc b/.venv/Lib/site-packages/__pycache__/soundfile.cpython-311.pyc index 699616b9..45bf0919 100644 Binary files a/.venv/Lib/site-packages/__pycache__/soundfile.cpython-311.pyc and b/.venv/Lib/site-packages/__pycache__/soundfile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/__pycache__/threadpoolctl.cpython-311.pyc b/.venv/Lib/site-packages/__pycache__/threadpoolctl.cpython-311.pyc index 91334a58..91039485 100644 Binary files a/.venv/Lib/site-packages/__pycache__/threadpoolctl.cpython-311.pyc and 
b/.venv/Lib/site-packages/__pycache__/threadpoolctl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/__pycache__/typing_extensions.cpython-311.pyc b/.venv/Lib/site-packages/__pycache__/typing_extensions.cpython-311.pyc index 00377f23..a91960c8 100644 Binary files a/.venv/Lib/site-packages/__pycache__/typing_extensions.cpython-311.pyc and b/.venv/Lib/site-packages/__pycache__/typing_extensions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/_distutils_hack/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/_distutils_hack/__pycache__/__init__.cpython-311.pyc index 75aca5fc..19f22c66 100644 Binary files a/.venv/Lib/site-packages/_distutils_hack/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/_distutils_hack/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/_distutils_hack/__pycache__/override.cpython-311.pyc b/.venv/Lib/site-packages/_distutils_hack/__pycache__/override.cpython-311.pyc index 5071d562..d2daac2c 100644 Binary files a/.venv/Lib/site-packages/_distutils_hack/__pycache__/override.cpython-311.pyc and b/.venv/Lib/site-packages/_distutils_hack/__pycache__/override.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/_soundfile_data/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/_soundfile_data/__pycache__/__init__.cpython-311.pyc index a790ff62..cbd0f706 100644 Binary files a/.venv/Lib/site-packages/_soundfile_data/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/_soundfile_data/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/__init__.cpython-311.pyc index 9219a507..1790dbf1 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/aiohttp/__pycache__/abc.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/abc.cpython-311.pyc index fc3dcf5a..335f0032 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/abc.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/abc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/base_protocol.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/base_protocol.cpython-311.pyc index 52778c6d..d73ced84 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/base_protocol.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/base_protocol.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/client.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/client.cpython-311.pyc index 49634cd8..e9b8cd4a 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/client.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/client.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/client_exceptions.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/client_exceptions.cpython-311.pyc index a7290e2e..4fc2a8f3 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/client_exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/client_exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/client_proto.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/client_proto.cpython-311.pyc index d4787c20..f7e521d6 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/client_proto.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/client_proto.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/client_reqrep.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/client_reqrep.cpython-311.pyc index a0ab324e..7025e5d0 100644 Binary files 
a/.venv/Lib/site-packages/aiohttp/__pycache__/client_reqrep.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/client_reqrep.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/client_ws.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/client_ws.cpython-311.pyc index 1a895970..a4dbcca3 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/client_ws.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/client_ws.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/compression_utils.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/compression_utils.cpython-311.pyc index 7b75b314..215ac715 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/compression_utils.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/compression_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/connector.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/connector.cpython-311.pyc index 07c1cf5f..a2332119 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/connector.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/connector.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/cookiejar.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/cookiejar.cpython-311.pyc index 80a84b1f..cf050b9f 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/cookiejar.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/cookiejar.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/formdata.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/formdata.cpython-311.pyc index d66e2c34..efac362e 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/formdata.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/formdata.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/aiohttp/__pycache__/hdrs.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/hdrs.cpython-311.pyc index e87a88f6..0522b0db 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/hdrs.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/hdrs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/helpers.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/helpers.cpython-311.pyc index 9fa63b69..7b8750b8 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/helpers.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/helpers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/http.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/http.cpython-311.pyc index ee333e1d..34c01ba6 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/http.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/http.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/http_exceptions.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/http_exceptions.cpython-311.pyc index 9c962343..8863018e 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/http_exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/http_exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/http_parser.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/http_parser.cpython-311.pyc index fb383f17..91ae67ba 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/http_parser.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/http_parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/http_websocket.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/http_websocket.cpython-311.pyc index 49e0b18d..48185f1e 100644 Binary files 
a/.venv/Lib/site-packages/aiohttp/__pycache__/http_websocket.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/http_websocket.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/http_writer.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/http_writer.cpython-311.pyc index 8ff7e3f1..3c9f009b 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/http_writer.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/http_writer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/locks.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/locks.cpython-311.pyc index 1d480c17..f4fb8ca3 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/locks.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/locks.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/log.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/log.cpython-311.pyc index 1f067c55..ebb29176 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/log.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/log.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/multipart.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/multipart.cpython-311.pyc index 7cbd08f5..2e0ae91a 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/multipart.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/multipart.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/payload.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/payload.cpython-311.pyc index d15fd47e..82520bbf 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/payload.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/payload.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/payload_streamer.cpython-311.pyc 
b/.venv/Lib/site-packages/aiohttp/__pycache__/payload_streamer.cpython-311.pyc index bdd9b33e..7c3bed39 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/payload_streamer.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/payload_streamer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/resolver.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/resolver.cpython-311.pyc index f8975856..6166ff9e 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/resolver.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/resolver.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/streams.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/streams.cpython-311.pyc index 3c46d0c8..8d69cf0a 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/streams.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/streams.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-311.pyc index 25a45498..6890f4da 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/tracing.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/tracing.cpython-311.pyc index 522fb4ab..3547477e 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/tracing.cpython-311.pyc and b/.venv/Lib/site-packages/aiohttp/__pycache__/tracing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/typedefs.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/typedefs.cpython-311.pyc index 1b31286a..d127c4b2 100644 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/typedefs.cpython-311.pyc and 
b/.venv/Lib/site-packages/aiohttp/__pycache__/typedefs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/aiosignal/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/aiosignal/__pycache__/__init__.cpython-311.pyc index 5637a617..a6f6954b 100644 Binary files a/.venv/Lib/site-packages/aiosignal/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/aiosignal/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/annotated_types/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/annotated_types/__pycache__/__init__.cpython-311.pyc index 78b40fb4..f99e0103 100644 Binary files a/.venv/Lib/site-packages/annotated_types/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/annotated_types/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyascii/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/anyascii/__pycache__/__init__.cpython-311.pyc index 1bd8c2ec..42dd04bf 100644 Binary files a/.venv/Lib/site-packages/anyascii/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/anyascii/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/decorator-5.1.1.dist-info/INSTALLER b/.venv/Lib/site-packages/anyio-4.3.0.dist-info/INSTALLER similarity index 100% rename from .venv/Lib/site-packages/decorator-5.1.1.dist-info/INSTALLER rename to .venv/Lib/site-packages/anyio-4.3.0.dist-info/INSTALLER diff --git a/.venv/Lib/site-packages/anyio-4.3.0.dist-info/LICENSE b/.venv/Lib/site-packages/anyio-4.3.0.dist-info/LICENSE new file mode 100644 index 00000000..104eebf5 --- /dev/null +++ b/.venv/Lib/site-packages/anyio-4.3.0.dist-info/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2018 Alex Grönholm + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without 
limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/.venv/Lib/site-packages/anyio-4.3.0.dist-info/METADATA b/.venv/Lib/site-packages/anyio-4.3.0.dist-info/METADATA new file mode 100644 index 00000000..e02715ca --- /dev/null +++ b/.venv/Lib/site-packages/anyio-4.3.0.dist-info/METADATA @@ -0,0 +1,104 @@ +Metadata-Version: 2.1 +Name: anyio +Version: 4.3.0 +Summary: High level compatibility layer for multiple asynchronous event loop implementations +Author-email: Alex Grönholm +License: MIT +Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/ +Project-URL: Changelog, https://anyio.readthedocs.io/en/stable/versionhistory.html +Project-URL: Source code, https://github.com/agronholm/anyio +Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Framework :: AnyIO +Classifier: Typing :: Typed +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming 
Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: idna >=2.8 +Requires-Dist: sniffio >=1.1 +Requires-Dist: exceptiongroup >=1.0.2 ; python_version < "3.11" +Requires-Dist: typing-extensions >=4.1 ; python_version < "3.11" +Provides-Extra: doc +Requires-Dist: packaging ; extra == 'doc' +Requires-Dist: Sphinx >=7 ; extra == 'doc' +Requires-Dist: sphinx-rtd-theme ; extra == 'doc' +Requires-Dist: sphinx-autodoc-typehints >=1.2.0 ; extra == 'doc' +Provides-Extra: test +Requires-Dist: anyio[trio] ; extra == 'test' +Requires-Dist: coverage[toml] >=7 ; extra == 'test' +Requires-Dist: exceptiongroup >=1.2.0 ; extra == 'test' +Requires-Dist: hypothesis >=4.0 ; extra == 'test' +Requires-Dist: psutil >=5.9 ; extra == 'test' +Requires-Dist: pytest >=7.0 ; extra == 'test' +Requires-Dist: pytest-mock >=3.6.1 ; extra == 'test' +Requires-Dist: trustme ; extra == 'test' +Requires-Dist: uvloop >=0.17 ; (platform_python_implementation == "CPython" and platform_system != "Windows") and extra == 'test' +Provides-Extra: trio +Requires-Dist: trio >=0.23 ; extra == 'trio' + +.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg + :target: https://github.com/agronholm/anyio/actions/workflows/test.yml + :alt: Build Status +.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master + :target: https://coveralls.io/github/agronholm/anyio?branch=master + :alt: Code Coverage +.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest + :target: https://anyio.readthedocs.io/en/latest/?badge=latest + :alt: Documentation +.. image:: https://badges.gitter.im/gitterHQ/gitter.svg + :target: https://gitter.im/python-trio/AnyIO + :alt: Gitter chat + +AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or +trio_. 
It implements trio-like `structured concurrency`_ (SC) on top of asyncio and works in harmony +with the native SC of trio itself. + +Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or +trio_. AnyIO can also be adopted into a library or application incrementally – bit by bit, no full +refactoring necessary. It will blend in with the native libraries of your chosen backend. + +Documentation +------------- + +View full documentation at: https://anyio.readthedocs.io/ + +Features +-------- + +AnyIO offers the following functionality: + +* Task groups (nurseries_ in trio terminology) +* High-level networking (TCP, UDP and UNIX sockets) + + * `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python + 3.8) + * async/await style UDP sockets (unlike asyncio where you still have to use Transports and + Protocols) + +* A versatile API for byte streams and object streams +* Inter-task synchronization and communication (locks, conditions, events, semaphores, object + streams) +* Worker threads +* Subprocesses +* Asynchronous file I/O (using worker threads) +* Signal handling + +AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures. +It even works with the popular Hypothesis_ library. + +.. _asyncio: https://docs.python.org/3/library/asyncio.html +.. _trio: https://github.com/python-trio/trio +.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency +.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning +.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs +.. _pytest: https://docs.pytest.org/en/latest/ +.. 
_Hypothesis: https://hypothesis.works/ diff --git a/.venv/Lib/site-packages/anyio-4.3.0.dist-info/RECORD b/.venv/Lib/site-packages/anyio-4.3.0.dist-info/RECORD new file mode 100644 index 00000000..b7daa35c --- /dev/null +++ b/.venv/Lib/site-packages/anyio-4.3.0.dist-info/RECORD @@ -0,0 +1,82 @@ +anyio-4.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +anyio-4.3.0.dist-info/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081 +anyio-4.3.0.dist-info/METADATA,sha256=Xy5N0vn6s_rjN1C0EbX_z1N_lf3bZIJu88RAQxVAsrI,4599 +anyio-4.3.0.dist-info/RECORD,, +anyio-4.3.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92 +anyio-4.3.0.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39 +anyio-4.3.0.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6 +anyio/__init__.py,sha256=CxUxIHOIONI3KpsDLCg-dI6lQaDkW_4Zhtu5jWt1XO8,4344 +anyio/__pycache__/__init__.cpython-311.pyc,, +anyio/__pycache__/from_thread.cpython-311.pyc,, +anyio/__pycache__/lowlevel.cpython-311.pyc,, +anyio/__pycache__/pytest_plugin.cpython-311.pyc,, +anyio/__pycache__/to_process.cpython-311.pyc,, +anyio/__pycache__/to_thread.cpython-311.pyc,, +anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_backends/__pycache__/__init__.cpython-311.pyc,, +anyio/_backends/__pycache__/_asyncio.cpython-311.pyc,, +anyio/_backends/__pycache__/_trio.cpython-311.pyc,, +anyio/_backends/_asyncio.py,sha256=D1ME_t4zfei7sm7Ic8jEQBsAc3uMykvsQjZxhT5aTNg,81968 +anyio/_backends/_trio.py,sha256=87ahML2dsX3uQuCkyIB4E7XFK6jAGpxs5hVlGmHHd8Q,35642 +anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_core/__pycache__/__init__.cpython-311.pyc,, +anyio/_core/__pycache__/_eventloop.cpython-311.pyc,, +anyio/_core/__pycache__/_exceptions.cpython-311.pyc,, +anyio/_core/__pycache__/_fileio.cpython-311.pyc,, +anyio/_core/__pycache__/_resources.cpython-311.pyc,, 
+anyio/_core/__pycache__/_signals.cpython-311.pyc,, +anyio/_core/__pycache__/_sockets.cpython-311.pyc,, +anyio/_core/__pycache__/_streams.cpython-311.pyc,, +anyio/_core/__pycache__/_subprocesses.cpython-311.pyc,, +anyio/_core/__pycache__/_synchronization.cpython-311.pyc,, +anyio/_core/__pycache__/_tasks.cpython-311.pyc,, +anyio/_core/__pycache__/_testing.cpython-311.pyc,, +anyio/_core/__pycache__/_typedattr.cpython-311.pyc,, +anyio/_core/_eventloop.py,sha256=uCwWwGtN9Tf46nkcWLyku8iYEFWCkSjPW0AkDbnpCM0,4408 +anyio/_core/_exceptions.py,sha256=wUmhDu80qEB7z9EdCqUwVEhNUlNEok4_W2-rC6sCAUQ,2078 +anyio/_core/_fileio.py,sha256=Zdp3L0_T7mMCaYq3bFTCb-udTnIknKpzcLEvHK-Tmbc,19512 +anyio/_core/_resources.py,sha256=NbmU5O5UX3xEyACnkmYX28Fmwdl-f-ny0tHym26e0w0,435 +anyio/_core/_signals.py,sha256=rDOVxtugZDgC5AhfW3lrwsre2n9Pj_adoRUidBiF6dA,878 +anyio/_core/_sockets.py,sha256=HwOMg0xUPw0T7N-aipxq_4OEM703llh3I9_YIg9a4XM,24048 +anyio/_core/_streams.py,sha256=Z8ZlTY6xom5EszrMsgCT3TphiT4JIlQG-y33CrD0NQY,1811 +anyio/_core/_subprocesses.py,sha256=ZLLNXAtlRGfbyC4sOIltYB1k3NJa3tqk_x_Fsnbcs1M,5272 +anyio/_core/_synchronization.py,sha256=h3o6dWWbzVrcNmi7i2mQjEgRtnIxkGtjmYK7KMpdlaE,18444 +anyio/_core/_tasks.py,sha256=pvVEX2Fw159sf0ypAPerukKsZgRRwvFFedVW52nR2Vk,4764 +anyio/_core/_testing.py,sha256=i97S5rSWIFqfCGPm4mEMdiJaUpVskk-cWEjarWTeXXs,1964 +anyio/_core/_typedattr.py,sha256=QTbaIwZEewhwAKvbBHFBcO_cRhNP_lXjAobEldzExCU,2499 +anyio/abc/__init__.py,sha256=U44_s3BglL8BojWQiq0KuokvCqkunIp-ySH3GyRXxAc,2681 +anyio/abc/__pycache__/__init__.cpython-311.pyc,, +anyio/abc/__pycache__/_eventloop.cpython-311.pyc,, +anyio/abc/__pycache__/_resources.cpython-311.pyc,, +anyio/abc/__pycache__/_sockets.cpython-311.pyc,, +anyio/abc/__pycache__/_streams.cpython-311.pyc,, +anyio/abc/__pycache__/_subprocesses.cpython-311.pyc,, +anyio/abc/__pycache__/_tasks.cpython-311.pyc,, +anyio/abc/__pycache__/_testing.cpython-311.pyc,, +anyio/abc/_eventloop.py,sha256=QOtkEHCkoE8czGu4RNzZ_q-xNjC0nRyoS0QQJ5KTvYU,10097 
+anyio/abc/_resources.py,sha256=KBJP3wGbvSfKfTjfOLL4QCJdeiaNwqqF_6FwPsmQssM,763 +anyio/abc/_sockets.py,sha256=XdZ42TQ1omZN9Ec3HUfTMWG_i-21yMjXQ_FFslAZtzQ,6269 +anyio/abc/_streams.py,sha256=GzST5Q2zQmxVzdrAqtbSyHNxkPlIC9AzeZJg_YyPAXw,6598 +anyio/abc/_subprocesses.py,sha256=cumAPJTktOQtw63IqG0lDpyZqu_l1EElvQHMiwJgL08,2067 +anyio/abc/_tasks.py,sha256=q3bEbCF46I2tQjYSbRdbaavq0R_HOV9JAjzQr8biprU,2747 +anyio/abc/_testing.py,sha256=EiWEaIVy15lHszO000Xp4FsB13NbBvC1BpUci47B5zs,1829 +anyio/from_thread.py,sha256=UTEY_NsiqQRukO3L3riQx4Eegulj3RyLlbITJz7pvLM,15749 +anyio/lowlevel.py,sha256=0awnMh05kA5WUNaOBoQZSImBj0xLNRlYOuMGGiztWnM,4185 +anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/pytest_plugin.py,sha256=TBgRAfT-Oxy6efhO1Tziq54NND3Jy4dRmwkMmQXSvhI,5386 +anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/streams/__pycache__/__init__.cpython-311.pyc,, +anyio/streams/__pycache__/buffered.cpython-311.pyc,, +anyio/streams/__pycache__/file.cpython-311.pyc,, +anyio/streams/__pycache__/memory.cpython-311.pyc,, +anyio/streams/__pycache__/stapled.cpython-311.pyc,, +anyio/streams/__pycache__/text.cpython-311.pyc,, +anyio/streams/__pycache__/tls.cpython-311.pyc,, +anyio/streams/buffered.py,sha256=UCldKC168YuLvT7n3HtNPnQ2iWAMSTYQWbZvzLwMwkM,4500 +anyio/streams/file.py,sha256=6uoTNb5KbMoj-6gS3_xrrL8uZN8Q4iIvOS1WtGyFfKw,4383 +anyio/streams/memory.py,sha256=bqN9YwAPA6ZtdohOsq_YBpLFlRHR5k-W8y0pD_jznb8,9296 +anyio/streams/stapled.py,sha256=U09pCrmOw9kkNhe6tKopsm1QIMT1lFTFvtb-A7SIe4k,4302 +anyio/streams/text.py,sha256=6x8w8xlfCZKTUWQoJiMPoMhSSJFUBRKgoBNSBtbd9yg,5094 +anyio/streams/tls.py,sha256=ev-6yNOGcIkziIkcIfKj8VmLqQJW-iDBJttaKgKDsF4,12752 +anyio/to_process.py,sha256=lx_bt0CUJsS1eSlraw662OpCjRgGXowoyf1Q-i-kOxo,9535 +anyio/to_thread.py,sha256=WM2JQ2MbVsd5D5CM08bQiTwzZIvpsGjfH1Fy247KoDQ,2396 diff --git a/.venv/Lib/site-packages/anyio-4.3.0.dist-info/WHEEL b/.venv/Lib/site-packages/anyio-4.3.0.dist-info/WHEEL new file mode 100644 
index 00000000..98c0d20b --- /dev/null +++ b/.venv/Lib/site-packages/anyio-4.3.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/Lib/site-packages/anyio-4.3.0.dist-info/entry_points.txt b/.venv/Lib/site-packages/anyio-4.3.0.dist-info/entry_points.txt new file mode 100644 index 00000000..44dd9bdc --- /dev/null +++ b/.venv/Lib/site-packages/anyio-4.3.0.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[pytest11] +anyio = anyio.pytest_plugin diff --git a/.venv/Lib/site-packages/anyio-4.3.0.dist-info/top_level.txt b/.venv/Lib/site-packages/anyio-4.3.0.dist-info/top_level.txt new file mode 100644 index 00000000..c77c069e --- /dev/null +++ b/.venv/Lib/site-packages/anyio-4.3.0.dist-info/top_level.txt @@ -0,0 +1 @@ +anyio diff --git a/.venv/Lib/site-packages/anyio/__init__.py b/.venv/Lib/site-packages/anyio/__init__.py new file mode 100644 index 00000000..7bfe2316 --- /dev/null +++ b/.venv/Lib/site-packages/anyio/__init__.py @@ -0,0 +1,76 @@ +from __future__ import annotations + +from typing import Any + +from ._core._eventloop import current_time as current_time +from ._core._eventloop import get_all_backends as get_all_backends +from ._core._eventloop import get_cancelled_exc_class as get_cancelled_exc_class +from ._core._eventloop import run as run +from ._core._eventloop import sleep as sleep +from ._core._eventloop import sleep_forever as sleep_forever +from ._core._eventloop import sleep_until as sleep_until +from ._core._exceptions import BrokenResourceError as BrokenResourceError +from ._core._exceptions import BrokenWorkerProcess as BrokenWorkerProcess +from ._core._exceptions import BusyResourceError as BusyResourceError +from ._core._exceptions import ClosedResourceError as ClosedResourceError +from ._core._exceptions import DelimiterNotFound as DelimiterNotFound +from ._core._exceptions import EndOfStream as EndOfStream +from ._core._exceptions import IncompleteRead 
as IncompleteRead +from ._core._exceptions import TypedAttributeLookupError as TypedAttributeLookupError +from ._core._exceptions import WouldBlock as WouldBlock +from ._core._fileio import AsyncFile as AsyncFile +from ._core._fileio import Path as Path +from ._core._fileio import open_file as open_file +from ._core._fileio import wrap_file as wrap_file +from ._core._resources import aclose_forcefully as aclose_forcefully +from ._core._signals import open_signal_receiver as open_signal_receiver +from ._core._sockets import connect_tcp as connect_tcp +from ._core._sockets import connect_unix as connect_unix +from ._core._sockets import create_connected_udp_socket as create_connected_udp_socket +from ._core._sockets import ( + create_connected_unix_datagram_socket as create_connected_unix_datagram_socket, +) +from ._core._sockets import create_tcp_listener as create_tcp_listener +from ._core._sockets import create_udp_socket as create_udp_socket +from ._core._sockets import create_unix_datagram_socket as create_unix_datagram_socket +from ._core._sockets import create_unix_listener as create_unix_listener +from ._core._sockets import getaddrinfo as getaddrinfo +from ._core._sockets import getnameinfo as getnameinfo +from ._core._sockets import wait_socket_readable as wait_socket_readable +from ._core._sockets import wait_socket_writable as wait_socket_writable +from ._core._streams import create_memory_object_stream as create_memory_object_stream +from ._core._subprocesses import open_process as open_process +from ._core._subprocesses import run_process as run_process +from ._core._synchronization import CapacityLimiter as CapacityLimiter +from ._core._synchronization import ( + CapacityLimiterStatistics as CapacityLimiterStatistics, +) +from ._core._synchronization import Condition as Condition +from ._core._synchronization import ConditionStatistics as ConditionStatistics +from ._core._synchronization import Event as Event +from ._core._synchronization import 
EventStatistics as EventStatistics +from ._core._synchronization import Lock as Lock +from ._core._synchronization import LockStatistics as LockStatistics +from ._core._synchronization import ResourceGuard as ResourceGuard +from ._core._synchronization import Semaphore as Semaphore +from ._core._synchronization import SemaphoreStatistics as SemaphoreStatistics +from ._core._tasks import TASK_STATUS_IGNORED as TASK_STATUS_IGNORED +from ._core._tasks import CancelScope as CancelScope +from ._core._tasks import create_task_group as create_task_group +from ._core._tasks import current_effective_deadline as current_effective_deadline +from ._core._tasks import fail_after as fail_after +from ._core._tasks import move_on_after as move_on_after +from ._core._testing import TaskInfo as TaskInfo +from ._core._testing import get_current_task as get_current_task +from ._core._testing import get_running_tasks as get_running_tasks +from ._core._testing import wait_all_tasks_blocked as wait_all_tasks_blocked +from ._core._typedattr import TypedAttributeProvider as TypedAttributeProvider +from ._core._typedattr import TypedAttributeSet as TypedAttributeSet +from ._core._typedattr import typed_attribute as typed_attribute + +# Re-export imports so they look like they live directly in this package +key: str +value: Any +for key, value in list(locals().items()): + if getattr(value, "__module__", "").startswith("anyio."): + value.__module__ = __name__ diff --git a/.venv/Lib/site-packages/anyio/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/anyio/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..fbbfcc2b Binary files /dev/null and b/.venv/Lib/site-packages/anyio/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/__pycache__/from_thread.cpython-311.pyc b/.venv/Lib/site-packages/anyio/__pycache__/from_thread.cpython-311.pyc new file mode 100644 index 00000000..df294b9d Binary files /dev/null and 
b/.venv/Lib/site-packages/anyio/__pycache__/from_thread.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/__pycache__/lowlevel.cpython-311.pyc b/.venv/Lib/site-packages/anyio/__pycache__/lowlevel.cpython-311.pyc new file mode 100644 index 00000000..646940fa Binary files /dev/null and b/.venv/Lib/site-packages/anyio/__pycache__/lowlevel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/__pycache__/pytest_plugin.cpython-311.pyc b/.venv/Lib/site-packages/anyio/__pycache__/pytest_plugin.cpython-311.pyc new file mode 100644 index 00000000..3e0674d2 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/__pycache__/pytest_plugin.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/__pycache__/to_process.cpython-311.pyc b/.venv/Lib/site-packages/anyio/__pycache__/to_process.cpython-311.pyc new file mode 100644 index 00000000..88aae681 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/__pycache__/to_process.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/__pycache__/to_thread.cpython-311.pyc b/.venv/Lib/site-packages/anyio/__pycache__/to_thread.cpython-311.pyc new file mode 100644 index 00000000..e69ecf73 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/__pycache__/to_thread.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/_backends/__init__.py b/.venv/Lib/site-packages/anyio/_backends/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/anyio/_backends/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/anyio/_backends/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..0866cc34 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/_backends/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-311.pyc b/.venv/Lib/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-311.pyc new file mode 100644 index 
00000000..060a0ffd Binary files /dev/null and b/.venv/Lib/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/_backends/__pycache__/_trio.cpython-311.pyc b/.venv/Lib/site-packages/anyio/_backends/__pycache__/_trio.cpython-311.pyc new file mode 100644 index 00000000..ec6cd53a Binary files /dev/null and b/.venv/Lib/site-packages/anyio/_backends/__pycache__/_trio.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/_backends/_asyncio.py b/.venv/Lib/site-packages/anyio/_backends/_asyncio.py new file mode 100644 index 00000000..2699bf81 --- /dev/null +++ b/.venv/Lib/site-packages/anyio/_backends/_asyncio.py @@ -0,0 +1,2478 @@ +from __future__ import annotations + +import array +import asyncio +import concurrent.futures +import math +import socket +import sys +import threading +from asyncio import ( + AbstractEventLoop, + CancelledError, + all_tasks, + create_task, + current_task, + get_running_loop, + sleep, +) +from asyncio.base_events import _run_until_complete_cb # type: ignore[attr-defined] +from collections import OrderedDict, deque +from collections.abc import AsyncIterator, Generator, Iterable +from concurrent.futures import Future +from contextlib import suppress +from contextvars import Context, copy_context +from dataclasses import dataclass +from functools import partial, wraps +from inspect import ( + CORO_RUNNING, + CORO_SUSPENDED, + getcoroutinestate, + iscoroutine, +) +from io import IOBase +from os import PathLike +from queue import Queue +from signal import Signals +from socket import AddressFamily, SocketKind +from threading import Thread +from types import TracebackType +from typing import ( + IO, + Any, + AsyncGenerator, + Awaitable, + Callable, + Collection, + ContextManager, + Coroutine, + Mapping, + Optional, + Sequence, + Tuple, + TypeVar, + cast, +) +from weakref import WeakKeyDictionary + +import sniffio + +from .. 
import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc +from .._core._eventloop import claim_worker_thread, threadlocals +from .._core._exceptions import ( + BrokenResourceError, + BusyResourceError, + ClosedResourceError, + EndOfStream, + WouldBlock, +) +from .._core._sockets import convert_ipv6_sockaddr +from .._core._streams import create_memory_object_stream +from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import ResourceGuard +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import ( + AsyncBackend, + IPSockAddrType, + SocketListener, + UDPPacketType, + UNIXDatagramPacketType, +) +from ..lowlevel import RunVar +from ..streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream + +if sys.version_info >= (3, 10): + from typing import ParamSpec +else: + from typing_extensions import ParamSpec + +if sys.version_info >= (3, 11): + from asyncio import Runner + from typing import TypeVarTuple, Unpack +else: + import contextvars + import enum + import signal + from asyncio import coroutines, events, exceptions, tasks + + from exceptiongroup import BaseExceptionGroup + from typing_extensions import TypeVarTuple, Unpack + + class _State(enum.Enum): + CREATED = "created" + INITIALIZED = "initialized" + CLOSED = "closed" + + class Runner: + # Copied from CPython 3.11 + def __init__( + self, + *, + debug: bool | None = None, + loop_factory: Callable[[], AbstractEventLoop] | None = None, + ): + self._state = _State.CREATED + self._debug = debug + self._loop_factory = loop_factory + self._loop: AbstractEventLoop | None = None + self._context = None + self._interrupt_count = 0 + self._set_event_loop = False + + def __enter__(self) -> Runner: + self._lazy_init() + return self + + def __exit__( + self, + exc_type: type[BaseException], + exc_val: BaseException, + exc_tb: TracebackType, + ) -> None: + self.close() + + def 
close(self) -> None: + """Shutdown and close event loop.""" + if self._state is not _State.INITIALIZED: + return + try: + loop = self._loop + _cancel_all_tasks(loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + if hasattr(loop, "shutdown_default_executor"): + loop.run_until_complete(loop.shutdown_default_executor()) + else: + loop.run_until_complete(_shutdown_default_executor(loop)) + finally: + if self._set_event_loop: + events.set_event_loop(None) + loop.close() + self._loop = None + self._state = _State.CLOSED + + def get_loop(self) -> AbstractEventLoop: + """Return embedded event loop.""" + self._lazy_init() + return self._loop + + def run(self, coro: Coroutine[T_Retval], *, context=None) -> T_Retval: + """Run a coroutine inside the embedded event loop.""" + if not coroutines.iscoroutine(coro): + raise ValueError(f"a coroutine was expected, got {coro!r}") + + if events._get_running_loop() is not None: + # fail fast with short traceback + raise RuntimeError( + "Runner.run() cannot be called from a running event loop" + ) + + self._lazy_init() + + if context is None: + context = self._context + task = context.run(self._loop.create_task, coro) + + if ( + threading.current_thread() is threading.main_thread() + and signal.getsignal(signal.SIGINT) is signal.default_int_handler + ): + sigint_handler = partial(self._on_sigint, main_task=task) + try: + signal.signal(signal.SIGINT, sigint_handler) + except ValueError: + # `signal.signal` may throw if `threading.main_thread` does + # not support signals (e.g. 
embedded interpreter with signals + # not registered - see gh-91880) + sigint_handler = None + else: + sigint_handler = None + + self._interrupt_count = 0 + try: + return self._loop.run_until_complete(task) + except exceptions.CancelledError: + if self._interrupt_count > 0: + uncancel = getattr(task, "uncancel", None) + if uncancel is not None and uncancel() == 0: + raise KeyboardInterrupt() + raise # CancelledError + finally: + if ( + sigint_handler is not None + and signal.getsignal(signal.SIGINT) is sigint_handler + ): + signal.signal(signal.SIGINT, signal.default_int_handler) + + def _lazy_init(self) -> None: + if self._state is _State.CLOSED: + raise RuntimeError("Runner is closed") + if self._state is _State.INITIALIZED: + return + if self._loop_factory is None: + self._loop = events.new_event_loop() + if not self._set_event_loop: + # Call set_event_loop only once to avoid calling + # attach_loop multiple times on child watchers + events.set_event_loop(self._loop) + self._set_event_loop = True + else: + self._loop = self._loop_factory() + if self._debug is not None: + self._loop.set_debug(self._debug) + self._context = contextvars.copy_context() + self._state = _State.INITIALIZED + + def _on_sigint(self, signum, frame, main_task: asyncio.Task) -> None: + self._interrupt_count += 1 + if self._interrupt_count == 1 and not main_task.done(): + main_task.cancel() + # wakeup loop if it is blocked by select() with long timeout + self._loop.call_soon_threadsafe(lambda: None) + return + raise KeyboardInterrupt() + + def _cancel_all_tasks(loop: AbstractEventLoop) -> None: + to_cancel = tasks.all_tasks(loop) + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + loop.run_until_complete(tasks.gather(*to_cancel, return_exceptions=True)) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + loop.call_exception_handler( + { + "message": "unhandled exception during asyncio.run() shutdown", + "exception": 
task.exception(), + "task": task, + } + ) + + async def _shutdown_default_executor(loop: AbstractEventLoop) -> None: + """Schedule the shutdown of the default executor.""" + + def _do_shutdown(future: asyncio.futures.Future) -> None: + try: + loop._default_executor.shutdown(wait=True) # type: ignore[attr-defined] + loop.call_soon_threadsafe(future.set_result, None) + except Exception as ex: + loop.call_soon_threadsafe(future.set_exception, ex) + + loop._executor_shutdown_called = True + if loop._default_executor is None: + return + future = loop.create_future() + thread = threading.Thread(target=_do_shutdown, args=(future,)) + thread.start() + try: + await future + finally: + thread.join() + + +T_Retval = TypeVar("T_Retval") +T_contra = TypeVar("T_contra", contravariant=True) +PosArgsT = TypeVarTuple("PosArgsT") +P = ParamSpec("P") + +_root_task: RunVar[asyncio.Task | None] = RunVar("_root_task") + + +def find_root_task() -> asyncio.Task: + root_task = _root_task.get(None) + if root_task is not None and not root_task.done(): + return root_task + + # Look for a task that has been started via run_until_complete() + for task in all_tasks(): + if task._callbacks and not task.done(): + callbacks = [cb for cb, context in task._callbacks] + for cb in callbacks: + if ( + cb is _run_until_complete_cb + or getattr(cb, "__module__", None) == "uvloop.loop" + ): + _root_task.set(task) + return task + + # Look up the topmost task in the AnyIO task tree, if possible + task = cast(asyncio.Task, current_task()) + state = _task_states.get(task) + if state: + cancel_scope = state.cancel_scope + while cancel_scope and cancel_scope._parent_scope is not None: + cancel_scope = cancel_scope._parent_scope + + if cancel_scope is not None: + return cast(asyncio.Task, cancel_scope._host_task) + + return task + + +def get_callable_name(func: Callable) -> str: + module = getattr(func, "__module__", None) + qualname = getattr(func, "__qualname__", None) + return ".".join([x for x in (module, 
qualname) if x]) + + +# +# Event loop +# + +_run_vars: WeakKeyDictionary[asyncio.AbstractEventLoop, Any] = WeakKeyDictionary() + + +def _task_started(task: asyncio.Task) -> bool: + """Return ``True`` if the task has been started and has not finished.""" + try: + return getcoroutinestate(task.get_coro()) in (CORO_RUNNING, CORO_SUSPENDED) + except AttributeError: + # task coro is async_genenerator_asend https://bugs.python.org/issue37771 + raise Exception(f"Cannot determine if task {task} has started or not") from None + + +# +# Timeouts and cancellation +# + + +class CancelScope(BaseCancelScope): + def __new__( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + return object.__new__(cls) + + def __init__(self, deadline: float = math.inf, shield: bool = False): + self._deadline = deadline + self._shield = shield + self._parent_scope: CancelScope | None = None + self._child_scopes: set[CancelScope] = set() + self._cancel_called = False + self._cancelled_caught = False + self._active = False + self._timeout_handle: asyncio.TimerHandle | None = None + self._cancel_handle: asyncio.Handle | None = None + self._tasks: set[asyncio.Task] = set() + self._host_task: asyncio.Task | None = None + self._cancel_calls: int = 0 + self._cancelling: int | None = None + + def __enter__(self) -> CancelScope: + if self._active: + raise RuntimeError( + "Each CancelScope may only be used for a single 'with' block" + ) + + self._host_task = host_task = cast(asyncio.Task, current_task()) + self._tasks.add(host_task) + try: + task_state = _task_states[host_task] + except KeyError: + task_state = TaskState(None, self) + _task_states[host_task] = task_state + else: + self._parent_scope = task_state.cancel_scope + task_state.cancel_scope = self + if self._parent_scope is not None: + self._parent_scope._child_scopes.add(self) + self._parent_scope._tasks.remove(host_task) + + self._timeout() + self._active = True + if sys.version_info >= (3, 11): + self._cancelling = 
self._host_task.cancelling() + + # Start cancelling the host task if the scope was cancelled before entering + if self._cancel_called: + self._deliver_cancellation(self) + + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + if not self._active: + raise RuntimeError("This cancel scope is not active") + if current_task() is not self._host_task: + raise RuntimeError( + "Attempted to exit cancel scope in a different task than it was " + "entered in" + ) + + assert self._host_task is not None + host_task_state = _task_states.get(self._host_task) + if host_task_state is None or host_task_state.cancel_scope is not self: + raise RuntimeError( + "Attempted to exit a cancel scope that isn't the current tasks's " + "current cancel scope" + ) + + self._active = False + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._tasks.remove(self._host_task) + if self._parent_scope is not None: + self._parent_scope._child_scopes.remove(self) + self._parent_scope._tasks.add(self._host_task) + + host_task_state.cancel_scope = self._parent_scope + + # Restart the cancellation effort in the closest directly cancelled parent + # scope if this one was shielded + self._restart_cancellation_in_parent() + + if self._cancel_called and exc_val is not None: + for exc in iterate_exceptions(exc_val): + if isinstance(exc, CancelledError): + self._cancelled_caught = self._uncancel(exc) + if self._cancelled_caught: + break + + return self._cancelled_caught + + return None + + def _uncancel(self, cancelled_exc: CancelledError) -> bool: + if sys.version_info < (3, 9) or self._host_task is None: + self._cancel_calls = 0 + return True + + # Undo all cancellations done by this scope + if self._cancelling is not None: + while self._cancel_calls: + self._cancel_calls -= 1 + if self._host_task.uncancel() <= self._cancelling: + return True + + 
self._cancel_calls = 0 + return f"Cancelled by cancel scope {id(self):x}" in cancelled_exc.args + + def _timeout(self) -> None: + if self._deadline != math.inf: + loop = get_running_loop() + if loop.time() >= self._deadline: + self.cancel() + else: + self._timeout_handle = loop.call_at(self._deadline, self._timeout) + + def _deliver_cancellation(self, origin: CancelScope) -> bool: + """ + Deliver cancellation to directly contained tasks and nested cancel scopes. + + Schedule another run at the end if we still have tasks eligible for + cancellation. + + :param origin: the cancel scope that originated the cancellation + :return: ``True`` if the delivery needs to be retried on the next cycle + + """ + should_retry = False + current = current_task() + for task in self._tasks: + if task._must_cancel: # type: ignore[attr-defined] + continue + + # The task is eligible for cancellation if it has started + should_retry = True + if task is not current and (task is self._host_task or _task_started(task)): + waiter = task._fut_waiter # type: ignore[attr-defined] + if not isinstance(waiter, asyncio.Future) or not waiter.done(): + self._cancel_calls += 1 + if sys.version_info >= (3, 9): + task.cancel(f"Cancelled by cancel scope {id(origin):x}") + else: + task.cancel() + + # Deliver cancellation to child scopes that aren't shielded or running their own + # cancellation callbacks + for scope in self._child_scopes: + if not scope._shield and not scope.cancel_called: + should_retry = scope._deliver_cancellation(origin) or should_retry + + # Schedule another callback if there are still tasks left + if origin is self: + if should_retry: + self._cancel_handle = get_running_loop().call_soon( + self._deliver_cancellation, origin + ) + else: + self._cancel_handle = None + + return should_retry + + def _restart_cancellation_in_parent(self) -> None: + """ + Restart the cancellation effort in the closest directly cancelled parent scope. 
+ + """ + scope = self._parent_scope + while scope is not None: + if scope._cancel_called: + if scope._cancel_handle is None: + scope._deliver_cancellation(scope) + + break + + # No point in looking beyond any shielded scope + if scope._shield: + break + + scope = scope._parent_scope + + def _parent_cancelled(self) -> bool: + # Check whether any parent has been cancelled + cancel_scope = self._parent_scope + while cancel_scope is not None and not cancel_scope._shield: + if cancel_scope._cancel_called: + return True + else: + cancel_scope = cancel_scope._parent_scope + + return False + + def cancel(self) -> None: + if not self._cancel_called: + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._cancel_called = True + if self._host_task is not None: + self._deliver_cancellation(self) + + @property + def deadline(self) -> float: + return self._deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self._deadline = float(value) + if self._timeout_handle is not None: + self._timeout_handle.cancel() + self._timeout_handle = None + + if self._active and not self._cancel_called: + self._timeout() + + @property + def cancel_called(self) -> bool: + return self._cancel_called + + @property + def cancelled_caught(self) -> bool: + return self._cancelled_caught + + @property + def shield(self) -> bool: + return self._shield + + @shield.setter + def shield(self, value: bool) -> None: + if self._shield != value: + self._shield = value + if not value: + self._restart_cancellation_in_parent() + + +# +# Task states +# + + +class TaskState: + """ + Encapsulates auxiliary task information that cannot be added to the Task instance + itself because there are no guarantees about its implementation. 
+ """ + + __slots__ = "parent_id", "cancel_scope" + + def __init__(self, parent_id: int | None, cancel_scope: CancelScope | None): + self.parent_id = parent_id + self.cancel_scope = cancel_scope + + +_task_states = WeakKeyDictionary() # type: WeakKeyDictionary[asyncio.Task, TaskState] + + +# +# Task groups +# + + +class _AsyncioTaskStatus(abc.TaskStatus): + def __init__(self, future: asyncio.Future, parent_id: int): + self._future = future + self._parent_id = parent_id + + def started(self, value: T_contra | None = None) -> None: + try: + self._future.set_result(value) + except asyncio.InvalidStateError: + raise RuntimeError( + "called 'started' twice on the same task status" + ) from None + + task = cast(asyncio.Task, current_task()) + _task_states[task].parent_id = self._parent_id + + +def iterate_exceptions( + exception: BaseException, +) -> Generator[BaseException, None, None]: + if isinstance(exception, BaseExceptionGroup): + for exc in exception.exceptions: + yield from iterate_exceptions(exc) + else: + yield exception + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self.cancel_scope: CancelScope = CancelScope() + self._active = False + self._exceptions: list[BaseException] = [] + self._tasks: set[asyncio.Task] = set() + + async def __aenter__(self) -> TaskGroup: + self.cancel_scope.__enter__() + self._active = True + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + ignore_exception = self.cancel_scope.__exit__(exc_type, exc_val, exc_tb) + if exc_val is not None: + self.cancel_scope.cancel() + if not isinstance(exc_val, CancelledError): + self._exceptions.append(exc_val) + + cancelled_exc_while_waiting_tasks: CancelledError | None = None + while self._tasks: + try: + await asyncio.wait(self._tasks) + except CancelledError as exc: + # This task was cancelled natively; reraise the CancelledError later + # unless this 
task was already interrupted by another exception + self.cancel_scope.cancel() + if cancelled_exc_while_waiting_tasks is None: + cancelled_exc_while_waiting_tasks = exc + + self._active = False + if self._exceptions: + raise BaseExceptionGroup( + "unhandled errors in a TaskGroup", self._exceptions + ) + + # Raise the CancelledError received while waiting for child tasks to exit, + # unless the context manager itself was previously exited with another + # exception, or if any of the child tasks raised an exception other than + # CancelledError + if cancelled_exc_while_waiting_tasks: + if exc_val is None or ignore_exception: + raise cancelled_exc_while_waiting_tasks + + return ignore_exception + + def _spawn( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + args: tuple[Unpack[PosArgsT]], + name: object, + task_status_future: asyncio.Future | None = None, + ) -> asyncio.Task: + def task_done(_task: asyncio.Task) -> None: + task_state = _task_states[_task] + assert task_state.cancel_scope is not None + assert _task in task_state.cancel_scope._tasks + task_state.cancel_scope._tasks.remove(_task) + self._tasks.remove(task) + del _task_states[_task] + + try: + exc = _task.exception() + except CancelledError as e: + while isinstance(e.__context__, CancelledError): + e = e.__context__ + + exc = e + + if exc is not None: + if task_status_future is None or task_status_future.done(): + if not isinstance(exc, CancelledError): + self._exceptions.append(exc) + + if not self.cancel_scope._parent_cancelled(): + self.cancel_scope.cancel() + else: + task_status_future.set_exception(exc) + elif task_status_future is not None and not task_status_future.done(): + task_status_future.set_exception( + RuntimeError("Child exited without calling task_status.started()") + ) + + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." 
+ ) + + kwargs = {} + if task_status_future: + parent_id = id(current_task()) + kwargs["task_status"] = _AsyncioTaskStatus( + task_status_future, id(self.cancel_scope._host_task) + ) + else: + parent_id = id(self.cancel_scope._host_task) + + coro = func(*args, **kwargs) + if not iscoroutine(coro): + prefix = f"{func.__module__}." if hasattr(func, "__module__") else "" + raise TypeError( + f"Expected {prefix}{func.__qualname__}() to return a coroutine, but " + f"the return value ({coro!r}) is not a coroutine object" + ) + + name = get_callable_name(func) if name is None else str(name) + task = create_task(coro, name=name) + task.add_done_callback(task_done) + + # Make the spawned task inherit the task group's cancel scope + _task_states[task] = TaskState( + parent_id=parent_id, cancel_scope=self.cancel_scope + ) + self.cancel_scope._tasks.add(task) + self._tasks.add(task) + return task + + def start_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> None: + self._spawn(func, args, name) + + async def start( + self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None + ) -> Any: + future: asyncio.Future = asyncio.Future() + task = self._spawn(func, args, name, future) + + # If the task raises an exception after sending a start value without a switch + # point between, the task group is cancelled and this method never proceeds to + # process the completed future. That's why we have to have a shielded cancel + # scope here. 
+ try: + return await future + except CancelledError: + # Cancel the task and wait for it to exit before returning + task.cancel() + with CancelScope(shield=True), suppress(CancelledError): + await task + + raise + + +# +# Threads +# + +_Retval_Queue_Type = Tuple[Optional[T_Retval], Optional[BaseException]] + + +class WorkerThread(Thread): + MAX_IDLE_TIME = 10 # seconds + + def __init__( + self, + root_task: asyncio.Task, + workers: set[WorkerThread], + idle_workers: deque[WorkerThread], + ): + super().__init__(name="AnyIO worker thread") + self.root_task = root_task + self.workers = workers + self.idle_workers = idle_workers + self.loop = root_task._loop + self.queue: Queue[ + tuple[Context, Callable, tuple, asyncio.Future, CancelScope] | None + ] = Queue(2) + self.idle_since = AsyncIOBackend.current_time() + self.stopping = False + + def _report_result( + self, future: asyncio.Future, result: Any, exc: BaseException | None + ) -> None: + self.idle_since = AsyncIOBackend.current_time() + if not self.stopping: + self.idle_workers.append(self) + + if not future.cancelled(): + if exc is not None: + if isinstance(exc, StopIteration): + new_exc = RuntimeError("coroutine raised StopIteration") + new_exc.__cause__ = exc + exc = new_exc + + future.set_exception(exc) + else: + future.set_result(result) + + def run(self) -> None: + with claim_worker_thread(AsyncIOBackend, self.loop): + while True: + item = self.queue.get() + if item is None: + # Shutdown command received + return + + context, func, args, future, cancel_scope = item + if not future.cancelled(): + result = None + exception: BaseException | None = None + threadlocals.current_cancel_scope = cancel_scope + try: + result = context.run(func, *args) + except BaseException as exc: + exception = exc + finally: + del threadlocals.current_cancel_scope + + if not self.loop.is_closed(): + self.loop.call_soon_threadsafe( + self._report_result, future, result, exception + ) + + self.queue.task_done() + + def stop(self, f: 
asyncio.Task | None = None) -> None: + self.stopping = True + self.queue.put_nowait(None) + self.workers.discard(self) + try: + self.idle_workers.remove(self) + except ValueError: + pass + + +_threadpool_idle_workers: RunVar[deque[WorkerThread]] = RunVar( + "_threadpool_idle_workers" +) +_threadpool_workers: RunVar[set[WorkerThread]] = RunVar("_threadpool_workers") + + +class BlockingPortal(abc.BlockingPortal): + def __new__(cls) -> BlockingPortal: + return object.__new__(cls) + + def __init__(self) -> None: + super().__init__() + self._loop = get_running_loop() + + def _spawn_task_from_thread( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + name: object, + future: Future[T_Retval], + ) -> None: + AsyncIOBackend.run_sync_from_thread( + partial(self._task_group.start_soon, name=name), + (self._call_func, func, args, kwargs, future), + self._loop, + ) + + +# +# Subprocesses +# + + +@dataclass(eq=False) +class StreamReaderWrapper(abc.ByteReceiveStream): + _stream: asyncio.StreamReader + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._stream.read(max_bytes) + if data: + return data + else: + raise EndOfStream + + async def aclose(self) -> None: + self._stream.feed_eof() + await AsyncIOBackend.checkpoint() + + +@dataclass(eq=False) +class StreamWriterWrapper(abc.ByteSendStream): + _stream: asyncio.StreamWriter + + async def send(self, item: bytes) -> None: + self._stream.write(item) + await self._stream.drain() + + async def aclose(self) -> None: + self._stream.close() + await AsyncIOBackend.checkpoint() + + +@dataclass(eq=False) +class Process(abc.Process): + _process: asyncio.subprocess.Process + _stdin: StreamWriterWrapper | None + _stdout: StreamReaderWrapper | None + _stderr: StreamReaderWrapper | None + + async def aclose(self) -> None: + with CancelScope(shield=True): + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await 
self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + try: + await self.wait() + except BaseException: + self.kill() + with CancelScope(shield=True): + await self.wait() + + raise + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: int) -> None: + self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> int | None: + return self._process.returncode + + @property + def stdin(self) -> abc.ByteSendStream | None: + return self._stdin + + @property + def stdout(self) -> abc.ByteReceiveStream | None: + return self._stdout + + @property + def stderr(self) -> abc.ByteReceiveStream | None: + return self._stderr + + +def _forcibly_shutdown_process_pool_on_exit( + workers: set[Process], _task: object +) -> None: + """ + Forcibly shuts down worker processes belonging to this event loop.""" + child_watcher: asyncio.AbstractChildWatcher | None = None + if sys.version_info < (3, 12): + try: + child_watcher = asyncio.get_event_loop_policy().get_child_watcher() + except NotImplementedError: + pass + + # Close as much as possible (w/o async/await) to avoid warnings + for process in workers: + if process.returncode is None: + continue + + process._stdin._stream._transport.close() # type: ignore[union-attr] + process._stdout._stream._transport.close() # type: ignore[union-attr] + process._stderr._stream._transport.close() # type: ignore[union-attr] + process.kill() + if child_watcher: + child_watcher.remove_child_handler(process.pid) + + +async def _shutdown_process_pool_on_exit(workers: set[abc.Process]) -> None: + """ + Shuts down worker processes belonging to this event loop. + + NOTE: this only works when the event loop was started using asyncio.run() or + anyio.run(). 
+ + """ + process: abc.Process + try: + await sleep(math.inf) + except asyncio.CancelledError: + for process in workers: + if process.returncode is None: + process.kill() + + for process in workers: + await process.aclose() + + +# +# Sockets and networking +# + + +class StreamProtocol(asyncio.Protocol): + read_queue: deque[bytes] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Exception | None = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque() + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + cast(asyncio.Transport, transport).set_write_buffer_limits(0) + + def connection_lost(self, exc: Exception | None) -> None: + if exc: + self.exception = BrokenResourceError() + self.exception.__cause__ = exc + + self.read_event.set() + self.write_event.set() + + def data_received(self, data: bytes) -> None: + self.read_queue.append(data) + self.read_event.set() + + def eof_received(self) -> bool | None: + self.read_event.set() + return True + + def pause_writing(self) -> None: + self.write_event = asyncio.Event() + + def resume_writing(self) -> None: + self.write_event.set() + + +class DatagramProtocol(asyncio.DatagramProtocol): + read_queue: deque[tuple[bytes, IPSockAddrType]] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Exception | None = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque(maxlen=100) # arbitrary value + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + + def connection_lost(self, exc: Exception | None) -> None: + self.read_event.set() + self.write_event.set() + + def datagram_received(self, data: bytes, addr: IPSockAddrType) -> None: + addr = convert_ipv6_sockaddr(addr) + self.read_queue.append((data, addr)) + self.read_event.set() + + def error_received(self, exc: Exception) -> None: + self.exception = exc 
+ + def pause_writing(self) -> None: + self.write_event.clear() + + def resume_writing(self) -> None: + self.write_event.set() + + +class SocketStream(abc.SocketStream): + def __init__(self, transport: asyncio.Transport, protocol: StreamProtocol): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + await AsyncIOBackend.checkpoint() + + if ( + not self._protocol.read_event.is_set() + and not self._transport.is_closing() + ): + self._transport.resume_reading() + await self._protocol.read_event.wait() + self._transport.pause_reading() + + try: + chunk = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + elif self._protocol.exception: + raise self._protocol.exception from None + else: + raise EndOfStream from None + + if len(chunk) > max_bytes: + # Split the oversized chunk + chunk, leftover = chunk[:max_bytes], chunk[max_bytes:] + self._protocol.read_queue.appendleft(leftover) + + # If the read queue is empty, clear the flag so that the next call will + # block until data is available + if not self._protocol.read_queue: + self._protocol.read_event.clear() + + return chunk + + async def send(self, item: bytes) -> None: + with self._send_guard: + await AsyncIOBackend.checkpoint() + + if self._closed: + raise ClosedResourceError + elif self._protocol.exception is not None: + raise self._protocol.exception + + try: + self._transport.write(item) + except RuntimeError as exc: + if self._transport.is_closing(): + raise BrokenResourceError from exc + else: + raise + + await self._protocol.write_event.wait() + + async def send_eof(self) -> None: + try: + self._transport.write_eof() + 
except OSError: + pass + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + try: + self._transport.write_eof() + except OSError: + pass + + self._transport.close() + await sleep(0) + self._transport.abort() + + +class _RawSocketMixin: + _receive_future: asyncio.Future | None = None + _send_future: asyncio.Future | None = None + _closing = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + def _wait_until_readable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._receive_future + loop.remove_reader(self.__raw_socket) + + f = self._receive_future = asyncio.Future() + loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + def _wait_until_writable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._send_future + loop.remove_writer(self.__raw_socket) + + f = self._send_future = asyncio.Future() + loop.add_writer(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + async def aclose(self) -> None: + if not self._closing: + self._closing = True + if self.__raw_socket.fileno() != -1: + self.__raw_socket.close() + + if self._receive_future: + self._receive_future.set_result(None) + if self._send_future: + self._send_future.set_result(None) + + +class UNIXSocketStream(_RawSocketMixin, abc.UNIXSocketStream): + async def send_eof(self) -> None: + with self._send_guard: + self._raw_socket.shutdown(socket.SHUT_WR) + + async def receive(self, max_bytes: int = 65536) -> bytes: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = 
self._raw_socket.recv(max_bytes) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = self._raw_socket.send(view) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + view = view[bytes_sent:] + + async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError("msglen must be a non-negative integer") + if not isinstance(maxfds, int) or maxfds < 1: + raise ValueError("maxfds must be a positive integer") + + loop = get_running_loop() + fds = array.array("i") + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + message, ancdata, flags, addr = self._raw_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize) + ) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError( + f"Received unexpected ancillary data; message = {message!r}, " + f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" + ) + + fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds(self, message: bytes, fds: 
Collection[int | IOBase]) -> None: + if not message: + raise ValueError("message must not be empty") + if not fds: + raise ValueError("fds must not be empty") + + loop = get_running_loop() + filenos: list[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + # The ignore can be removed after mypy picks up + # https://github.com/python/typeshed/pull/5545 + self._raw_socket.sendmsg( + [message], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fdarray)] + ) + break + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + +class TCPSocketListener(abc.SocketListener): + _accept_scope: CancelScope | None = None + _closed = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = cast(asyncio.BaseEventLoop, get_running_loop()) + self._accept_guard = ResourceGuard("accepting connections from") + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + async def accept(self) -> abc.SocketStream: + if self._closed: + raise ClosedResourceError + + with self._accept_guard: + await AsyncIOBackend.checkpoint() + with CancelScope() as self._accept_scope: + try: + client_sock, _addr = await self._loop.sock_accept(self._raw_socket) + except asyncio.CancelledError: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, NotImplementedError): + pass + + if self._closed: + raise ClosedResourceError from None + + raise + finally: + self._accept_scope = None + + client_sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + transport, protocol = await self._loop.connect_accepted_socket( + StreamProtocol, 
client_sock + ) + return SocketStream(transport, protocol) + + async def aclose(self) -> None: + if self._closed: + return + + self._closed = True + if self._accept_scope: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, NotImplementedError): + pass + + self._accept_scope.cancel() + await sleep(0) + + self._raw_socket.close() + + +class UNIXSocketListener(abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = get_running_loop() + self._accept_guard = ResourceGuard("accepting connections from") + self._closed = False + + async def accept(self) -> abc.SocketStream: + await AsyncIOBackend.checkpoint() + with self._accept_guard: + while True: + try: + client_sock, _ = self.__raw_socket.accept() + client_sock.setblocking(False) + return UNIXSocketStream(client_sock) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + self._loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback( + lambda _: self._loop.remove_reader(self.__raw_socket) + ) + await f + except OSError as exc: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + async def aclose(self) -> None: + self._closed = True + self.__raw_socket.close() + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + +class UDPSocket(abc.UDPSocket): + def __init__( + self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol + ): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + self._transport.close() + + async def 
receive(self) -> tuple[bytes, IPSockAddrType]: + with self._receive_guard: + await AsyncIOBackend.checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + return self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + await AsyncIOBackend.checkpoint() + await self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + elif self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(*item) + + +class ConnectedUDPSocket(abc.ConnectedUDPSocket): + def __init__( + self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol + ): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + self._transport.close() + + async def receive(self) -> bytes: + with self._receive_guard: + await AsyncIOBackend.checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + packet = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + return packet[0] + + async def send(self, item: bytes) -> None: + with self._send_guard: + await AsyncIOBackend.checkpoint() + await 
self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + elif self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(item) + + +class UNIXDatagramSocket(_RawSocketMixin, abc.UNIXDatagramSocket): + async def receive(self) -> UNIXDatagramPacketType: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = self._raw_socket.recvfrom(65536) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return data + + async def send(self, item: UNIXDatagramPacketType) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + self._raw_socket.sendto(*item) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return + + +class ConnectedUNIXDatagramSocket(_RawSocketMixin, abc.ConnectedUNIXDatagramSocket): + async def receive(self) -> bytes: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = self._raw_socket.recv(65536) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return data + + async def send(self, item: bytes) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + self._raw_socket.send(item) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return + + 
+_read_events: RunVar[dict[Any, asyncio.Event]] = RunVar("read_events") +_write_events: RunVar[dict[Any, asyncio.Event]] = RunVar("write_events") + + +# +# Synchronization +# + + +class Event(BaseEvent): + def __new__(cls) -> Event: + return object.__new__(cls) + + def __init__(self) -> None: + self._event = asyncio.Event() + + def set(self) -> None: + self._event.set() + + def is_set(self) -> bool: + return self._event.is_set() + + async def wait(self) -> None: + if self.is_set(): + await AsyncIOBackend.checkpoint() + else: + await self._event.wait() + + def statistics(self) -> EventStatistics: + return EventStatistics(len(self._event._waiters)) # type: ignore[attr-defined] + + +class CapacityLimiter(BaseCapacityLimiter): + _total_tokens: float = 0 + + def __new__(cls, total_tokens: float) -> CapacityLimiter: + return object.__new__(cls) + + def __init__(self, total_tokens: float): + self._borrowers: set[Any] = set() + self._wait_queue: OrderedDict[Any, asyncio.Event] = OrderedDict() + self.total_tokens = total_tokens + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + @property + def total_tokens(self) -> float: + return self._total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + if not isinstance(value, int) and not math.isinf(value): + raise TypeError("total_tokens must be an int or math.inf") + if value < 1: + raise ValueError("total_tokens must be >= 1") + + waiters_to_notify = max(value - self._total_tokens, 0) + self._total_tokens = value + + # Notify waiting tasks that they have acquired the limiter + while self._wait_queue and waiters_to_notify: + event = self._wait_queue.popitem(last=False)[1] + event.set() + waiters_to_notify -= 1 + + @property + def borrowed_tokens(self) -> int: + return len(self._borrowers) + + @property + def 
available_tokens(self) -> float: + return self._total_tokens - len(self._borrowers) + + def acquire_nowait(self) -> None: + self.acquire_on_behalf_of_nowait(current_task()) + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + if borrower in self._borrowers: + raise RuntimeError( + "this borrower is already holding one of this CapacityLimiter's " + "tokens" + ) + + if self._wait_queue or len(self._borrowers) >= self._total_tokens: + raise WouldBlock + + self._borrowers.add(borrower) + + async def acquire(self) -> None: + return await self.acquire_on_behalf_of(current_task()) + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await AsyncIOBackend.checkpoint_if_cancelled() + try: + self.acquire_on_behalf_of_nowait(borrower) + except WouldBlock: + event = asyncio.Event() + self._wait_queue[borrower] = event + try: + await event.wait() + except BaseException: + self._wait_queue.pop(borrower, None) + raise + + self._borrowers.add(borrower) + else: + try: + await AsyncIOBackend.cancel_shielded_checkpoint() + except BaseException: + self.release() + raise + + def release(self) -> None: + self.release_on_behalf_of(current_task()) + + def release_on_behalf_of(self, borrower: object) -> None: + try: + self._borrowers.remove(borrower) + except KeyError: + raise RuntimeError( + "this borrower isn't holding any of this CapacityLimiter's " "tokens" + ) from None + + # Notify the next task in line if this limiter has free capacity now + if self._wait_queue and len(self._borrowers) < self._total_tokens: + event = self._wait_queue.popitem(last=False)[1] + event.set() + + def statistics(self) -> CapacityLimiterStatistics: + return CapacityLimiterStatistics( + self.borrowed_tokens, + self.total_tokens, + tuple(self._borrowers), + len(self._wait_queue), + ) + + +_default_thread_limiter: RunVar[CapacityLimiter] = RunVar("_default_thread_limiter") + + +# +# Operating system signals +# + + +class _SignalReceiver: + def __init__(self, signals: 
tuple[Signals, ...]): + self._signals = signals + self._loop = get_running_loop() + self._signal_queue: deque[Signals] = deque() + self._future: asyncio.Future = asyncio.Future() + self._handled_signals: set[Signals] = set() + + def _deliver(self, signum: Signals) -> None: + self._signal_queue.append(signum) + if not self._future.done(): + self._future.set_result(None) + + def __enter__(self) -> _SignalReceiver: + for sig in set(self._signals): + self._loop.add_signal_handler(sig, self._deliver, sig) + self._handled_signals.add(sig) + + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + for sig in self._handled_signals: + self._loop.remove_signal_handler(sig) + return None + + def __aiter__(self) -> _SignalReceiver: + return self + + async def __anext__(self) -> Signals: + await AsyncIOBackend.checkpoint() + if not self._signal_queue: + self._future = asyncio.Future() + await self._future + + return self._signal_queue.popleft() + + +# +# Testing and debugging +# + + +def _create_task_info(task: asyncio.Task) -> TaskInfo: + task_state = _task_states.get(task) + if task_state is None: + parent_id = None + else: + parent_id = task_state.parent_id + + return TaskInfo(id(task), parent_id, task.get_name(), task.get_coro()) + + +class TestRunner(abc.TestRunner): + _send_stream: MemoryObjectSendStream[tuple[Awaitable[Any], asyncio.Future[Any]]] + + def __init__( + self, + *, + debug: bool | None = None, + use_uvloop: bool = False, + loop_factory: Callable[[], AbstractEventLoop] | None = None, + ) -> None: + if use_uvloop and loop_factory is None: + import uvloop + + loop_factory = uvloop.new_event_loop + + self._runner = Runner(debug=debug, loop_factory=loop_factory) + self._exceptions: list[BaseException] = [] + self._runner_task: asyncio.Task | None = None + + def __enter__(self) -> TestRunner: + self._runner.__enter__() + 
self.get_loop().set_exception_handler(self._exception_handler) + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self._runner.__exit__(exc_type, exc_val, exc_tb) + + def get_loop(self) -> AbstractEventLoop: + return self._runner.get_loop() + + def _exception_handler( + self, loop: asyncio.AbstractEventLoop, context: dict[str, Any] + ) -> None: + if isinstance(context.get("exception"), Exception): + self._exceptions.append(context["exception"]) + else: + loop.default_exception_handler(context) + + def _raise_async_exceptions(self) -> None: + # Re-raise any exceptions raised in asynchronous callbacks + if self._exceptions: + exceptions, self._exceptions = self._exceptions, [] + if len(exceptions) == 1: + raise exceptions[0] + elif exceptions: + raise BaseExceptionGroup( + "Multiple exceptions occurred in asynchronous callbacks", exceptions + ) + + @staticmethod + async def _run_tests_and_fixtures( + receive_stream: MemoryObjectReceiveStream[ + tuple[Awaitable[T_Retval], asyncio.Future[T_Retval]] + ], + ) -> None: + with receive_stream: + async for coro, future in receive_stream: + try: + retval = await coro + except BaseException as exc: + if not future.cancelled(): + future.set_exception(exc) + else: + if not future.cancelled(): + future.set_result(retval) + + async def _call_in_runner_task( + self, + func: Callable[P, Awaitable[T_Retval]], + *args: P.args, + **kwargs: P.kwargs, + ) -> T_Retval: + if not self._runner_task: + self._send_stream, receive_stream = create_memory_object_stream[ + Tuple[Awaitable[Any], asyncio.Future] + ](1) + self._runner_task = self.get_loop().create_task( + self._run_tests_and_fixtures(receive_stream) + ) + + coro = func(*args, **kwargs) + future: asyncio.Future[T_Retval] = self.get_loop().create_future() + self._send_stream.send_nowait((coro, future)) + return await future + + def run_asyncgen_fixture( + self, + fixture_func: 
Callable[..., AsyncGenerator[T_Retval, Any]], + kwargs: dict[str, Any], + ) -> Iterable[T_Retval]: + asyncgen = fixture_func(**kwargs) + fixturevalue: T_Retval = self.get_loop().run_until_complete( + self._call_in_runner_task(asyncgen.asend, None) + ) + self._raise_async_exceptions() + + yield fixturevalue + + try: + self.get_loop().run_until_complete( + self._call_in_runner_task(asyncgen.asend, None) + ) + except StopAsyncIteration: + self._raise_async_exceptions() + else: + self.get_loop().run_until_complete(asyncgen.aclose()) + raise RuntimeError("Async generator fixture did not stop") + + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], + kwargs: dict[str, Any], + ) -> T_Retval: + retval = self.get_loop().run_until_complete( + self._call_in_runner_task(fixture_func, **kwargs) + ) + self._raise_async_exceptions() + return retval + + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] + ) -> None: + try: + self.get_loop().run_until_complete( + self._call_in_runner_task(test_func, **kwargs) + ) + except Exception as exc: + self._exceptions.append(exc) + + self._raise_async_exceptions() + + +class AsyncIOBackend(AsyncBackend): + @classmethod + def run( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + options: dict[str, Any], + ) -> T_Retval: + @wraps(func) + async def wrapper() -> T_Retval: + task = cast(asyncio.Task, current_task()) + task.set_name(get_callable_name(func)) + _task_states[task] = TaskState(None, None) + + try: + return await func(*args) + finally: + del _task_states[task] + + debug = options.get("debug", False) + loop_factory = options.get("loop_factory", None) + if loop_factory is None and options.get("use_uvloop", False): + import uvloop + + loop_factory = uvloop.new_event_loop + + with Runner(debug=debug, loop_factory=loop_factory) as runner: + return runner.run(wrapper()) + + 
@classmethod + def current_token(cls) -> object: + return get_running_loop() + + @classmethod + def current_time(cls) -> float: + return get_running_loop().time() + + @classmethod + def cancelled_exception_class(cls) -> type[BaseException]: + return CancelledError + + @classmethod + async def checkpoint(cls) -> None: + await sleep(0) + + @classmethod + async def checkpoint_if_cancelled(cls) -> None: + task = current_task() + if task is None: + return + + try: + cancel_scope = _task_states[task].cancel_scope + except KeyError: + return + + while cancel_scope: + if cancel_scope.cancel_called: + await sleep(0) + elif cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + with CancelScope(shield=True): + await sleep(0) + + @classmethod + async def sleep(cls, delay: float) -> None: + await sleep(delay) + + @classmethod + def create_cancel_scope( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + return CancelScope(deadline=deadline, shield=shield) + + @classmethod + def current_effective_deadline(cls) -> float: + try: + cancel_scope = _task_states[ + current_task() # type: ignore[index] + ].cancel_scope + except KeyError: + return math.inf + + deadline = math.inf + while cancel_scope: + deadline = min(deadline, cancel_scope.deadline) + if cancel_scope._cancel_called: + deadline = -math.inf + break + elif cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + return deadline + + @classmethod + def create_task_group(cls) -> abc.TaskGroup: + return TaskGroup() + + @classmethod + def create_event(cls) -> abc.Event: + return Event() + + @classmethod + def create_capacity_limiter(cls, total_tokens: float) -> abc.CapacityLimiter: + return CapacityLimiter(total_tokens) + + @classmethod + async def run_sync_in_worker_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + 
abandon_on_cancel: bool = False, + limiter: abc.CapacityLimiter | None = None, + ) -> T_Retval: + await cls.checkpoint() + + # If this is the first run in this event loop thread, set up the necessary + # variables + try: + idle_workers = _threadpool_idle_workers.get() + workers = _threadpool_workers.get() + except LookupError: + idle_workers = deque() + workers = set() + _threadpool_idle_workers.set(idle_workers) + _threadpool_workers.set(workers) + + async with limiter or cls.current_default_thread_limiter(): + with CancelScope(shield=not abandon_on_cancel) as scope: + future: asyncio.Future = asyncio.Future() + root_task = find_root_task() + if not idle_workers: + worker = WorkerThread(root_task, workers, idle_workers) + worker.start() + workers.add(worker) + root_task.add_done_callback(worker.stop) + else: + worker = idle_workers.pop() + + # Prune any other workers that have been idle for MAX_IDLE_TIME + # seconds or longer + now = cls.current_time() + while idle_workers: + if ( + now - idle_workers[0].idle_since + < WorkerThread.MAX_IDLE_TIME + ): + break + + expired_worker = idle_workers.popleft() + expired_worker.root_task.remove_done_callback( + expired_worker.stop + ) + expired_worker.stop() + + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, None) + if abandon_on_cancel or scope._parent_scope is None: + worker_scope = scope + else: + worker_scope = scope._parent_scope + + worker.queue.put_nowait((context, func, args, future, worker_scope)) + return await future + + @classmethod + def check_cancelled(cls) -> None: + scope: CancelScope | None = threadlocals.current_cancel_scope + while scope is not None: + if scope.cancel_called: + raise CancelledError(f"Cancelled by cancel scope {id(scope):x}") + + if scope.shield: + return + + scope = scope._parent_scope + + @classmethod + def run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> 
T_Retval: + async def task_wrapper(scope: CancelScope) -> T_Retval: + __tracebackhide__ = True + task = cast(asyncio.Task, current_task()) + _task_states[task] = TaskState(None, scope) + scope._tasks.add(task) + try: + return await func(*args) + except CancelledError as exc: + raise concurrent.futures.CancelledError(str(exc)) from None + finally: + scope._tasks.discard(task) + + loop = cast(AbstractEventLoop, token) + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, "asyncio") + wrapper = task_wrapper(threadlocals.current_cancel_scope) + f: concurrent.futures.Future[T_Retval] = context.run( + asyncio.run_coroutine_threadsafe, wrapper, loop + ) + return f.result() + + @classmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + @wraps(func) + def wrapper() -> None: + try: + sniffio.current_async_library_cvar.set("asyncio") + f.set_result(func(*args)) + except BaseException as exc: + f.set_exception(exc) + if not isinstance(exc, Exception): + raise + + f: concurrent.futures.Future[T_Retval] = Future() + loop = cast(AbstractEventLoop, token) + loop.call_soon_threadsafe(wrapper) + return f.result() + + @classmethod + def create_blocking_portal(cls) -> abc.BlockingPortal: + return BlockingPortal() + + @classmethod + async def open_process( + cls, + command: str | bytes | Sequence[str | bytes], + *, + shell: bool, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + cwd: str | bytes | PathLike | None = None, + env: Mapping[str, str] | None = None, + start_new_session: bool = False, + ) -> Process: + await cls.checkpoint() + if shell: + process = await asyncio.create_subprocess_shell( + cast("str | bytes", command), + stdin=stdin, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) + else: + process = await asyncio.create_subprocess_exec( + *command, 
+ stdin=stdin, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) + + stdin_stream = StreamWriterWrapper(process.stdin) if process.stdin else None + stdout_stream = StreamReaderWrapper(process.stdout) if process.stdout else None + stderr_stream = StreamReaderWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + @classmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[abc.Process]) -> None: + create_task( + _shutdown_process_pool_on_exit(workers), + name="AnyIO process pool shutdown task", + ) + find_root_task().add_done_callback( + partial(_forcibly_shutdown_process_pool_on_exit, workers) + ) + + @classmethod + async def connect_tcp( + cls, host: str, port: int, local_address: IPSockAddrType | None = None + ) -> abc.SocketStream: + transport, protocol = cast( + Tuple[asyncio.Transport, StreamProtocol], + await get_running_loop().create_connection( + StreamProtocol, host, port, local_addr=local_address + ), + ) + transport.pause_reading() + return SocketStream(transport, protocol) + + @classmethod + async def connect_unix(cls, path: str | bytes) -> abc.UNIXSocketStream: + await cls.checkpoint() + loop = get_running_loop() + raw_socket = socket.socket(socket.AF_UNIX) + raw_socket.setblocking(False) + while True: + try: + raw_socket.connect(path) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + loop.add_writer(raw_socket, f.set_result, None) + f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) + await f + except BaseException: + raw_socket.close() + raise + else: + return UNIXSocketStream(raw_socket) + + @classmethod + def create_tcp_listener(cls, sock: socket.socket) -> SocketListener: + return TCPSocketListener(sock) + + @classmethod + def create_unix_listener(cls, sock: socket.socket) -> SocketListener: + return UNIXSocketListener(sock) + + @classmethod + async def create_udp_socket( + cls, + family: 
AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + transport, protocol = await get_running_loop().create_datagram_endpoint( + DatagramProtocol, + local_addr=local_address, + remote_addr=remote_address, + family=family, + reuse_port=reuse_port, + ) + if protocol.exception: + transport.close() + raise protocol.exception + + if not remote_address: + return UDPSocket(transport, protocol) + else: + return ConnectedUDPSocket(transport, protocol) + + @classmethod + async def create_unix_datagram_socket( # type: ignore[override] + cls, raw_socket: socket.socket, remote_path: str | bytes | None + ) -> abc.UNIXDatagramSocket | abc.ConnectedUNIXDatagramSocket: + await cls.checkpoint() + loop = get_running_loop() + + if remote_path: + while True: + try: + raw_socket.connect(remote_path) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + loop.add_writer(raw_socket, f.set_result, None) + f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) + await f + except BaseException: + raw_socket.close() + raise + else: + return ConnectedUNIXDatagramSocket(raw_socket) + else: + return UNIXDatagramSocket(raw_socket) + + @classmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> list[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int], + ] + ]: + return await get_running_loop().getaddrinfo( + host, port, family=family, type=type, proto=proto, flags=flags + ) + + @classmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + return await get_running_loop().getnameinfo(sockaddr, flags) + + @classmethod + async def wait_socket_readable(cls, sock: socket.socket) -> None: + await cls.checkpoint() + try: + read_events = 
_read_events.get() + except LookupError: + read_events = {} + _read_events.set(read_events) + + if read_events.get(sock): + raise BusyResourceError("reading from") from None + + loop = get_running_loop() + event = read_events[sock] = asyncio.Event() + loop.add_reader(sock, event.set) + try: + await event.wait() + finally: + if read_events.pop(sock, None) is not None: + loop.remove_reader(sock) + readable = True + else: + readable = False + + if not readable: + raise ClosedResourceError + + @classmethod + async def wait_socket_writable(cls, sock: socket.socket) -> None: + await cls.checkpoint() + try: + write_events = _write_events.get() + except LookupError: + write_events = {} + _write_events.set(write_events) + + if write_events.get(sock): + raise BusyResourceError("writing to") from None + + loop = get_running_loop() + event = write_events[sock] = asyncio.Event() + loop.add_writer(sock.fileno(), event.set) + try: + await event.wait() + finally: + if write_events.pop(sock, None) is not None: + loop.remove_writer(sock) + writable = True + else: + writable = False + + if not writable: + raise ClosedResourceError + + @classmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + try: + return _default_thread_limiter.get() + except LookupError: + limiter = CapacityLimiter(40) + _default_thread_limiter.set(limiter) + return limiter + + @classmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> ContextManager[AsyncIterator[Signals]]: + return _SignalReceiver(signals) + + @classmethod + def get_current_task(cls) -> TaskInfo: + return _create_task_info(current_task()) # type: ignore[arg-type] + + @classmethod + def get_running_tasks(cls) -> list[TaskInfo]: + return [_create_task_info(task) for task in all_tasks() if not task.done()] + + @classmethod + async def wait_all_tasks_blocked(cls) -> None: + await cls.checkpoint() + this_task = current_task() + while True: + for task in all_tasks(): + if task is this_task: + continue + + waiter = 
task._fut_waiter # type: ignore[attr-defined] + if waiter is None or waiter.done(): + await sleep(0.1) + break + else: + return + + @classmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + return TestRunner(**options) + + +backend_class = AsyncIOBackend diff --git a/.venv/Lib/site-packages/anyio/_backends/_trio.py b/.venv/Lib/site-packages/anyio/_backends/_trio.py new file mode 100644 index 00000000..1a47192e --- /dev/null +++ b/.venv/Lib/site-packages/anyio/_backends/_trio.py @@ -0,0 +1,1169 @@ +from __future__ import annotations + +import array +import math +import socket +import sys +import types +from collections.abc import AsyncIterator, Iterable +from concurrent.futures import Future +from dataclasses import dataclass +from functools import partial +from io import IOBase +from os import PathLike +from signal import Signals +from socket import AddressFamily, SocketKind +from types import TracebackType +from typing import ( + IO, + Any, + AsyncGenerator, + Awaitable, + Callable, + Collection, + ContextManager, + Coroutine, + Generic, + Mapping, + NoReturn, + Sequence, + TypeVar, + cast, + overload, +) + +import trio.from_thread +import trio.lowlevel +from outcome import Error, Outcome, Value +from trio.lowlevel import ( + current_root_task, + current_task, + wait_readable, + wait_writable, +) +from trio.socket import SocketType as TrioSocketType +from trio.to_thread import run_sync + +from .. 
import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc +from .._core._eventloop import claim_worker_thread +from .._core._exceptions import ( + BrokenResourceError, + BusyResourceError, + ClosedResourceError, + EndOfStream, +) +from .._core._sockets import convert_ipv6_sockaddr +from .._core._streams import create_memory_object_stream +from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import ResourceGuard +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import IPSockAddrType, UDPPacketType, UNIXDatagramPacketType +from ..abc._eventloop import AsyncBackend +from ..streams.memory import MemoryObjectSendStream + +if sys.version_info >= (3, 10): + from typing import ParamSpec +else: + from typing_extensions import ParamSpec + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from exceptiongroup import BaseExceptionGroup + from typing_extensions import TypeVarTuple, Unpack + +T = TypeVar("T") +T_Retval = TypeVar("T_Retval") +T_SockAddr = TypeVar("T_SockAddr", str, IPSockAddrType) +PosArgsT = TypeVarTuple("PosArgsT") +P = ParamSpec("P") + + +# +# Event loop +# + +RunVar = trio.lowlevel.RunVar + + +# +# Timeouts and cancellation +# + + +class CancelScope(BaseCancelScope): + def __new__( + cls, original: trio.CancelScope | None = None, **kwargs: object + ) -> CancelScope: + return object.__new__(cls) + + def __init__(self, original: trio.CancelScope | None = None, **kwargs: Any) -> None: + self.__original = original or trio.CancelScope(**kwargs) + + def __enter__(self) -> CancelScope: + self.__original.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + # https://github.com/python-trio/trio-typing/pull/79 + return self.__original.__exit__(exc_type, exc_val, exc_tb) + + 
def cancel(self) -> None: + self.__original.cancel() + + @property + def deadline(self) -> float: + return self.__original.deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self.__original.deadline = value + + @property + def cancel_called(self) -> bool: + return self.__original.cancel_called + + @property + def cancelled_caught(self) -> bool: + return self.__original.cancelled_caught + + @property + def shield(self) -> bool: + return self.__original.shield + + @shield.setter + def shield(self, value: bool) -> None: + self.__original.shield = value + + +# +# Task groups +# + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self._active = False + self._nursery_manager = trio.open_nursery(strict_exception_groups=True) + self.cancel_scope = None # type: ignore[assignment] + + async def __aenter__(self) -> TaskGroup: + self._active = True + self._nursery = await self._nursery_manager.__aenter__() + self.cancel_scope = CancelScope(self._nursery.cancel_scope) + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + try: + return await self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb) + except BaseExceptionGroup as exc: + _, rest = exc.split(trio.Cancelled) + if not rest: + cancelled_exc = trio.Cancelled._create() + raise cancelled_exc from exc + + raise + finally: + self._active = False + + def start_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> None: + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." 
+ ) + + self._nursery.start_soon(func, *args, name=name) + + async def start( + self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None + ) -> Any: + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." + ) + + return await self._nursery.start(func, *args, name=name) + + +# +# Threads +# + + +class BlockingPortal(abc.BlockingPortal): + def __new__(cls) -> BlockingPortal: + return object.__new__(cls) + + def __init__(self) -> None: + super().__init__() + self._token = trio.lowlevel.current_trio_token() + + def _spawn_task_from_thread( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + name: object, + future: Future[T_Retval], + ) -> None: + trio.from_thread.run_sync( + partial(self._task_group.start_soon, name=name), + self._call_func, + func, + args, + kwargs, + future, + trio_token=self._token, + ) + + +# +# Subprocesses +# + + +@dataclass(eq=False) +class ReceiveStreamWrapper(abc.ByteReceiveStream): + _stream: trio.abc.ReceiveStream + + async def receive(self, max_bytes: int | None = None) -> bytes: + try: + data = await self._stream.receive_some(max_bytes) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + if data: + return data + else: + raise EndOfStream + + async def aclose(self) -> None: + await self._stream.aclose() + + +@dataclass(eq=False) +class SendStreamWrapper(abc.ByteSendStream): + _stream: trio.abc.SendStream + + async def send(self, item: bytes) -> None: + try: + await self._stream.send_all(item) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + async def aclose(self) -> None: + await self._stream.aclose() + + 
+@dataclass(eq=False) +class Process(abc.Process): + _process: trio.Process + _stdin: abc.ByteSendStream | None + _stdout: abc.ByteReceiveStream | None + _stderr: abc.ByteReceiveStream | None + + async def aclose(self) -> None: + with CancelScope(shield=True): + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + try: + await self.wait() + except BaseException: + self.kill() + with CancelScope(shield=True): + await self.wait() + raise + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: Signals) -> None: + self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> int | None: + return self._process.returncode + + @property + def stdin(self) -> abc.ByteSendStream | None: + return self._stdin + + @property + def stdout(self) -> abc.ByteReceiveStream | None: + return self._stdout + + @property + def stderr(self) -> abc.ByteReceiveStream | None: + return self._stderr + + +class _ProcessPoolShutdownInstrument(trio.abc.Instrument): + def after_run(self) -> None: + super().after_run() + + +current_default_worker_process_limiter: trio.lowlevel.RunVar = RunVar( + "current_default_worker_process_limiter" +) + + +async def _shutdown_process_pool(workers: set[abc.Process]) -> None: + try: + await trio.sleep(math.inf) + except trio.Cancelled: + for process in workers: + if process.returncode is None: + process.kill() + + with CancelScope(shield=True): + for process in workers: + await process.aclose() + + +# +# Sockets and networking +# + + +class _TrioSocketMixin(Generic[T_SockAddr]): + def __init__(self, trio_socket: TrioSocketType) -> None: + self._trio_socket = trio_socket + self._closed = False + + def _check_closed(self) -> None: + if 
self._closed: + raise ClosedResourceError + if self._trio_socket.fileno() < 0: + raise BrokenResourceError + + @property + def _raw_socket(self) -> socket.socket: + return self._trio_socket._sock # type: ignore[attr-defined] + + async def aclose(self) -> None: + if self._trio_socket.fileno() >= 0: + self._closed = True + self._trio_socket.close() + + def _convert_socket_error(self, exc: BaseException) -> NoReturn: + if isinstance(exc, trio.ClosedResourceError): + raise ClosedResourceError from exc + elif self._trio_socket.fileno() < 0 and self._closed: + raise ClosedResourceError from None + elif isinstance(exc, OSError): + raise BrokenResourceError from exc + else: + raise exc + + +class SocketStream(_TrioSocketMixin, abc.SocketStream): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + try: + data = await self._trio_socket.recv(max_bytes) + except BaseException as exc: + self._convert_socket_error(exc) + + if data: + return data + else: + raise EndOfStream + + async def send(self, item: bytes) -> None: + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = await self._trio_socket.send(view) + except BaseException as exc: + self._convert_socket_error(exc) + + view = view[bytes_sent:] + + async def send_eof(self) -> None: + self._trio_socket.shutdown(socket.SHUT_WR) + + +class UNIXSocketStream(SocketStream, abc.UNIXSocketStream): + async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError("msglen must be a non-negative integer") + if not isinstance(maxfds, int) or maxfds < 1: + raise ValueError("maxfds must be a positive integer") + + fds = array.array("i") + await trio.lowlevel.checkpoint() + with 
self._receive_guard: + while True: + try: + message, ancdata, flags, addr = await self._trio_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize) + ) + except BaseException as exc: + self._convert_socket_error(exc) + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError( + f"Received unexpected ancillary data; message = {message!r}, " + f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" + ) + + fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: + if not message: + raise ValueError("message must not be empty") + if not fds: + raise ValueError("fds must not be empty") + + filenos: list[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await trio.lowlevel.checkpoint() + with self._send_guard: + while True: + try: + await self._trio_socket.sendmsg( + [message], + [ + ( + socket.SOL_SOCKET, + socket.SCM_RIGHTS, + fdarray, + ) + ], + ) + break + except BaseException as exc: + self._convert_socket_error(exc) + + +class TCPSocketListener(_TrioSocketMixin, abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard("accepting connections from") + + async def accept(self) -> SocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + return SocketStream(trio_socket) + + +class UNIXSocketListener(_TrioSocketMixin, abc.SocketListener): + def 
__init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard("accepting connections from") + + async def accept(self) -> UNIXSocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + return UNIXSocketStream(trio_socket) + + +class UDPSocket(_TrioSocketMixin[IPSockAddrType], abc.UDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> tuple[bytes, IPSockAddrType]: + with self._receive_guard: + try: + data, addr = await self._trio_socket.recvfrom(65536) + return data, convert_ipv6_sockaddr(addr) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + try: + await self._trio_socket.sendto(*item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class ConnectedUDPSocket(_TrioSocketMixin[IPSockAddrType], abc.ConnectedUDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> bytes: + with self._receive_guard: + try: + return await self._trio_socket.recv(65536) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: bytes) -> None: + with self._send_guard: + try: + await self._trio_socket.send(item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class UNIXDatagramSocket(_TrioSocketMixin[str], abc.UNIXDatagramSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading 
from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> UNIXDatagramPacketType: + with self._receive_guard: + try: + data, addr = await self._trio_socket.recvfrom(65536) + return data, addr + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: UNIXDatagramPacketType) -> None: + with self._send_guard: + try: + await self._trio_socket.sendto(*item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class ConnectedUNIXDatagramSocket( + _TrioSocketMixin[str], abc.ConnectedUNIXDatagramSocket +): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> bytes: + with self._receive_guard: + try: + return await self._trio_socket.recv(65536) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: bytes) -> None: + with self._send_guard: + try: + await self._trio_socket.send(item) + except BaseException as exc: + self._convert_socket_error(exc) + + +# +# Synchronization +# + + +class Event(BaseEvent): + def __new__(cls) -> Event: + return object.__new__(cls) + + def __init__(self) -> None: + self.__original = trio.Event() + + def is_set(self) -> bool: + return self.__original.is_set() + + async def wait(self) -> None: + return await self.__original.wait() + + def statistics(self) -> EventStatistics: + orig_statistics = self.__original.statistics() + return EventStatistics(tasks_waiting=orig_statistics.tasks_waiting) + + def set(self) -> None: + self.__original.set() + + +class CapacityLimiter(BaseCapacityLimiter): + def __new__( + cls, + total_tokens: float | None = None, + *, + original: trio.CapacityLimiter | None = None, + ) -> CapacityLimiter: + return object.__new__(cls) + + def __init__( + self, + total_tokens: float | None = None, + *, + original: trio.CapacityLimiter | None 
= None, + ) -> None: + if original is not None: + self.__original = original + else: + assert total_tokens is not None + self.__original = trio.CapacityLimiter(total_tokens) + + async def __aenter__(self) -> None: + return await self.__original.__aenter__() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + await self.__original.__aexit__(exc_type, exc_val, exc_tb) + + @property + def total_tokens(self) -> float: + return self.__original.total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + self.__original.total_tokens = value + + @property + def borrowed_tokens(self) -> int: + return self.__original.borrowed_tokens + + @property + def available_tokens(self) -> float: + return self.__original.available_tokens + + def acquire_nowait(self) -> None: + self.__original.acquire_nowait() + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + self.__original.acquire_on_behalf_of_nowait(borrower) + + async def acquire(self) -> None: + await self.__original.acquire() + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await self.__original.acquire_on_behalf_of(borrower) + + def release(self) -> None: + return self.__original.release() + + def release_on_behalf_of(self, borrower: object) -> None: + return self.__original.release_on_behalf_of(borrower) + + def statistics(self) -> CapacityLimiterStatistics: + orig = self.__original.statistics() + return CapacityLimiterStatistics( + borrowed_tokens=orig.borrowed_tokens, + total_tokens=orig.total_tokens, + borrowers=tuple(orig.borrowers), + tasks_waiting=orig.tasks_waiting, + ) + + +_capacity_limiter_wrapper: trio.lowlevel.RunVar = RunVar("_capacity_limiter_wrapper") + + +# +# Signal handling +# + + +class _SignalReceiver: + _iterator: AsyncIterator[int] + + def __init__(self, signals: tuple[Signals, ...]): + self._signals = signals + + def __enter__(self) -> 
_SignalReceiver: + self._cm = trio.open_signal_receiver(*self._signals) + self._iterator = self._cm.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + return self._cm.__exit__(exc_type, exc_val, exc_tb) + + def __aiter__(self) -> _SignalReceiver: + return self + + async def __anext__(self) -> Signals: + signum = await self._iterator.__anext__() + return Signals(signum) + + +# +# Testing and debugging +# + + +class TestRunner(abc.TestRunner): + def __init__(self, **options: Any) -> None: + from queue import Queue + + self._call_queue: Queue[Callable[[], object]] = Queue() + self._send_stream: MemoryObjectSendStream | None = None + self._options = options + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> None: + if self._send_stream: + self._send_stream.close() + while self._send_stream is not None: + self._call_queue.get()() + + async def _run_tests_and_fixtures(self) -> None: + self._send_stream, receive_stream = create_memory_object_stream(1) + with receive_stream: + async for coro, outcome_holder in receive_stream: + try: + retval = await coro + except BaseException as exc: + outcome_holder.append(Error(exc)) + else: + outcome_holder.append(Value(retval)) + + def _main_task_finished(self, outcome: object) -> None: + self._send_stream = None + + def _call_in_runner_task( + self, + func: Callable[P, Awaitable[T_Retval]], + *args: P.args, + **kwargs: P.kwargs, + ) -> T_Retval: + if self._send_stream is None: + trio.lowlevel.start_guest_run( + self._run_tests_and_fixtures, + run_sync_soon_threadsafe=self._call_queue.put, + done_callback=self._main_task_finished, + **self._options, + ) + while self._send_stream is None: + self._call_queue.get()() + + outcome_holder: list[Outcome] = [] + self._send_stream.send_nowait((func(*args, **kwargs), 
outcome_holder)) + while not outcome_holder: + self._call_queue.get()() + + return outcome_holder[0].unwrap() + + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], + kwargs: dict[str, Any], + ) -> Iterable[T_Retval]: + asyncgen = fixture_func(**kwargs) + fixturevalue: T_Retval = self._call_in_runner_task(asyncgen.asend, None) + + yield fixturevalue + + try: + self._call_in_runner_task(asyncgen.asend, None) + except StopAsyncIteration: + pass + else: + self._call_in_runner_task(asyncgen.aclose) + raise RuntimeError("Async generator fixture did not stop") + + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], + kwargs: dict[str, Any], + ) -> T_Retval: + return self._call_in_runner_task(fixture_func, **kwargs) + + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] + ) -> None: + self._call_in_runner_task(test_func, **kwargs) + + +class TrioBackend(AsyncBackend): + @classmethod + def run( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + options: dict[str, Any], + ) -> T_Retval: + return trio.run(func, *args) + + @classmethod + def current_token(cls) -> object: + return trio.lowlevel.current_trio_token() + + @classmethod + def current_time(cls) -> float: + return trio.current_time() + + @classmethod + def cancelled_exception_class(cls) -> type[BaseException]: + return trio.Cancelled + + @classmethod + async def checkpoint(cls) -> None: + await trio.lowlevel.checkpoint() + + @classmethod + async def checkpoint_if_cancelled(cls) -> None: + await trio.lowlevel.checkpoint_if_cancelled() + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + await trio.lowlevel.cancel_shielded_checkpoint() + + @classmethod + async def sleep(cls, delay: float) -> None: + await trio.sleep(delay) + + @classmethod + def create_cancel_scope( + cls, *, deadline: float = 
math.inf, shield: bool = False + ) -> abc.CancelScope: + return CancelScope(deadline=deadline, shield=shield) + + @classmethod + def current_effective_deadline(cls) -> float: + return trio.current_effective_deadline() + + @classmethod + def create_task_group(cls) -> abc.TaskGroup: + return TaskGroup() + + @classmethod + def create_event(cls) -> abc.Event: + return Event() + + @classmethod + def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter: + return CapacityLimiter(total_tokens) + + @classmethod + async def run_sync_in_worker_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: abc.CapacityLimiter | None = None, + ) -> T_Retval: + def wrapper() -> T_Retval: + with claim_worker_thread(TrioBackend, token): + return func(*args) + + token = TrioBackend.current_token() + return await run_sync( + wrapper, + abandon_on_cancel=abandon_on_cancel, + limiter=cast(trio.CapacityLimiter, limiter), + ) + + @classmethod + def check_cancelled(cls) -> None: + trio.from_thread.check_cancelled() + + @classmethod + def run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + return trio.from_thread.run(func, *args) + + @classmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + return trio.from_thread.run_sync(func, *args) + + @classmethod + def create_blocking_portal(cls) -> abc.BlockingPortal: + return BlockingPortal() + + @classmethod + async def open_process( + cls, + command: str | bytes | Sequence[str | bytes], + *, + shell: bool, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + cwd: str | bytes | PathLike | None = None, + env: Mapping[str, str] | None = None, + start_new_session: bool = False, + ) -> Process: + process 
= await trio.lowlevel.open_process( # type: ignore[misc] + command, # type: ignore[arg-type] + stdin=stdin, + stdout=stdout, + stderr=stderr, + shell=shell, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) + stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None + stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None + stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + @classmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[abc.Process]) -> None: + trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers) + + @classmethod + async def connect_tcp( + cls, host: str, port: int, local_address: IPSockAddrType | None = None + ) -> SocketStream: + family = socket.AF_INET6 if ":" in host else socket.AF_INET + trio_socket = trio.socket.socket(family) + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + if local_address: + await trio_socket.bind(local_address) + + try: + await trio_socket.connect((host, port)) + except BaseException: + trio_socket.close() + raise + + return SocketStream(trio_socket) + + @classmethod + async def connect_unix(cls, path: str | bytes) -> abc.UNIXSocketStream: + trio_socket = trio.socket.socket(socket.AF_UNIX) + try: + await trio_socket.connect(path) + except BaseException: + trio_socket.close() + raise + + return UNIXSocketStream(trio_socket) + + @classmethod + def create_tcp_listener(cls, sock: socket.socket) -> abc.SocketListener: + return TCPSocketListener(sock) + + @classmethod + def create_unix_listener(cls, sock: socket.socket) -> abc.SocketListener: + return UNIXSocketListener(sock) + + @classmethod + async def create_udp_socket( + cls, + family: socket.AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + trio_socket = 
trio.socket.socket(family=family, type=socket.SOCK_DGRAM) + + if reuse_port: + trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + + if local_address: + await trio_socket.bind(local_address) + + if remote_address: + await trio_socket.connect(remote_address) + return ConnectedUDPSocket(trio_socket) + else: + return UDPSocket(trio_socket) + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: None + ) -> abc.UNIXDatagramSocket: + ... + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: str | bytes + ) -> abc.ConnectedUNIXDatagramSocket: + ... + + @classmethod + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: str | bytes | None + ) -> abc.UNIXDatagramSocket | abc.ConnectedUNIXDatagramSocket: + trio_socket = trio.socket.from_stdlib_socket(raw_socket) + + if remote_path: + await trio_socket.connect(remote_path) + return ConnectedUNIXDatagramSocket(trio_socket) + else: + return UNIXDatagramSocket(trio_socket) + + @classmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> list[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int], + ] + ]: + return await trio.socket.getaddrinfo(host, port, family, type, proto, flags) + + @classmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + return await trio.socket.getnameinfo(sockaddr, flags) + + @classmethod + async def wait_socket_readable(cls, sock: socket.socket) -> None: + try: + await wait_readable(sock) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError("reading from") from None 
+ + @classmethod + async def wait_socket_writable(cls, sock: socket.socket) -> None: + try: + await wait_writable(sock) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError("writing to") from None + + @classmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + try: + return _capacity_limiter_wrapper.get() + except LookupError: + limiter = CapacityLimiter( + original=trio.to_thread.current_default_thread_limiter() + ) + _capacity_limiter_wrapper.set(limiter) + return limiter + + @classmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> ContextManager[AsyncIterator[Signals]]: + return _SignalReceiver(signals) + + @classmethod + def get_current_task(cls) -> TaskInfo: + task = current_task() + + parent_id = None + if task.parent_nursery and task.parent_nursery.parent_task: + parent_id = id(task.parent_nursery.parent_task) + + return TaskInfo(id(task), parent_id, task.name, task.coro) + + @classmethod + def get_running_tasks(cls) -> list[TaskInfo]: + root_task = current_root_task() + assert root_task + task_infos = [TaskInfo(id(root_task), None, root_task.name, root_task.coro)] + nurseries = root_task.child_nurseries + while nurseries: + new_nurseries: list[trio.Nursery] = [] + for nursery in nurseries: + for task in nursery.child_tasks: + task_infos.append( + TaskInfo( + id(task), id(nursery.parent_task), task.name, task.coro + ) + ) + new_nurseries.extend(task.child_nurseries) + + nurseries = new_nurseries + + return task_infos + + @classmethod + async def wait_all_tasks_blocked(cls) -> None: + from trio.testing import wait_all_tasks_blocked + + await wait_all_tasks_blocked() + + @classmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + return TestRunner(**options) + + +backend_class = TrioBackend diff --git a/.venv/Lib/site-packages/anyio/_core/__init__.py 
b/.venv/Lib/site-packages/anyio/_core/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/anyio/_core/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/anyio/_core/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..f2f94f59 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/_core/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/_core/__pycache__/_eventloop.cpython-311.pyc b/.venv/Lib/site-packages/anyio/_core/__pycache__/_eventloop.cpython-311.pyc new file mode 100644 index 00000000..46d00a4b Binary files /dev/null and b/.venv/Lib/site-packages/anyio/_core/__pycache__/_eventloop.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/_core/__pycache__/_exceptions.cpython-311.pyc b/.venv/Lib/site-packages/anyio/_core/__pycache__/_exceptions.cpython-311.pyc new file mode 100644 index 00000000..1ff90773 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/_core/__pycache__/_exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/_core/__pycache__/_fileio.cpython-311.pyc b/.venv/Lib/site-packages/anyio/_core/__pycache__/_fileio.cpython-311.pyc new file mode 100644 index 00000000..1c7af729 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/_core/__pycache__/_fileio.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/_core/__pycache__/_resources.cpython-311.pyc b/.venv/Lib/site-packages/anyio/_core/__pycache__/_resources.cpython-311.pyc new file mode 100644 index 00000000..5682dfef Binary files /dev/null and b/.venv/Lib/site-packages/anyio/_core/__pycache__/_resources.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/_core/__pycache__/_signals.cpython-311.pyc b/.venv/Lib/site-packages/anyio/_core/__pycache__/_signals.cpython-311.pyc new file mode 100644 index 00000000..bf442631 Binary files /dev/null and 
b/.venv/Lib/site-packages/anyio/_core/__pycache__/_signals.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/_core/__pycache__/_sockets.cpython-311.pyc b/.venv/Lib/site-packages/anyio/_core/__pycache__/_sockets.cpython-311.pyc new file mode 100644 index 00000000..89c36402 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/_core/__pycache__/_sockets.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/_core/__pycache__/_streams.cpython-311.pyc b/.venv/Lib/site-packages/anyio/_core/__pycache__/_streams.cpython-311.pyc new file mode 100644 index 00000000..4579cc82 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/_core/__pycache__/_streams.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-311.pyc b/.venv/Lib/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-311.pyc new file mode 100644 index 00000000..48d4bc56 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/_core/__pycache__/_synchronization.cpython-311.pyc b/.venv/Lib/site-packages/anyio/_core/__pycache__/_synchronization.cpython-311.pyc new file mode 100644 index 00000000..2b0e0125 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/_core/__pycache__/_synchronization.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/_core/__pycache__/_tasks.cpython-311.pyc b/.venv/Lib/site-packages/anyio/_core/__pycache__/_tasks.cpython-311.pyc new file mode 100644 index 00000000..9fd9595c Binary files /dev/null and b/.venv/Lib/site-packages/anyio/_core/__pycache__/_tasks.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/_core/__pycache__/_testing.cpython-311.pyc b/.venv/Lib/site-packages/anyio/_core/__pycache__/_testing.cpython-311.pyc new file mode 100644 index 00000000..31861e41 Binary files /dev/null and 
b/.venv/Lib/site-packages/anyio/_core/__pycache__/_testing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/_core/__pycache__/_typedattr.cpython-311.pyc b/.venv/Lib/site-packages/anyio/_core/__pycache__/_typedattr.cpython-311.pyc new file mode 100644 index 00000000..4bb96ac9 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/_core/__pycache__/_typedattr.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/_core/_eventloop.py b/.venv/Lib/site-packages/anyio/_core/_eventloop.py new file mode 100644 index 00000000..a9c6e825 --- /dev/null +++ b/.venv/Lib/site-packages/anyio/_core/_eventloop.py @@ -0,0 +1,163 @@ +from __future__ import annotations + +import math +import sys +import threading +from collections.abc import Awaitable, Callable, Generator +from contextlib import contextmanager +from importlib import import_module +from typing import TYPE_CHECKING, Any, TypeVar + +import sniffio + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +if TYPE_CHECKING: + from ..abc import AsyncBackend + +# This must be updated when new backends are introduced +BACKENDS = "asyncio", "trio" + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + +threadlocals = threading.local() + + +def run( + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], + backend: str = "asyncio", + backend_options: dict[str, Any] | None = None, +) -> T_Retval: + """ + Run the given coroutine function in an asynchronous event loop. + + The current thread must not be already running an event loop. 
+ + :param func: a coroutine function + :param args: positional arguments to ``func`` + :param backend: name of the asynchronous event loop implementation – currently + either ``asyncio`` or ``trio`` + :param backend_options: keyword arguments to call the backend ``run()`` + implementation with (documented :ref:`here `) + :return: the return value of the coroutine function + :raises RuntimeError: if an asynchronous event loop is already running in this + thread + :raises LookupError: if the named backend is not found + + """ + try: + asynclib_name = sniffio.current_async_library() + except sniffio.AsyncLibraryNotFoundError: + pass + else: + raise RuntimeError(f"Already running {asynclib_name} in this thread") + + try: + async_backend = get_async_backend(backend) + except ImportError as exc: + raise LookupError(f"No such backend: {backend}") from exc + + token = None + if sniffio.current_async_library_cvar.get(None) is None: + # Since we're in control of the event loop, we can cache the name of the async + # library + token = sniffio.current_async_library_cvar.set(backend) + + try: + backend_options = backend_options or {} + return async_backend.run(func, args, {}, backend_options) + finally: + if token: + sniffio.current_async_library_cvar.reset(token) + + +async def sleep(delay: float) -> None: + """ + Pause the current task for the specified duration. + + :param delay: the duration, in seconds + + """ + return await get_async_backend().sleep(delay) + + +async def sleep_forever() -> None: + """ + Pause the current task until it's cancelled. + + This is a shortcut for ``sleep(math.inf)``. + + .. versionadded:: 3.1 + + """ + await sleep(math.inf) + + +async def sleep_until(deadline: float) -> None: + """ + Pause the current task until the given time. + + :param deadline: the absolute time to wake up at (according to the internal + monotonic clock of the event loop) + + .. 
versionadded:: 3.1 + + """ + now = current_time() + await sleep(max(deadline - now, 0)) + + +def current_time() -> float: + """ + Return the current value of the event loop's internal clock. + + :return: the clock value (seconds) + + """ + return get_async_backend().current_time() + + +def get_all_backends() -> tuple[str, ...]: + """Return a tuple of the names of all built-in backends.""" + return BACKENDS + + +def get_cancelled_exc_class() -> type[BaseException]: + """Return the current async library's cancellation exception class.""" + return get_async_backend().cancelled_exception_class() + + +# +# Private API +# + + +@contextmanager +def claim_worker_thread( + backend_class: type[AsyncBackend], token: object +) -> Generator[Any, None, None]: + threadlocals.current_async_backend = backend_class + threadlocals.current_token = token + try: + yield + finally: + del threadlocals.current_async_backend + del threadlocals.current_token + + +def get_async_backend(asynclib_name: str | None = None) -> AsyncBackend: + if asynclib_name is None: + asynclib_name = sniffio.current_async_library() + + modulename = "anyio._backends._" + asynclib_name + try: + module = sys.modules[modulename] + except KeyError: + module = import_module(modulename) + + return getattr(module, "backend_class") diff --git a/.venv/Lib/site-packages/anyio/_core/_exceptions.py b/.venv/Lib/site-packages/anyio/_core/_exceptions.py new file mode 100644 index 00000000..571c3b85 --- /dev/null +++ b/.venv/Lib/site-packages/anyio/_core/_exceptions.py @@ -0,0 +1,73 @@ +from __future__ import annotations + + +class BrokenResourceError(Exception): + """ + Raised when trying to use a resource that has been rendered unusable due to external + causes (e.g. a send stream whose peer has disconnected). + """ + + +class BrokenWorkerProcess(Exception): + """ + Raised by :func:`run_sync_in_process` if the worker process terminates abruptly or + otherwise misbehaves. 
+ """ + + +class BusyResourceError(Exception): + """ + Raised when two tasks are trying to read from or write to the same resource + concurrently. + """ + + def __init__(self, action: str): + super().__init__(f"Another task is already {action} this resource") + + +class ClosedResourceError(Exception): + """Raised when trying to use a resource that has been closed.""" + + +class DelimiterNotFound(Exception): + """ + Raised during + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + maximum number of bytes has been read without the delimiter being found. + """ + + def __init__(self, max_bytes: int) -> None: + super().__init__( + f"The delimiter was not found among the first {max_bytes} bytes" + ) + + +class EndOfStream(Exception): + """ + Raised when trying to read from a stream that has been closed from the other end. + """ + + +class IncompleteRead(Exception): + """ + Raised during + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + connection is closed before the requested amount of bytes has been read. + """ + + def __init__(self) -> None: + super().__init__( + "The stream was closed before the read operation could be completed" + ) + + +class TypedAttributeLookupError(LookupError): + """ + Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute + is not found and no default value has been given. 
+ """ + + +class WouldBlock(Exception): + """Raised by ``X_nowait`` functions if ``X()`` would block.""" diff --git a/.venv/Lib/site-packages/anyio/_core/_fileio.py b/.venv/Lib/site-packages/anyio/_core/_fileio.py new file mode 100644 index 00000000..d054be69 --- /dev/null +++ b/.venv/Lib/site-packages/anyio/_core/_fileio.py @@ -0,0 +1,645 @@ +from __future__ import annotations + +import os +import pathlib +import sys +from collections.abc import Callable, Iterable, Iterator, Sequence +from dataclasses import dataclass +from functools import partial +from os import PathLike +from typing import ( + IO, + TYPE_CHECKING, + Any, + AnyStr, + AsyncIterator, + Final, + Generic, + overload, +) + +from .. import to_thread +from ..abc import AsyncResource + +if TYPE_CHECKING: + from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer +else: + ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object + + +class AsyncFile(AsyncResource, Generic[AnyStr]): + """ + An asynchronous file object. + + This class wraps a standard file object and provides async friendly versions of the + following blocking methods (where available on the original file object): + + * read + * read1 + * readline + * readlines + * readinto + * readinto1 + * write + * writelines + * truncate + * seek + * tell + * flush + + All other methods are directly passed through. + + This class supports the asynchronous context manager protocol which closes the + underlying file at the end of the context block. + + This class also supports asynchronous iteration:: + + async with await open_file(...) 
as f: + async for line in f: + print(line) + """ + + def __init__(self, fp: IO[AnyStr]) -> None: + self._fp: Any = fp + + def __getattr__(self, name: str) -> object: + return getattr(self._fp, name) + + @property + def wrapped(self) -> IO[AnyStr]: + """The wrapped file object.""" + return self._fp + + async def __aiter__(self) -> AsyncIterator[AnyStr]: + while True: + line = await self.readline() + if line: + yield line + else: + break + + async def aclose(self) -> None: + return await to_thread.run_sync(self._fp.close) + + async def read(self, size: int = -1) -> AnyStr: + return await to_thread.run_sync(self._fp.read, size) + + async def read1(self: AsyncFile[bytes], size: int = -1) -> bytes: + return await to_thread.run_sync(self._fp.read1, size) + + async def readline(self) -> AnyStr: + return await to_thread.run_sync(self._fp.readline) + + async def readlines(self) -> list[AnyStr]: + return await to_thread.run_sync(self._fp.readlines) + + async def readinto(self: AsyncFile[bytes], b: WriteableBuffer) -> bytes: + return await to_thread.run_sync(self._fp.readinto, b) + + async def readinto1(self: AsyncFile[bytes], b: WriteableBuffer) -> bytes: + return await to_thread.run_sync(self._fp.readinto1, b) + + @overload + async def write(self: AsyncFile[bytes], b: ReadableBuffer) -> int: + ... + + @overload + async def write(self: AsyncFile[str], b: str) -> int: + ... + + async def write(self, b: ReadableBuffer | str) -> int: + return await to_thread.run_sync(self._fp.write, b) + + @overload + async def writelines( + self: AsyncFile[bytes], lines: Iterable[ReadableBuffer] + ) -> None: + ... + + @overload + async def writelines(self: AsyncFile[str], lines: Iterable[str]) -> None: + ... 
+ + async def writelines(self, lines: Iterable[ReadableBuffer] | Iterable[str]) -> None: + return await to_thread.run_sync(self._fp.writelines, lines) + + async def truncate(self, size: int | None = None) -> int: + return await to_thread.run_sync(self._fp.truncate, size) + + async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int: + return await to_thread.run_sync(self._fp.seek, offset, whence) + + async def tell(self) -> int: + return await to_thread.run_sync(self._fp.tell) + + async def flush(self) -> None: + return await to_thread.run_sync(self._fp.flush) + + +@overload +async def open_file( + file: str | PathLike[str] | int, + mode: OpenBinaryMode, + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: Callable[[str, int], int] | None = ..., +) -> AsyncFile[bytes]: + ... + + +@overload +async def open_file( + file: str | PathLike[str] | int, + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: Callable[[str, int], int] | None = ..., +) -> AsyncFile[str]: + ... + + +async def open_file( + file: str | PathLike[str] | int, + mode: str = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + closefd: bool = True, + opener: Callable[[str, int], int] | None = None, +) -> AsyncFile[Any]: + """ + Open a file asynchronously. + + The arguments are exactly the same as for the builtin :func:`open`. + + :return: an asynchronous file object + + """ + fp = await to_thread.run_sync( + open, file, mode, buffering, encoding, errors, newline, closefd, opener + ) + return AsyncFile(fp) + + +def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]: + """ + Wrap an existing file as an asynchronous file. 
+ + :param file: an existing file-like object + :return: an asynchronous file object + + """ + return AsyncFile(file) + + +@dataclass(eq=False) +class _PathIterator(AsyncIterator["Path"]): + iterator: Iterator[PathLike[str]] + + async def __anext__(self) -> Path: + nextval = await to_thread.run_sync( + next, self.iterator, None, abandon_on_cancel=True + ) + if nextval is None: + raise StopAsyncIteration from None + + return Path(nextval) + + +class Path: + """ + An asynchronous version of :class:`pathlib.Path`. + + This class cannot be substituted for :class:`pathlib.Path` or + :class:`pathlib.PurePath`, but it is compatible with the :class:`os.PathLike` + interface. + + It implements the Python 3.10 version of :class:`pathlib.Path` interface, except for + the deprecated :meth:`~pathlib.Path.link_to` method. + + Any methods that do disk I/O need to be awaited on. These methods are: + + * :meth:`~pathlib.Path.absolute` + * :meth:`~pathlib.Path.chmod` + * :meth:`~pathlib.Path.cwd` + * :meth:`~pathlib.Path.exists` + * :meth:`~pathlib.Path.expanduser` + * :meth:`~pathlib.Path.group` + * :meth:`~pathlib.Path.hardlink_to` + * :meth:`~pathlib.Path.home` + * :meth:`~pathlib.Path.is_block_device` + * :meth:`~pathlib.Path.is_char_device` + * :meth:`~pathlib.Path.is_dir` + * :meth:`~pathlib.Path.is_fifo` + * :meth:`~pathlib.Path.is_file` + * :meth:`~pathlib.Path.is_mount` + * :meth:`~pathlib.Path.lchmod` + * :meth:`~pathlib.Path.lstat` + * :meth:`~pathlib.Path.mkdir` + * :meth:`~pathlib.Path.open` + * :meth:`~pathlib.Path.owner` + * :meth:`~pathlib.Path.read_bytes` + * :meth:`~pathlib.Path.read_text` + * :meth:`~pathlib.Path.readlink` + * :meth:`~pathlib.Path.rename` + * :meth:`~pathlib.Path.replace` + * :meth:`~pathlib.Path.rmdir` + * :meth:`~pathlib.Path.samefile` + * :meth:`~pathlib.Path.stat` + * :meth:`~pathlib.Path.touch` + * :meth:`~pathlib.Path.unlink` + * :meth:`~pathlib.Path.write_bytes` + * :meth:`~pathlib.Path.write_text` + + Additionally, the following methods 
return an async iterator yielding + :class:`~.Path` objects: + + * :meth:`~pathlib.Path.glob` + * :meth:`~pathlib.Path.iterdir` + * :meth:`~pathlib.Path.rglob` + """ + + __slots__ = "_path", "__weakref__" + + __weakref__: Any + + def __init__(self, *args: str | PathLike[str]) -> None: + self._path: Final[pathlib.Path] = pathlib.Path(*args) + + def __fspath__(self) -> str: + return self._path.__fspath__() + + def __str__(self) -> str: + return self._path.__str__() + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.as_posix()!r})" + + def __bytes__(self) -> bytes: + return self._path.__bytes__() + + def __hash__(self) -> int: + return self._path.__hash__() + + def __eq__(self, other: object) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__eq__(target) + + def __lt__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__lt__(target) + + def __le__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__le__(target) + + def __gt__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__gt__(target) + + def __ge__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__ge__(target) + + def __truediv__(self, other: str | PathLike[str]) -> Path: + return Path(self._path / other) + + def __rtruediv__(self, other: str | PathLike[str]) -> Path: + return Path(other) / self + + @property + def parts(self) -> tuple[str, ...]: + return self._path.parts + + @property + def drive(self) -> str: + return self._path.drive + + @property + def root(self) -> str: + return self._path.root + + @property + def anchor(self) -> str: + return self._path.anchor + + @property + def parents(self) -> Sequence[Path]: + return tuple(Path(p) 
for p in self._path.parents) + + @property + def parent(self) -> Path: + return Path(self._path.parent) + + @property + def name(self) -> str: + return self._path.name + + @property + def suffix(self) -> str: + return self._path.suffix + + @property + def suffixes(self) -> list[str]: + return self._path.suffixes + + @property + def stem(self) -> str: + return self._path.stem + + async def absolute(self) -> Path: + path = await to_thread.run_sync(self._path.absolute) + return Path(path) + + def as_posix(self) -> str: + return self._path.as_posix() + + def as_uri(self) -> str: + return self._path.as_uri() + + def match(self, path_pattern: str) -> bool: + return self._path.match(path_pattern) + + def is_relative_to(self, other: str | PathLike[str]) -> bool: + try: + self.relative_to(other) + return True + except ValueError: + return False + + async def is_junction(self) -> bool: + return await to_thread.run_sync(self._path.is_junction) + + async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: + func = partial(os.chmod, follow_symlinks=follow_symlinks) + return await to_thread.run_sync(func, self._path, mode) + + @classmethod + async def cwd(cls) -> Path: + path = await to_thread.run_sync(pathlib.Path.cwd) + return cls(path) + + async def exists(self) -> bool: + return await to_thread.run_sync(self._path.exists, abandon_on_cancel=True) + + async def expanduser(self) -> Path: + return Path( + await to_thread.run_sync(self._path.expanduser, abandon_on_cancel=True) + ) + + def glob(self, pattern: str) -> AsyncIterator[Path]: + gen = self._path.glob(pattern) + return _PathIterator(gen) + + async def group(self) -> str: + return await to_thread.run_sync(self._path.group, abandon_on_cancel=True) + + async def hardlink_to( + self, target: str | bytes | PathLike[str] | PathLike[bytes] + ) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(os.link, target, self) + + @classmethod + async def home(cls) -> Path: + 
home_path = await to_thread.run_sync(pathlib.Path.home) + return cls(home_path) + + def is_absolute(self) -> bool: + return self._path.is_absolute() + + async def is_block_device(self) -> bool: + return await to_thread.run_sync( + self._path.is_block_device, abandon_on_cancel=True + ) + + async def is_char_device(self) -> bool: + return await to_thread.run_sync( + self._path.is_char_device, abandon_on_cancel=True + ) + + async def is_dir(self) -> bool: + return await to_thread.run_sync(self._path.is_dir, abandon_on_cancel=True) + + async def is_fifo(self) -> bool: + return await to_thread.run_sync(self._path.is_fifo, abandon_on_cancel=True) + + async def is_file(self) -> bool: + return await to_thread.run_sync(self._path.is_file, abandon_on_cancel=True) + + async def is_mount(self) -> bool: + return await to_thread.run_sync( + os.path.ismount, self._path, abandon_on_cancel=True + ) + + def is_reserved(self) -> bool: + return self._path.is_reserved() + + async def is_socket(self) -> bool: + return await to_thread.run_sync(self._path.is_socket, abandon_on_cancel=True) + + async def is_symlink(self) -> bool: + return await to_thread.run_sync(self._path.is_symlink, abandon_on_cancel=True) + + def iterdir(self) -> AsyncIterator[Path]: + gen = self._path.iterdir() + return _PathIterator(gen) + + def joinpath(self, *args: str | PathLike[str]) -> Path: + return Path(self._path.joinpath(*args)) + + async def lchmod(self, mode: int) -> None: + await to_thread.run_sync(self._path.lchmod, mode) + + async def lstat(self) -> os.stat_result: + return await to_thread.run_sync(self._path.lstat, abandon_on_cancel=True) + + async def mkdir( + self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False + ) -> None: + await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok) + + @overload + async def open( + self, + mode: OpenBinaryMode, + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + ) -> 
AsyncFile[bytes]: + ... + + @overload + async def open( + self, + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + ) -> AsyncFile[str]: + ... + + async def open( + self, + mode: str = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> AsyncFile[Any]: + fp = await to_thread.run_sync( + self._path.open, mode, buffering, encoding, errors, newline + ) + return AsyncFile(fp) + + async def owner(self) -> str: + return await to_thread.run_sync(self._path.owner, abandon_on_cancel=True) + + async def read_bytes(self) -> bytes: + return await to_thread.run_sync(self._path.read_bytes) + + async def read_text( + self, encoding: str | None = None, errors: str | None = None + ) -> str: + return await to_thread.run_sync(self._path.read_text, encoding, errors) + + if sys.version_info >= (3, 12): + + def relative_to( + self, *other: str | PathLike[str], walk_up: bool = False + ) -> Path: + return Path(self._path.relative_to(*other, walk_up=walk_up)) + + else: + + def relative_to(self, *other: str | PathLike[str]) -> Path: + return Path(self._path.relative_to(*other)) + + async def readlink(self) -> Path: + target = await to_thread.run_sync(os.readlink, self._path) + return Path(target) + + async def rename(self, target: str | pathlib.PurePath | Path) -> Path: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.rename, target) + return Path(target) + + async def replace(self, target: str | pathlib.PurePath | Path) -> Path: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.replace, target) + return Path(target) + + async def resolve(self, strict: bool = False) -> Path: + func = partial(self._path.resolve, strict=strict) + return Path(await to_thread.run_sync(func, abandon_on_cancel=True)) + + def rglob(self, pattern: str) -> 
AsyncIterator[Path]: + gen = self._path.rglob(pattern) + return _PathIterator(gen) + + async def rmdir(self) -> None: + await to_thread.run_sync(self._path.rmdir) + + async def samefile(self, other_path: str | PathLike[str]) -> bool: + if isinstance(other_path, Path): + other_path = other_path._path + + return await to_thread.run_sync( + self._path.samefile, other_path, abandon_on_cancel=True + ) + + async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result: + func = partial(os.stat, follow_symlinks=follow_symlinks) + return await to_thread.run_sync(func, self._path, abandon_on_cancel=True) + + async def symlink_to( + self, + target: str | bytes | PathLike[str] | PathLike[bytes], + target_is_directory: bool = False, + ) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.symlink_to, target, target_is_directory) + + async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: + await to_thread.run_sync(self._path.touch, mode, exist_ok) + + async def unlink(self, missing_ok: bool = False) -> None: + try: + await to_thread.run_sync(self._path.unlink) + except FileNotFoundError: + if not missing_ok: + raise + + if sys.version_info >= (3, 12): + + async def walk( + self, + top_down: bool = True, + on_error: Callable[[OSError], object] | None = None, + follow_symlinks: bool = False, + ) -> AsyncIterator[tuple[Path, list[str], list[str]]]: + def get_next_value() -> tuple[pathlib.Path, list[str], list[str]] | None: + try: + return next(gen) + except StopIteration: + return None + + gen = self._path.walk(top_down, on_error, follow_symlinks) + while True: + value = await to_thread.run_sync(get_next_value) + if value is None: + return + + root, dirs, paths = value + yield Path(root), dirs, paths + + def with_name(self, name: str) -> Path: + return Path(self._path.with_name(name)) + + def with_stem(self, stem: str) -> Path: + return Path(self._path.with_name(stem + self._path.suffix)) + + def 
with_suffix(self, suffix: str) -> Path: + return Path(self._path.with_suffix(suffix)) + + def with_segments(self, *pathsegments: str | PathLike[str]) -> Path: + return Path(*pathsegments) + + async def write_bytes(self, data: bytes) -> int: + return await to_thread.run_sync(self._path.write_bytes, data) + + async def write_text( + self, + data: str, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> int: + # Path.write_text() does not support the "newline" parameter before Python 3.10 + def sync_write_text() -> int: + with self._path.open( + "w", encoding=encoding, errors=errors, newline=newline + ) as fp: + return fp.write(data) + + return await to_thread.run_sync(sync_write_text) + + +PathLike.register(Path) diff --git a/.venv/Lib/site-packages/anyio/_core/_resources.py b/.venv/Lib/site-packages/anyio/_core/_resources.py new file mode 100644 index 00000000..b9a5344a --- /dev/null +++ b/.venv/Lib/site-packages/anyio/_core/_resources.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from ..abc import AsyncResource +from ._tasks import CancelScope + + +async def aclose_forcefully(resource: AsyncResource) -> None: + """ + Close an asynchronous resource in a cancelled scope. + + Doing this closes the resource without waiting on anything. 
+ + :param resource: the resource to close + + """ + with CancelScope() as scope: + scope.cancel() + await resource.aclose() diff --git a/.venv/Lib/site-packages/anyio/_core/_signals.py b/.venv/Lib/site-packages/anyio/_core/_signals.py new file mode 100644 index 00000000..115c749b --- /dev/null +++ b/.venv/Lib/site-packages/anyio/_core/_signals.py @@ -0,0 +1,25 @@ +from __future__ import annotations + +from collections.abc import AsyncIterator +from signal import Signals +from typing import ContextManager + +from ._eventloop import get_async_backend + + +def open_signal_receiver(*signals: Signals) -> ContextManager[AsyncIterator[Signals]]: + """ + Start receiving operating system signals. + + :param signals: signals to receive (e.g. ``signal.SIGINT``) + :return: an asynchronous context manager for an asynchronous iterator which yields + signal numbers + + .. warning:: Windows does not support signals natively so it is best to avoid + relying on this in cross-platform applications. + + .. warning:: On asyncio, this permanently replaces any previous signal handler for + the given signals, as set via :meth:`~asyncio.loop.add_signal_handler`. + + """ + return get_async_backend().open_signal_receiver(*signals) diff --git a/.venv/Lib/site-packages/anyio/_core/_sockets.py b/.venv/Lib/site-packages/anyio/_core/_sockets.py new file mode 100644 index 00000000..0f0a3142 --- /dev/null +++ b/.venv/Lib/site-packages/anyio/_core/_sockets.py @@ -0,0 +1,716 @@ +from __future__ import annotations + +import errno +import os +import socket +import ssl +import stat +import sys +from collections.abc import Awaitable +from ipaddress import IPv6Address, ip_address +from os import PathLike, chmod +from socket import AddressFamily, SocketKind +from typing import Any, Literal, cast, overload + +from .. 
import to_thread +from ..abc import ( + ConnectedUDPSocket, + ConnectedUNIXDatagramSocket, + IPAddressType, + IPSockAddrType, + SocketListener, + SocketStream, + UDPSocket, + UNIXDatagramSocket, + UNIXSocketStream, +) +from ..streams.stapled import MultiListener +from ..streams.tls import TLSStream +from ._eventloop import get_async_backend +from ._resources import aclose_forcefully +from ._synchronization import Event +from ._tasks import create_task_group, move_on_after + +if sys.version_info < (3, 11): + from exceptiongroup import ExceptionGroup + +IPPROTO_IPV6 = getattr(socket, "IPPROTO_IPV6", 41) # https://bugs.python.org/issue29515 + +AnyIPAddressFamily = Literal[ + AddressFamily.AF_UNSPEC, AddressFamily.AF_INET, AddressFamily.AF_INET6 +] +IPAddressFamily = Literal[AddressFamily.AF_INET, AddressFamily.AF_INET6] + + +# tls_hostname given +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + ssl_context: ssl.SSLContext | None = ..., + tls_standard_compatible: bool = ..., + tls_hostname: str, + happy_eyeballs_delay: float = ..., +) -> TLSStream: + ... + + +# ssl_context given +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + ssl_context: ssl.SSLContext, + tls_standard_compatible: bool = ..., + tls_hostname: str | None = ..., + happy_eyeballs_delay: float = ..., +) -> TLSStream: + ... + + +# tls=True +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + tls: Literal[True], + ssl_context: ssl.SSLContext | None = ..., + tls_standard_compatible: bool = ..., + tls_hostname: str | None = ..., + happy_eyeballs_delay: float = ..., +) -> TLSStream: + ... 
+ + +# tls=False +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + tls: Literal[False], + ssl_context: ssl.SSLContext | None = ..., + tls_standard_compatible: bool = ..., + tls_hostname: str | None = ..., + happy_eyeballs_delay: float = ..., +) -> SocketStream: + ... + + +# No TLS arguments +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + happy_eyeballs_delay: float = ..., +) -> SocketStream: + ... + + +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = None, + tls: bool = False, + ssl_context: ssl.SSLContext | None = None, + tls_standard_compatible: bool = True, + tls_hostname: str | None = None, + happy_eyeballs_delay: float = 0.25, +) -> SocketStream | TLSStream: + """ + Connect to a host using the TCP protocol. + + This function implements the stateless version of the Happy Eyeballs algorithm (RFC + 6555). If ``remote_host`` is a host name that resolves to multiple IP addresses, + each one is tried until one connection attempt succeeds. If the first attempt does + not connected within 250 milliseconds, a second attempt is started using the next + address in the list, and so on. On IPv6 enabled systems, an IPv6 address (if + available) is tried first. + + When the connection has been established, a TLS handshake will be done if either + ``ssl_context`` or ``tls_hostname`` is not ``None``, or if ``tls`` is ``True``. 
+ + :param remote_host: the IP address or host name to connect to + :param remote_port: port on the target host to connect to + :param local_host: the interface address or name to bind the socket to before + connecting + :param tls: ``True`` to do a TLS handshake with the connected stream and return a + :class:`~anyio.streams.tls.TLSStream` instead + :param ssl_context: the SSL context object to use (if omitted, a default context is + created) + :param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake + before closing the stream and requires that the server does this as well. + Otherwise, :exc:`~ssl.SSLEOFError` may be raised during reads from the stream. + Some protocols, such as HTTP, require this option to be ``False``. + See :meth:`~ssl.SSLContext.wrap_socket` for details. + :param tls_hostname: host name to check the server certificate against (defaults to + the value of ``remote_host``) + :param happy_eyeballs_delay: delay (in seconds) before starting the next connection + attempt + :return: a socket stream object if no TLS handshake was done, otherwise a TLS stream + :raises OSError: if the connection attempt fails + + """ + # Placed here due to https://github.com/python/mypy/issues/7057 + connected_stream: SocketStream | None = None + + async def try_connect(remote_host: str, event: Event) -> None: + nonlocal connected_stream + try: + stream = await asynclib.connect_tcp(remote_host, remote_port, local_address) + except OSError as exc: + oserrors.append(exc) + return + else: + if connected_stream is None: + connected_stream = stream + tg.cancel_scope.cancel() + else: + await stream.aclose() + finally: + event.set() + + asynclib = get_async_backend() + local_address: IPSockAddrType | None = None + family = socket.AF_UNSPEC + if local_host: + gai_res = await getaddrinfo(str(local_host), None) + family, *_, local_address = gai_res[0] + + target_host = str(remote_host) + try: + addr_obj = ip_address(remote_host) + except ValueError: + # 
getaddrinfo() will raise an exception if name resolution fails + gai_res = await getaddrinfo( + target_host, remote_port, family=family, type=socket.SOCK_STREAM + ) + + # Organize the list so that the first address is an IPv6 address (if available) + # and the second one is an IPv4 addresses. The rest can be in whatever order. + v6_found = v4_found = False + target_addrs: list[tuple[socket.AddressFamily, str]] = [] + for af, *rest, sa in gai_res: + if af == socket.AF_INET6 and not v6_found: + v6_found = True + target_addrs.insert(0, (af, sa[0])) + elif af == socket.AF_INET and not v4_found and v6_found: + v4_found = True + target_addrs.insert(1, (af, sa[0])) + else: + target_addrs.append((af, sa[0])) + else: + if isinstance(addr_obj, IPv6Address): + target_addrs = [(socket.AF_INET6, addr_obj.compressed)] + else: + target_addrs = [(socket.AF_INET, addr_obj.compressed)] + + oserrors: list[OSError] = [] + async with create_task_group() as tg: + for i, (af, addr) in enumerate(target_addrs): + event = Event() + tg.start_soon(try_connect, addr, event) + with move_on_after(happy_eyeballs_delay): + await event.wait() + + if connected_stream is None: + cause = ( + oserrors[0] + if len(oserrors) == 1 + else ExceptionGroup("multiple connection attempts failed", oserrors) + ) + raise OSError("All connection attempts failed") from cause + + if tls or tls_hostname or ssl_context: + try: + return await TLSStream.wrap( + connected_stream, + server_side=False, + hostname=tls_hostname or str(remote_host), + ssl_context=ssl_context, + standard_compatible=tls_standard_compatible, + ) + except BaseException: + await aclose_forcefully(connected_stream) + raise + + return connected_stream + + +async def connect_unix(path: str | bytes | PathLike[Any]) -> UNIXSocketStream: + """ + Connect to the given UNIX socket. + + Not available on Windows. 
+ + :param path: path to the socket + :return: a socket stream object + + """ + path = os.fspath(path) + return await get_async_backend().connect_unix(path) + + +async def create_tcp_listener( + *, + local_host: IPAddressType | None = None, + local_port: int = 0, + family: AnyIPAddressFamily = socket.AddressFamily.AF_UNSPEC, + backlog: int = 65536, + reuse_port: bool = False, +) -> MultiListener[SocketStream]: + """ + Create a TCP socket listener. + + :param local_port: port number to listen on + :param local_host: IP address of the interface to listen on. If omitted, listen on + all IPv4 and IPv6 interfaces. To listen on all interfaces on a specific address + family, use ``0.0.0.0`` for IPv4 or ``::`` for IPv6. + :param family: address family (used if ``local_host`` was omitted) + :param backlog: maximum number of queued incoming connections (up to a maximum of + 2**16, or 65536) + :param reuse_port: ``True`` to allow multiple sockets to bind to the same + address/port (not supported on Windows) + :return: a list of listener objects + + """ + asynclib = get_async_backend() + backlog = min(backlog, 65536) + local_host = str(local_host) if local_host is not None else None + gai_res = await getaddrinfo( + local_host, + local_port, + family=family, + type=socket.SocketKind.SOCK_STREAM if sys.platform == "win32" else 0, + flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, + ) + listeners: list[SocketListener] = [] + try: + # The set() is here to work around a glibc bug: + # https://sourceware.org/bugzilla/show_bug.cgi?id=14969 + sockaddr: tuple[str, int] | tuple[str, int, int, int] + for fam, kind, *_, sockaddr in sorted(set(gai_res)): + # Workaround for an uvloop bug where we don't get the correct scope ID for + # IPv6 link-local addresses when passing type=socket.SOCK_STREAM to + # getaddrinfo(): https://github.com/MagicStack/uvloop/issues/539 + if sys.platform != "win32" and kind is not SocketKind.SOCK_STREAM: + continue + + raw_socket = socket.socket(fam) + 
raw_socket.setblocking(False) + + # For Windows, enable exclusive address use. For others, enable address + # reuse. + if sys.platform == "win32": + raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) + else: + raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + + if reuse_port: + raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + + # If only IPv6 was requested, disable dual stack operation + if fam == socket.AF_INET6: + raw_socket.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, 1) + + # Workaround for #554 + if "%" in sockaddr[0]: + addr, scope_id = sockaddr[0].split("%", 1) + sockaddr = (addr, sockaddr[1], 0, int(scope_id)) + + raw_socket.bind(sockaddr) + raw_socket.listen(backlog) + listener = asynclib.create_tcp_listener(raw_socket) + listeners.append(listener) + except BaseException: + for listener in listeners: + await listener.aclose() + + raise + + return MultiListener(listeners) + + +async def create_unix_listener( + path: str | bytes | PathLike[Any], + *, + mode: int | None = None, + backlog: int = 65536, +) -> SocketListener: + """ + Create a UNIX socket listener. + + Not available on Windows. + + :param path: path of the socket + :param mode: permissions to set on the socket + :param backlog: maximum number of queued incoming connections (up to a maximum of + 2**16, or 65536) + :return: a listener object + + .. versionchanged:: 3.0 + If a socket already exists on the file system in the given path, it will be + removed first. 
+ + """ + backlog = min(backlog, 65536) + raw_socket = await setup_unix_local_socket(path, mode, socket.SOCK_STREAM) + try: + raw_socket.listen(backlog) + return get_async_backend().create_unix_listener(raw_socket) + except BaseException: + raw_socket.close() + raise + + +async def create_udp_socket( + family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC, + *, + local_host: IPAddressType | None = None, + local_port: int = 0, + reuse_port: bool = False, +) -> UDPSocket: + """ + Create a UDP socket. + + If ``port`` has been given, the socket will be bound to this port on the local + machine, making this socket suitable for providing UDP based services. + + :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically + determined from ``local_host`` if omitted + :param local_host: IP address or host name of the local interface to bind to + :param local_port: local port to bind to + :param reuse_port: ``True`` to allow multiple sockets to bind to the same + address/port (not supported on Windows) + :return: a UDP socket + + """ + if family is AddressFamily.AF_UNSPEC and not local_host: + raise ValueError('Either "family" or "local_host" must be given') + + if local_host: + gai_res = await getaddrinfo( + str(local_host), + local_port, + family=family, + type=socket.SOCK_DGRAM, + flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, + ) + family = cast(AnyIPAddressFamily, gai_res[0][0]) + local_address = gai_res[0][-1] + elif family is AddressFamily.AF_INET6: + local_address = ("::", 0) + else: + local_address = ("0.0.0.0", 0) + + sock = await get_async_backend().create_udp_socket( + family, local_address, None, reuse_port + ) + return cast(UDPSocket, sock) + + +async def create_connected_udp_socket( + remote_host: IPAddressType, + remote_port: int, + *, + family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC, + local_host: IPAddressType | None = None, + local_port: int = 0, + reuse_port: bool = False, +) -> ConnectedUDPSocket: + """ + Create a connected UDP 
socket. + + Connected UDP sockets can only communicate with the specified remote host/port, an + any packets sent from other sources are dropped. + + :param remote_host: remote host to set as the default target + :param remote_port: port on the remote host to set as the default target + :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically + determined from ``local_host`` or ``remote_host`` if omitted + :param local_host: IP address or host name of the local interface to bind to + :param local_port: local port to bind to + :param reuse_port: ``True`` to allow multiple sockets to bind to the same + address/port (not supported on Windows) + :return: a connected UDP socket + + """ + local_address = None + if local_host: + gai_res = await getaddrinfo( + str(local_host), + local_port, + family=family, + type=socket.SOCK_DGRAM, + flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, + ) + family = cast(AnyIPAddressFamily, gai_res[0][0]) + local_address = gai_res[0][-1] + + gai_res = await getaddrinfo( + str(remote_host), remote_port, family=family, type=socket.SOCK_DGRAM + ) + family = cast(AnyIPAddressFamily, gai_res[0][0]) + remote_address = gai_res[0][-1] + + sock = await get_async_backend().create_udp_socket( + family, local_address, remote_address, reuse_port + ) + return cast(ConnectedUDPSocket, sock) + + +async def create_unix_datagram_socket( + *, + local_path: None | str | bytes | PathLike[Any] = None, + local_mode: int | None = None, +) -> UNIXDatagramSocket: + """ + Create a UNIX datagram socket. + + Not available on Windows. + + If ``local_path`` has been given, the socket will be bound to this path, making this + socket suitable for receiving datagrams from other processes. Other processes can + send datagrams to this socket only if ``local_path`` is set. + + If a socket already exists on the file system in the ``local_path``, it will be + removed first. 
+ + :param local_path: the path on which to bind to + :param local_mode: permissions to set on the local socket + :return: a UNIX datagram socket + + """ + raw_socket = await setup_unix_local_socket( + local_path, local_mode, socket.SOCK_DGRAM + ) + return await get_async_backend().create_unix_datagram_socket(raw_socket, None) + + +async def create_connected_unix_datagram_socket( + remote_path: str | bytes | PathLike[Any], + *, + local_path: None | str | bytes | PathLike[Any] = None, + local_mode: int | None = None, +) -> ConnectedUNIXDatagramSocket: + """ + Create a connected UNIX datagram socket. + + Connected datagram sockets can only communicate with the specified remote path. + + If ``local_path`` has been given, the socket will be bound to this path, making + this socket suitable for receiving datagrams from other processes. Other processes + can send datagrams to this socket only if ``local_path`` is set. + + If a socket already exists on the file system in the ``local_path``, it will be + removed first. + + :param remote_path: the path to set as the default target + :param local_path: the path on which to bind to + :param local_mode: permissions to set on the local socket + :return: a connected UNIX datagram socket + + """ + remote_path = os.fspath(remote_path) + raw_socket = await setup_unix_local_socket( + local_path, local_mode, socket.SOCK_DGRAM + ) + return await get_async_backend().create_unix_datagram_socket( + raw_socket, remote_path + ) + + +async def getaddrinfo( + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, +) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int]]]: + """ + Look up a numeric IP address given a host name. + + Internationalized domain names are translated according to the (non-transitional) + IDNA 2008 standard. + + .. 
note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of + (host, port), unlike what :func:`socket.getaddrinfo` does. + + :param host: host name + :param port: port number + :param family: socket family (`'AF_INET``, ...) + :param type: socket type (``SOCK_STREAM``, ...) + :param proto: protocol number + :param flags: flags to pass to upstream ``getaddrinfo()`` + :return: list of tuples containing (family, type, proto, canonname, sockaddr) + + .. seealso:: :func:`socket.getaddrinfo` + + """ + # Handle unicode hostnames + if isinstance(host, str): + try: + encoded_host: bytes | None = host.encode("ascii") + except UnicodeEncodeError: + import idna + + encoded_host = idna.encode(host, uts46=True) + else: + encoded_host = host + + gai_res = await get_async_backend().getaddrinfo( + encoded_host, port, family=family, type=type, proto=proto, flags=flags + ) + return [ + (family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr)) + for family, type, proto, canonname, sockaddr in gai_res + ] + + +def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[tuple[str, str]]: + """ + Look up the host name of an IP address. + + :param sockaddr: socket address (e.g. (ipaddress, port) for IPv4) + :param flags: flags to pass to upstream ``getnameinfo()`` + :return: a tuple of (host name, service name) + + .. seealso:: :func:`socket.getnameinfo` + + """ + return get_async_backend().getnameinfo(sockaddr, flags) + + +def wait_socket_readable(sock: socket.socket) -> Awaitable[None]: + """ + Wait until the given socket has data to be read. + + This does **NOT** work on Windows when using the asyncio backend with a proactor + event loop (default on py3.8+). + + .. warning:: Only use this on raw sockets that have not been wrapped by any higher + level constructs like socket streams! 
+ + :param sock: a socket object + :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the + socket to become readable + :raises ~anyio.BusyResourceError: if another task is already waiting for the socket + to become readable + + """ + return get_async_backend().wait_socket_readable(sock) + + +def wait_socket_writable(sock: socket.socket) -> Awaitable[None]: + """ + Wait until the given socket can be written to. + + This does **NOT** work on Windows when using the asyncio backend with a proactor + event loop (default on py3.8+). + + .. warning:: Only use this on raw sockets that have not been wrapped by any higher + level constructs like socket streams! + + :param sock: a socket object + :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the + socket to become writable + :raises ~anyio.BusyResourceError: if another task is already waiting for the socket + to become writable + + """ + return get_async_backend().wait_socket_writable(sock) + + +# +# Private API +# + + +def convert_ipv6_sockaddr( + sockaddr: tuple[str, int, int, int] | tuple[str, int], +) -> tuple[str, int]: + """ + Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format. + + If the scope ID is nonzero, it is added to the address, separated with ``%``. + Otherwise the flow id and scope id are simply cut off from the tuple. + Any other kinds of socket addresses are returned as-is. 
+ + :param sockaddr: the result of :meth:`~socket.socket.getsockname` + :return: the converted socket address + + """ + # This is more complicated than it should be because of MyPy + if isinstance(sockaddr, tuple) and len(sockaddr) == 4: + host, port, flowinfo, scope_id = sockaddr + if scope_id: + # PyPy (as of v7.3.11) leaves the interface name in the result, so + # we discard it and only get the scope ID from the end + # (https://foss.heptapod.net/pypy/pypy/-/issues/3938) + host = host.split("%")[0] + + # Add scope_id to the address + return f"{host}%{scope_id}", port + else: + return host, port + else: + return sockaddr + + +async def setup_unix_local_socket( + path: None | str | bytes | PathLike[Any], + mode: int | None, + socktype: int, +) -> socket.socket: + """ + Create a UNIX local socket object, deleting the socket at the given path if it + exists. + + Not available on Windows. + + :param path: path of the socket + :param mode: permissions to set on the socket + :param socktype: socket.SOCK_STREAM or socket.SOCK_DGRAM + + """ + path_str: str | bytes | None + if path is not None: + path_str = os.fspath(path) + + # Copied from pathlib... 
+ try: + stat_result = os.stat(path) + except OSError as e: + if e.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EBADF, errno.ELOOP): + raise + else: + if stat.S_ISSOCK(stat_result.st_mode): + os.unlink(path) + else: + path_str = None + + raw_socket = socket.socket(socket.AF_UNIX, socktype) + raw_socket.setblocking(False) + + if path_str is not None: + try: + await to_thread.run_sync(raw_socket.bind, path_str, abandon_on_cancel=True) + if mode is not None: + await to_thread.run_sync(chmod, path_str, mode, abandon_on_cancel=True) + except BaseException: + raw_socket.close() + raise + + return raw_socket diff --git a/.venv/Lib/site-packages/anyio/_core/_streams.py b/.venv/Lib/site-packages/anyio/_core/_streams.py new file mode 100644 index 00000000..aa6b0c22 --- /dev/null +++ b/.venv/Lib/site-packages/anyio/_core/_streams.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +import math +from typing import Tuple, TypeVar +from warnings import warn + +from ..streams.memory import ( + MemoryObjectReceiveStream, + MemoryObjectSendStream, + MemoryObjectStreamState, +) + +T_Item = TypeVar("T_Item") + + +class create_memory_object_stream( + Tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]], +): + """ + Create a memory object stream. + + The stream's item type can be annotated like + :func:`create_memory_object_stream[T_Item]`. + + :param max_buffer_size: number of items held in the buffer until ``send()`` starts + blocking + :param item_type: old way of marking the streams with the right generic type for + static typing (does nothing on AnyIO 4) + + .. deprecated:: 4.0 + Use ``create_memory_object_stream[YourItemType](...)`` instead. 
+ :return: a tuple of (send stream, receive stream) + + """ + + def __new__( # type: ignore[misc] + cls, max_buffer_size: float = 0, item_type: object = None + ) -> tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]: + if max_buffer_size != math.inf and not isinstance(max_buffer_size, int): + raise ValueError("max_buffer_size must be either an integer or math.inf") + if max_buffer_size < 0: + raise ValueError("max_buffer_size cannot be negative") + if item_type is not None: + warn( + "The item_type argument has been deprecated in AnyIO 4.0. " + "Use create_memory_object_stream[YourItemType](...) instead.", + DeprecationWarning, + stacklevel=2, + ) + + state = MemoryObjectStreamState[T_Item](max_buffer_size) + return (MemoryObjectSendStream(state), MemoryObjectReceiveStream(state)) diff --git a/.venv/Lib/site-packages/anyio/_core/_subprocesses.py b/.venv/Lib/site-packages/anyio/_core/_subprocesses.py new file mode 100644 index 00000000..5d5d7b76 --- /dev/null +++ b/.venv/Lib/site-packages/anyio/_core/_subprocesses.py @@ -0,0 +1,140 @@ +from __future__ import annotations + +from collections.abc import AsyncIterable, Mapping, Sequence +from io import BytesIO +from os import PathLike +from subprocess import DEVNULL, PIPE, CalledProcessError, CompletedProcess +from typing import IO, Any, cast + +from ..abc import Process +from ._eventloop import get_async_backend +from ._tasks import create_task_group + + +async def run_process( + command: str | bytes | Sequence[str | bytes], + *, + input: bytes | None = None, + stdout: int | IO[Any] | None = PIPE, + stderr: int | IO[Any] | None = PIPE, + check: bool = True, + cwd: str | bytes | PathLike[str] | None = None, + env: Mapping[str, str] | None = None, + start_new_session: bool = False, +) -> CompletedProcess[bytes]: + """ + Run an external command in a subprocess and wait until it completes. + + .. 
seealso:: :func:`subprocess.run` + + :param command: either a string to pass to the shell, or an iterable of strings + containing the executable name or path and its arguments + :param input: bytes passed to the standard input of the subprocess + :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + a file-like object, or `None` + :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + :data:`subprocess.STDOUT`, a file-like object, or `None` + :param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the + process terminates with a return code other than 0 + :param cwd: If not ``None``, change the working directory to this before running the + command + :param env: if not ``None``, this mapping replaces the inherited environment + variables from the parent process + :param start_new_session: if ``true`` the setsid() system call will be made in the + child process prior to the execution of the subprocess. (POSIX only) + :return: an object representing the completed process + :raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process + exits with a nonzero return code + + """ + + async def drain_stream(stream: AsyncIterable[bytes], index: int) -> None: + buffer = BytesIO() + async for chunk in stream: + buffer.write(chunk) + + stream_contents[index] = buffer.getvalue() + + async with await open_process( + command, + stdin=PIPE if input else DEVNULL, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) as process: + stream_contents: list[bytes | None] = [None, None] + async with create_task_group() as tg: + if process.stdout: + tg.start_soon(drain_stream, process.stdout, 0) + + if process.stderr: + tg.start_soon(drain_stream, process.stderr, 1) + + if process.stdin and input: + await process.stdin.send(input) + await process.stdin.aclose() + + await process.wait() + + output, errors = stream_contents + if check and process.returncode != 0: + 
raise CalledProcessError(cast(int, process.returncode), command, output, errors) + + return CompletedProcess(command, cast(int, process.returncode), output, errors) + + +async def open_process( + command: str | bytes | Sequence[str | bytes], + *, + stdin: int | IO[Any] | None = PIPE, + stdout: int | IO[Any] | None = PIPE, + stderr: int | IO[Any] | None = PIPE, + cwd: str | bytes | PathLike[str] | None = None, + env: Mapping[str, str] | None = None, + start_new_session: bool = False, +) -> Process: + """ + Start an external command in a subprocess. + + .. seealso:: :class:`subprocess.Popen` + + :param command: either a string to pass to the shell, or an iterable of strings + containing the executable name or path and its arguments + :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, a + file-like object, or ``None`` + :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + a file-like object, or ``None`` + :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + :data:`subprocess.STDOUT`, a file-like object, or ``None`` + :param cwd: If not ``None``, the working directory is changed before executing + :param env: If env is not ``None``, it must be a mapping that defines the + environment variables for the new process + :param start_new_session: if ``true`` the setsid() system call will be made in the + child process prior to the execution of the subprocess. 
(POSIX only) + :return: an asynchronous process object + + """ + if isinstance(command, (str, bytes)): + return await get_async_backend().open_process( + command, + shell=True, + stdin=stdin, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) + else: + return await get_async_backend().open_process( + command, + shell=False, + stdin=stdin, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) diff --git a/.venv/Lib/site-packages/anyio/_core/_synchronization.py b/.venv/Lib/site-packages/anyio/_core/_synchronization.py new file mode 100644 index 00000000..b274a31e --- /dev/null +++ b/.venv/Lib/site-packages/anyio/_core/_synchronization.py @@ -0,0 +1,649 @@ +from __future__ import annotations + +import math +from collections import deque +from dataclasses import dataclass +from types import TracebackType + +from sniffio import AsyncLibraryNotFoundError + +from ..lowlevel import cancel_shielded_checkpoint, checkpoint, checkpoint_if_cancelled +from ._eventloop import get_async_backend +from ._exceptions import BusyResourceError, WouldBlock +from ._tasks import CancelScope +from ._testing import TaskInfo, get_current_task + + +@dataclass(frozen=True) +class EventStatistics: + """ + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Event.wait` + """ + + tasks_waiting: int + + +@dataclass(frozen=True) +class CapacityLimiterStatistics: + """ + :ivar int borrowed_tokens: number of tokens currently borrowed by tasks + :ivar float total_tokens: total number of available tokens + :ivar tuple borrowers: tasks or other objects currently holding tokens borrowed from + this limiter + :ivar int tasks_waiting: number of tasks waiting on + :meth:`~.CapacityLimiter.acquire` or + :meth:`~.CapacityLimiter.acquire_on_behalf_of` + """ + + borrowed_tokens: int + total_tokens: float + borrowers: tuple[object, ...] 
+ tasks_waiting: int + + +@dataclass(frozen=True) +class LockStatistics: + """ + :ivar bool locked: flag indicating if this lock is locked or not + :ivar ~anyio.TaskInfo owner: task currently holding the lock (or ``None`` if the + lock is not held by any task) + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Lock.acquire` + """ + + locked: bool + owner: TaskInfo | None + tasks_waiting: int + + +@dataclass(frozen=True) +class ConditionStatistics: + """ + :ivar int tasks_waiting: number of tasks blocked on :meth:`~.Condition.wait` + :ivar ~anyio.LockStatistics lock_statistics: statistics of the underlying + :class:`~.Lock` + """ + + tasks_waiting: int + lock_statistics: LockStatistics + + +@dataclass(frozen=True) +class SemaphoreStatistics: + """ + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Semaphore.acquire` + + """ + + tasks_waiting: int + + +class Event: + def __new__(cls) -> Event: + try: + return get_async_backend().create_event() + except AsyncLibraryNotFoundError: + return EventAdapter() + + def set(self) -> None: + """Set the flag, notifying all listeners.""" + raise NotImplementedError + + def is_set(self) -> bool: + """Return ``True`` if the flag is set, ``False`` if not.""" + raise NotImplementedError + + async def wait(self) -> None: + """ + Wait until the flag has been set. + + If the flag has already been set when this method is called, it returns + immediately. 
+ + """ + raise NotImplementedError + + def statistics(self) -> EventStatistics: + """Return statistics about the current state of this event.""" + raise NotImplementedError + + +class EventAdapter(Event): + _internal_event: Event | None = None + + def __new__(cls) -> EventAdapter: + return object.__new__(cls) + + @property + def _event(self) -> Event: + if self._internal_event is None: + self._internal_event = get_async_backend().create_event() + + return self._internal_event + + def set(self) -> None: + self._event.set() + + def is_set(self) -> bool: + return self._internal_event is not None and self._internal_event.is_set() + + async def wait(self) -> None: + await self._event.wait() + + def statistics(self) -> EventStatistics: + if self._internal_event is None: + return EventStatistics(tasks_waiting=0) + + return self._internal_event.statistics() + + +class Lock: + _owner_task: TaskInfo | None = None + + def __init__(self) -> None: + self._waiters: deque[tuple[TaskInfo, Event]] = deque() + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + async def acquire(self) -> None: + """Acquire the lock.""" + await checkpoint_if_cancelled() + try: + self.acquire_nowait() + except WouldBlock: + task = get_current_task() + event = Event() + token = task, event + self._waiters.append(token) + try: + await event.wait() + except BaseException: + if not event.is_set(): + self._waiters.remove(token) + elif self._owner_task == task: + self.release() + + raise + + assert self._owner_task == task + else: + try: + await cancel_shielded_checkpoint() + except BaseException: + self.release() + raise + + def acquire_nowait(self) -> None: + """ + Acquire the lock, without blocking. 
+ + :raises ~anyio.WouldBlock: if the operation would block + + """ + task = get_current_task() + if self._owner_task == task: + raise RuntimeError("Attempted to acquire an already held Lock") + + if self._owner_task is not None: + raise WouldBlock + + self._owner_task = task + + def release(self) -> None: + """Release the lock.""" + if self._owner_task != get_current_task(): + raise RuntimeError("The current task is not holding this lock") + + if self._waiters: + self._owner_task, event = self._waiters.popleft() + event.set() + else: + del self._owner_task + + def locked(self) -> bool: + """Return True if the lock is currently held.""" + return self._owner_task is not None + + def statistics(self) -> LockStatistics: + """ + Return statistics about the current state of this lock. + + .. versionadded:: 3.0 + """ + return LockStatistics(self.locked(), self._owner_task, len(self._waiters)) + + +class Condition: + _owner_task: TaskInfo | None = None + + def __init__(self, lock: Lock | None = None): + self._lock = lock or Lock() + self._waiters: deque[Event] = deque() + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + def _check_acquired(self) -> None: + if self._owner_task != get_current_task(): + raise RuntimeError("The current task is not holding the underlying lock") + + async def acquire(self) -> None: + """Acquire the underlying lock.""" + await self._lock.acquire() + self._owner_task = get_current_task() + + def acquire_nowait(self) -> None: + """ + Acquire the underlying lock, without blocking. 
+ + :raises ~anyio.WouldBlock: if the operation would block + + """ + self._lock.acquire_nowait() + self._owner_task = get_current_task() + + def release(self) -> None: + """Release the underlying lock.""" + self._lock.release() + + def locked(self) -> bool: + """Return True if the lock is set.""" + return self._lock.locked() + + def notify(self, n: int = 1) -> None: + """Notify exactly n listeners.""" + self._check_acquired() + for _ in range(n): + try: + event = self._waiters.popleft() + except IndexError: + break + + event.set() + + def notify_all(self) -> None: + """Notify all the listeners.""" + self._check_acquired() + for event in self._waiters: + event.set() + + self._waiters.clear() + + async def wait(self) -> None: + """Wait for a notification.""" + await checkpoint() + event = Event() + self._waiters.append(event) + self.release() + try: + await event.wait() + except BaseException: + if not event.is_set(): + self._waiters.remove(event) + + raise + finally: + with CancelScope(shield=True): + await self.acquire() + + def statistics(self) -> ConditionStatistics: + """ + Return statistics about the current state of this condition. + + .. 
versionadded:: 3.0 + """ + return ConditionStatistics(len(self._waiters), self._lock.statistics()) + + +class Semaphore: + def __init__(self, initial_value: int, *, max_value: int | None = None): + if not isinstance(initial_value, int): + raise TypeError("initial_value must be an integer") + if initial_value < 0: + raise ValueError("initial_value must be >= 0") + if max_value is not None: + if not isinstance(max_value, int): + raise TypeError("max_value must be an integer or None") + if max_value < initial_value: + raise ValueError( + "max_value must be equal to or higher than initial_value" + ) + + self._value = initial_value + self._max_value = max_value + self._waiters: deque[Event] = deque() + + async def __aenter__(self) -> Semaphore: + await self.acquire() + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + async def acquire(self) -> None: + """Decrement the semaphore value, blocking if necessary.""" + await checkpoint_if_cancelled() + try: + self.acquire_nowait() + except WouldBlock: + event = Event() + self._waiters.append(event) + try: + await event.wait() + except BaseException: + if not event.is_set(): + self._waiters.remove(event) + else: + self.release() + + raise + else: + try: + await cancel_shielded_checkpoint() + except BaseException: + self.release() + raise + + def acquire_nowait(self) -> None: + """ + Acquire the underlying lock, without blocking. 
+ + :raises ~anyio.WouldBlock: if the operation would block + + """ + if self._value == 0: + raise WouldBlock + + self._value -= 1 + + def release(self) -> None: + """Increment the semaphore value.""" + if self._max_value is not None and self._value == self._max_value: + raise ValueError("semaphore released too many times") + + if self._waiters: + self._waiters.popleft().set() + else: + self._value += 1 + + @property + def value(self) -> int: + """The current value of the semaphore.""" + return self._value + + @property + def max_value(self) -> int | None: + """The maximum value of the semaphore.""" + return self._max_value + + def statistics(self) -> SemaphoreStatistics: + """ + Return statistics about the current state of this semaphore. + + .. versionadded:: 3.0 + """ + return SemaphoreStatistics(len(self._waiters)) + + +class CapacityLimiter: + def __new__(cls, total_tokens: float) -> CapacityLimiter: + try: + return get_async_backend().create_capacity_limiter(total_tokens) + except AsyncLibraryNotFoundError: + return CapacityLimiterAdapter(total_tokens) + + async def __aenter__(self) -> None: + raise NotImplementedError + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + raise NotImplementedError + + @property + def total_tokens(self) -> float: + """ + The total number of tokens available for borrowing. + + This is a read-write property. If the total number of tokens is increased, the + proportionate number of tasks waiting on this limiter will be granted their + tokens. + + .. versionchanged:: 3.0 + The property is now writable. 
+ + """ + raise NotImplementedError + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + raise NotImplementedError + + @property + def borrowed_tokens(self) -> int: + """The number of tokens that have currently been borrowed.""" + raise NotImplementedError + + @property + def available_tokens(self) -> float: + """The number of tokens currently available to be borrowed""" + raise NotImplementedError + + def acquire_nowait(self) -> None: + """ + Acquire a token for the current task without waiting for one to become + available. + + :raises ~anyio.WouldBlock: if there are no tokens available for borrowing + + """ + raise NotImplementedError + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + """ + Acquire a token without waiting for one to become available. + + :param borrower: the entity borrowing a token + :raises ~anyio.WouldBlock: if there are no tokens available for borrowing + + """ + raise NotImplementedError + + async def acquire(self) -> None: + """ + Acquire a token for the current task, waiting if necessary for one to become + available. + + """ + raise NotImplementedError + + async def acquire_on_behalf_of(self, borrower: object) -> None: + """ + Acquire a token, waiting if necessary for one to become available. + + :param borrower: the entity borrowing a token + + """ + raise NotImplementedError + + def release(self) -> None: + """ + Release the token held by the current task. + + :raises RuntimeError: if the current task has not borrowed a token from this + limiter. + + """ + raise NotImplementedError + + def release_on_behalf_of(self, borrower: object) -> None: + """ + Release the token held by the given borrower. + + :raises RuntimeError: if the borrower has not borrowed a token from this + limiter. + + """ + raise NotImplementedError + + def statistics(self) -> CapacityLimiterStatistics: + """ + Return statistics about the current state of this limiter. + + .. 
versionadded:: 3.0 + + """ + raise NotImplementedError + + +class CapacityLimiterAdapter(CapacityLimiter): + _internal_limiter: CapacityLimiter | None = None + + def __new__(cls, total_tokens: float) -> CapacityLimiterAdapter: + return object.__new__(cls) + + def __init__(self, total_tokens: float) -> None: + self.total_tokens = total_tokens + + @property + def _limiter(self) -> CapacityLimiter: + if self._internal_limiter is None: + self._internal_limiter = get_async_backend().create_capacity_limiter( + self._total_tokens + ) + + return self._internal_limiter + + async def __aenter__(self) -> None: + await self._limiter.__aenter__() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + return await self._limiter.__aexit__(exc_type, exc_val, exc_tb) + + @property + def total_tokens(self) -> float: + if self._internal_limiter is None: + return self._total_tokens + + return self._internal_limiter.total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + if not isinstance(value, int) and value is not math.inf: + raise TypeError("total_tokens must be an int or math.inf") + elif value < 1: + raise ValueError("total_tokens must be >= 1") + + if self._internal_limiter is None: + self._total_tokens = value + return + + self._limiter.total_tokens = value + + @property + def borrowed_tokens(self) -> int: + if self._internal_limiter is None: + return 0 + + return self._internal_limiter.borrowed_tokens + + @property + def available_tokens(self) -> float: + if self._internal_limiter is None: + return self._total_tokens + + return self._internal_limiter.available_tokens + + def acquire_nowait(self) -> None: + self._limiter.acquire_nowait() + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + self._limiter.acquire_on_behalf_of_nowait(borrower) + + async def acquire(self) -> None: + await self._limiter.acquire() + + async def 
acquire_on_behalf_of(self, borrower: object) -> None: + await self._limiter.acquire_on_behalf_of(borrower) + + def release(self) -> None: + self._limiter.release() + + def release_on_behalf_of(self, borrower: object) -> None: + self._limiter.release_on_behalf_of(borrower) + + def statistics(self) -> CapacityLimiterStatistics: + if self._internal_limiter is None: + return CapacityLimiterStatistics( + borrowed_tokens=0, + total_tokens=self.total_tokens, + borrowers=(), + tasks_waiting=0, + ) + + return self._internal_limiter.statistics() + + +class ResourceGuard: + """ + A context manager for ensuring that a resource is only used by a single task at a + time. + + Entering this context manager while the previous has not exited it yet will trigger + :exc:`BusyResourceError`. + + :param action: the action to guard against (visible in the :exc:`BusyResourceError` + when triggered, e.g. "Another task is already {action} this resource") + + .. versionadded:: 4.1 + """ + + __slots__ = "action", "_guarded" + + def __init__(self, action: str = "using"): + self.action: str = action + self._guarded = False + + def __enter__(self) -> None: + if self._guarded: + raise BusyResourceError(self.action) + + self._guarded = True + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + self._guarded = False + return None diff --git a/.venv/Lib/site-packages/anyio/_core/_tasks.py b/.venv/Lib/site-packages/anyio/_core/_tasks.py new file mode 100644 index 00000000..2f21ea20 --- /dev/null +++ b/.venv/Lib/site-packages/anyio/_core/_tasks.py @@ -0,0 +1,158 @@ +from __future__ import annotations + +import math +from collections.abc import Generator +from contextlib import contextmanager +from types import TracebackType + +from ..abc._tasks import TaskGroup, TaskStatus +from ._eventloop import get_async_backend + + +class _IgnoredTaskStatus(TaskStatus[object]): + def started(self, value: object = 
None) -> None: + pass + + +TASK_STATUS_IGNORED = _IgnoredTaskStatus() + + +class CancelScope: + """ + Wraps a unit of work that can be made separately cancellable. + + :param deadline: The time (clock value) when this scope is cancelled automatically + :param shield: ``True`` to shield the cancel scope from external cancellation + """ + + def __new__( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + return get_async_backend().create_cancel_scope(shield=shield, deadline=deadline) + + def cancel(self) -> None: + """Cancel this scope immediately.""" + raise NotImplementedError + + @property + def deadline(self) -> float: + """ + The time (clock value) when this scope is cancelled automatically. + + Will be ``float('inf')`` if no timeout has been set. + + """ + raise NotImplementedError + + @deadline.setter + def deadline(self, value: float) -> None: + raise NotImplementedError + + @property + def cancel_called(self) -> bool: + """``True`` if :meth:`cancel` has been called.""" + raise NotImplementedError + + @property + def cancelled_caught(self) -> bool: + """ + ``True`` if this scope suppressed a cancellation exception it itself raised. + + This is typically used to check if any work was interrupted, or to see if the + scope was cancelled due to its deadline being reached. The value will, however, + only be ``True`` if the cancellation was triggered by the scope itself (and not + an outer scope). + + """ + raise NotImplementedError + + @property + def shield(self) -> bool: + """ + ``True`` if this scope is shielded from external cancellation. + + While a scope is shielded, it will not receive cancellations from outside. 
+ + """ + raise NotImplementedError + + @shield.setter + def shield(self, value: bool) -> None: + raise NotImplementedError + + def __enter__(self) -> CancelScope: + raise NotImplementedError + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + raise NotImplementedError + + +@contextmanager +def fail_after( + delay: float | None, shield: bool = False +) -> Generator[CancelScope, None, None]: + """ + Create a context manager which raises a :class:`TimeoutError` if does not finish in + time. + + :param delay: maximum allowed time (in seconds) before raising the exception, or + ``None`` to disable the timeout + :param shield: ``True`` to shield the cancel scope from external cancellation + :return: a context manager that yields a cancel scope + :rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.CancelScope`\\] + + """ + current_time = get_async_backend().current_time + deadline = (current_time() + delay) if delay is not None else math.inf + with get_async_backend().create_cancel_scope( + deadline=deadline, shield=shield + ) as cancel_scope: + yield cancel_scope + + if cancel_scope.cancelled_caught and current_time() >= cancel_scope.deadline: + raise TimeoutError + + +def move_on_after(delay: float | None, shield: bool = False) -> CancelScope: + """ + Create a cancel scope with a deadline that expires after the given delay. 
+ + :param delay: maximum allowed time (in seconds) before exiting the context block, or + ``None`` to disable the timeout + :param shield: ``True`` to shield the cancel scope from external cancellation + :return: a cancel scope + + """ + deadline = ( + (get_async_backend().current_time() + delay) if delay is not None else math.inf + ) + return get_async_backend().create_cancel_scope(deadline=deadline, shield=shield) + + +def current_effective_deadline() -> float: + """ + Return the nearest deadline among all the cancel scopes effective for the current + task. + + :return: a clock value from the event loop's internal clock (or ``float('inf')`` if + there is no deadline in effect, or ``float('-inf')`` if the current scope has + been cancelled) + :rtype: float + + """ + return get_async_backend().current_effective_deadline() + + +def create_task_group() -> TaskGroup: + """ + Create a task group. + + :return: a task group + + """ + return get_async_backend().create_task_group() diff --git a/.venv/Lib/site-packages/anyio/_core/_testing.py b/.venv/Lib/site-packages/anyio/_core/_testing.py new file mode 100644 index 00000000..1dae3b19 --- /dev/null +++ b/.venv/Lib/site-packages/anyio/_core/_testing.py @@ -0,0 +1,74 @@ +from __future__ import annotations + +from collections.abc import Awaitable, Generator +from typing import Any + +from ._eventloop import get_async_backend + + +class TaskInfo: + """ + Represents an asynchronous task. 
+ + :ivar int id: the unique identifier of the task + :ivar parent_id: the identifier of the parent task, if any + :vartype parent_id: Optional[int] + :ivar str name: the description of the task (if any) + :ivar ~collections.abc.Coroutine coro: the coroutine object of the task + """ + + __slots__ = "_name", "id", "parent_id", "name", "coro" + + def __init__( + self, + id: int, + parent_id: int | None, + name: str | None, + coro: Generator[Any, Any, Any] | Awaitable[Any], + ): + func = get_current_task + self._name = f"{func.__module__}.{func.__qualname__}" + self.id: int = id + self.parent_id: int | None = parent_id + self.name: str | None = name + self.coro: Generator[Any, Any, Any] | Awaitable[Any] = coro + + def __eq__(self, other: object) -> bool: + if isinstance(other, TaskInfo): + return self.id == other.id + + return NotImplemented + + def __hash__(self) -> int: + return hash(self.id) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}(id={self.id!r}, name={self.name!r})" + + def _unwrap(self) -> TaskInfo: + return self + + +def get_current_task() -> TaskInfo: + """ + Return the current task. + + :return: a representation of the current task + + """ + return get_async_backend().get_current_task() + + +def get_running_tasks() -> list[TaskInfo]: + """ + Return a list of running tasks in the current event loop. 
+ + :return: a list of task info objects + + """ + return get_async_backend().get_running_tasks() + + +async def wait_all_tasks_blocked() -> None: + """Wait until all other tasks are waiting for something.""" + await get_async_backend().wait_all_tasks_blocked() diff --git a/.venv/Lib/site-packages/anyio/_core/_typedattr.py b/.venv/Lib/site-packages/anyio/_core/_typedattr.py new file mode 100644 index 00000000..74c6b8fd --- /dev/null +++ b/.venv/Lib/site-packages/anyio/_core/_typedattr.py @@ -0,0 +1,81 @@ +from __future__ import annotations + +from collections.abc import Callable, Mapping +from typing import Any, TypeVar, final, overload + +from ._exceptions import TypedAttributeLookupError + +T_Attr = TypeVar("T_Attr") +T_Default = TypeVar("T_Default") +undefined = object() + + +def typed_attribute() -> Any: + """Return a unique object, used to mark typed attributes.""" + return object() + + +class TypedAttributeSet: + """ + Superclass for typed attribute collections. + + Checks that every public attribute of every subclass has a type annotation. + """ + + def __init_subclass__(cls) -> None: + annotations: dict[str, Any] = getattr(cls, "__annotations__", {}) + for attrname in dir(cls): + if not attrname.startswith("_") and attrname not in annotations: + raise TypeError( + f"Attribute {attrname!r} is missing its type annotation" + ) + + super().__init_subclass__() + + +class TypedAttributeProvider: + """Base class for classes that wish to provide typed extra attributes.""" + + @property + def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]: + """ + A mapping of the extra attributes to callables that return the corresponding + values. + + If the provider wraps another provider, the attributes from that wrapper should + also be included in the returned mapping (but the wrapper may override the + callables from the wrapped instance). + + """ + return {} + + @overload + def extra(self, attribute: T_Attr) -> T_Attr: + ... 
+ + @overload + def extra(self, attribute: T_Attr, default: T_Default) -> T_Attr | T_Default: + ... + + @final + def extra(self, attribute: Any, default: object = undefined) -> object: + """ + extra(attribute, default=undefined) + + Return the value of the given typed extra attribute. + + :param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to + look for + :param default: the value that should be returned if no value is found for the + attribute + :raises ~anyio.TypedAttributeLookupError: if the search failed and no default + value was given + + """ + try: + return self.extra_attributes[attribute]() + except KeyError: + if default is undefined: + raise TypedAttributeLookupError("Attribute not found") from None + else: + return default diff --git a/.venv/Lib/site-packages/anyio/abc/__init__.py b/.venv/Lib/site-packages/anyio/abc/__init__.py new file mode 100644 index 00000000..1ca0fcf7 --- /dev/null +++ b/.venv/Lib/site-packages/anyio/abc/__init__.py @@ -0,0 +1,57 @@ +from __future__ import annotations + +from typing import Any + +from ._eventloop import AsyncBackend as AsyncBackend +from ._resources import AsyncResource as AsyncResource +from ._sockets import ConnectedUDPSocket as ConnectedUDPSocket +from ._sockets import ConnectedUNIXDatagramSocket as ConnectedUNIXDatagramSocket +from ._sockets import IPAddressType as IPAddressType +from ._sockets import IPSockAddrType as IPSockAddrType +from ._sockets import SocketAttribute as SocketAttribute +from ._sockets import SocketListener as SocketListener +from ._sockets import SocketStream as SocketStream +from ._sockets import UDPPacketType as UDPPacketType +from ._sockets import UDPSocket as UDPSocket +from ._sockets import UNIXDatagramPacketType as UNIXDatagramPacketType +from ._sockets import UNIXDatagramSocket as UNIXDatagramSocket +from ._sockets import UNIXSocketStream as UNIXSocketStream +from ._streams import AnyByteReceiveStream as AnyByteReceiveStream +from ._streams import 
AnyByteSendStream as AnyByteSendStream +from ._streams import AnyByteStream as AnyByteStream +from ._streams import AnyUnreliableByteReceiveStream as AnyUnreliableByteReceiveStream +from ._streams import AnyUnreliableByteSendStream as AnyUnreliableByteSendStream +from ._streams import AnyUnreliableByteStream as AnyUnreliableByteStream +from ._streams import ByteReceiveStream as ByteReceiveStream +from ._streams import ByteSendStream as ByteSendStream +from ._streams import ByteStream as ByteStream +from ._streams import Listener as Listener +from ._streams import ObjectReceiveStream as ObjectReceiveStream +from ._streams import ObjectSendStream as ObjectSendStream +from ._streams import ObjectStream as ObjectStream +from ._streams import UnreliableObjectReceiveStream as UnreliableObjectReceiveStream +from ._streams import UnreliableObjectSendStream as UnreliableObjectSendStream +from ._streams import UnreliableObjectStream as UnreliableObjectStream +from ._subprocesses import Process as Process +from ._tasks import TaskGroup as TaskGroup +from ._tasks import TaskStatus as TaskStatus +from ._testing import TestRunner as TestRunner + +# Re-exported here, for backwards compatibility +# isort: off +from .._core._synchronization import ( + CapacityLimiter as CapacityLimiter, + Condition as Condition, + Event as Event, + Lock as Lock, + Semaphore as Semaphore, +) +from .._core._tasks import CancelScope as CancelScope +from ..from_thread import BlockingPortal as BlockingPortal + +# Re-export imports so they look like they live directly in this package +key: str +value: Any +for key, value in list(locals().items()): + if getattr(value, "__module__", "").startswith("anyio.abc."): + value.__module__ = __name__ diff --git a/.venv/Lib/site-packages/anyio/abc/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/anyio/abc/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..aaffc9b0 Binary files /dev/null and 
b/.venv/Lib/site-packages/anyio/abc/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/abc/__pycache__/_eventloop.cpython-311.pyc b/.venv/Lib/site-packages/anyio/abc/__pycache__/_eventloop.cpython-311.pyc new file mode 100644 index 00000000..7fe09d73 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/abc/__pycache__/_eventloop.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/abc/__pycache__/_resources.cpython-311.pyc b/.venv/Lib/site-packages/anyio/abc/__pycache__/_resources.cpython-311.pyc new file mode 100644 index 00000000..766cd0a7 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/abc/__pycache__/_resources.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/abc/__pycache__/_sockets.cpython-311.pyc b/.venv/Lib/site-packages/anyio/abc/__pycache__/_sockets.cpython-311.pyc new file mode 100644 index 00000000..09554280 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/abc/__pycache__/_sockets.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/abc/__pycache__/_streams.cpython-311.pyc b/.venv/Lib/site-packages/anyio/abc/__pycache__/_streams.cpython-311.pyc new file mode 100644 index 00000000..268256e0 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/abc/__pycache__/_streams.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-311.pyc b/.venv/Lib/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-311.pyc new file mode 100644 index 00000000..242fb506 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/abc/__pycache__/_tasks.cpython-311.pyc b/.venv/Lib/site-packages/anyio/abc/__pycache__/_tasks.cpython-311.pyc new file mode 100644 index 00000000..e1f0801d Binary files /dev/null and b/.venv/Lib/site-packages/anyio/abc/__pycache__/_tasks.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/anyio/abc/__pycache__/_testing.cpython-311.pyc b/.venv/Lib/site-packages/anyio/abc/__pycache__/_testing.cpython-311.pyc new file mode 100644 index 00000000..c44b44a4 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/abc/__pycache__/_testing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/abc/_eventloop.py b/.venv/Lib/site-packages/anyio/abc/_eventloop.py new file mode 100644 index 00000000..4470d83d --- /dev/null +++ b/.venv/Lib/site-packages/anyio/abc/_eventloop.py @@ -0,0 +1,392 @@ +from __future__ import annotations + +import math +import sys +from abc import ABCMeta, abstractmethod +from collections.abc import AsyncIterator, Awaitable, Mapping +from os import PathLike +from signal import Signals +from socket import AddressFamily, SocketKind, socket +from typing import ( + IO, + TYPE_CHECKING, + Any, + Callable, + ContextManager, + Sequence, + TypeVar, + overload, +) + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +if TYPE_CHECKING: + from typing import Literal + + from .._core._synchronization import CapacityLimiter, Event + from .._core._tasks import CancelScope + from .._core._testing import TaskInfo + from ..from_thread import BlockingPortal + from ._sockets import ( + ConnectedUDPSocket, + ConnectedUNIXDatagramSocket, + IPSockAddrType, + SocketListener, + SocketStream, + UDPSocket, + UNIXDatagramSocket, + UNIXSocketStream, + ) + from ._subprocesses import Process + from ._tasks import TaskGroup + from ._testing import TestRunner + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + + +class AsyncBackend(metaclass=ABCMeta): + @classmethod + @abstractmethod + def run( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + options: dict[str, Any], + ) -> T_Retval: + """ + Run the given coroutine function in an asynchronous event 
loop. + + The current thread must not be already running an event loop. + + :param func: a coroutine function + :param args: positional arguments to ``func`` + :param kwargs: positional arguments to ``func`` + :param options: keyword arguments to call the backend ``run()`` implementation + with + :return: the return value of the coroutine function + """ + + @classmethod + @abstractmethod + def current_token(cls) -> object: + """ + + :return: + """ + + @classmethod + @abstractmethod + def current_time(cls) -> float: + """ + Return the current value of the event loop's internal clock. + + :return: the clock value (seconds) + """ + + @classmethod + @abstractmethod + def cancelled_exception_class(cls) -> type[BaseException]: + """Return the exception class that is raised in a task if it's cancelled.""" + + @classmethod + @abstractmethod + async def checkpoint(cls) -> None: + """ + Check if the task has been cancelled, and allow rescheduling of other tasks. + + This is effectively the same as running :meth:`checkpoint_if_cancelled` and then + :meth:`cancel_shielded_checkpoint`. + """ + + @classmethod + async def checkpoint_if_cancelled(cls) -> None: + """ + Check if the current task group has been cancelled. + + This will check if the task has been cancelled, but will not allow other tasks + to be scheduled if not. + + """ + if cls.current_effective_deadline() == -math.inf: + await cls.checkpoint() + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + """ + Allow the rescheduling of other tasks. + + This will give other tasks the opportunity to run, but without checking if the + current task group has been cancelled, unlike with :meth:`checkpoint`. + + """ + with cls.create_cancel_scope(shield=True): + await cls.sleep(0) + + @classmethod + @abstractmethod + async def sleep(cls, delay: float) -> None: + """ + Pause the current task for the specified duration. 
+ + :param delay: the duration, in seconds + """ + + @classmethod + @abstractmethod + def create_cancel_scope( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + pass + + @classmethod + @abstractmethod + def current_effective_deadline(cls) -> float: + """ + Return the nearest deadline among all the cancel scopes effective for the + current task. + + :return: + - a clock value from the event loop's internal clock + - ``inf`` if there is no deadline in effect + - ``-inf`` if the current scope has been cancelled + :rtype: float + """ + + @classmethod + @abstractmethod + def create_task_group(cls) -> TaskGroup: + pass + + @classmethod + @abstractmethod + def create_event(cls) -> Event: + pass + + @classmethod + @abstractmethod + def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter: + pass + + @classmethod + @abstractmethod + async def run_sync_in_worker_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: CapacityLimiter | None = None, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + def check_cancelled(cls) -> None: + pass + + @classmethod + @abstractmethod + def run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + def create_blocking_portal(cls) -> BlockingPortal: + pass + + @classmethod + @overload + async def open_process( + cls, + command: str | bytes, + *, + shell: Literal[True], + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + cwd: str | bytes | PathLike[str] | None = None, + env: Mapping[str, str] | None = None, + start_new_session: 
bool = False, + ) -> Process: + pass + + @classmethod + @overload + async def open_process( + cls, + command: Sequence[str | bytes], + *, + shell: Literal[False], + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + cwd: str | bytes | PathLike[str] | None = None, + env: Mapping[str, str] | None = None, + start_new_session: bool = False, + ) -> Process: + pass + + @classmethod + @abstractmethod + async def open_process( + cls, + command: str | bytes | Sequence[str | bytes], + *, + shell: bool, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + cwd: str | bytes | PathLike[str] | None = None, + env: Mapping[str, str] | None = None, + start_new_session: bool = False, + ) -> Process: + pass + + @classmethod + @abstractmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[Process]) -> None: + pass + + @classmethod + @abstractmethod + async def connect_tcp( + cls, host: str, port: int, local_address: IPSockAddrType | None = None + ) -> SocketStream: + pass + + @classmethod + @abstractmethod + async def connect_unix(cls, path: str | bytes) -> UNIXSocketStream: + pass + + @classmethod + @abstractmethod + def create_tcp_listener(cls, sock: socket) -> SocketListener: + pass + + @classmethod + @abstractmethod + def create_unix_listener(cls, sock: socket) -> SocketListener: + pass + + @classmethod + @abstractmethod + async def create_udp_socket( + cls, + family: AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + pass + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: None + ) -> UNIXDatagramSocket: + ... + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: str | bytes + ) -> ConnectedUNIXDatagramSocket: + ... 
+ + @classmethod + @abstractmethod + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: str | bytes | None + ) -> UNIXDatagramSocket | ConnectedUNIXDatagramSocket: + pass + + @classmethod + @abstractmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> list[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int], + ] + ]: + pass + + @classmethod + @abstractmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + pass + + @classmethod + @abstractmethod + async def wait_socket_readable(cls, sock: socket) -> None: + pass + + @classmethod + @abstractmethod + async def wait_socket_writable(cls, sock: socket) -> None: + pass + + @classmethod + @abstractmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + pass + + @classmethod + @abstractmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> ContextManager[AsyncIterator[Signals]]: + pass + + @classmethod + @abstractmethod + def get_current_task(cls) -> TaskInfo: + pass + + @classmethod + @abstractmethod + def get_running_tasks(cls) -> list[TaskInfo]: + pass + + @classmethod + @abstractmethod + async def wait_all_tasks_blocked(cls) -> None: + pass + + @classmethod + @abstractmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + pass diff --git a/.venv/Lib/site-packages/anyio/abc/_resources.py b/.venv/Lib/site-packages/anyio/abc/_resources.py new file mode 100644 index 00000000..9693835b --- /dev/null +++ b/.venv/Lib/site-packages/anyio/abc/_resources.py @@ -0,0 +1,31 @@ +from __future__ import annotations + +from abc import ABCMeta, abstractmethod +from types import TracebackType +from typing import TypeVar + +T = TypeVar("T") + + +class AsyncResource(metaclass=ABCMeta): + """ + Abstract base class 
for all closeable asynchronous resources. + + Works as an asynchronous context manager which returns the instance itself on enter, + and calls :meth:`aclose` on exit. + """ + + async def __aenter__(self: T) -> T: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + await self.aclose() + + @abstractmethod + async def aclose(self) -> None: + """Close the resource.""" diff --git a/.venv/Lib/site-packages/anyio/abc/_sockets.py b/.venv/Lib/site-packages/anyio/abc/_sockets.py new file mode 100644 index 00000000..b321225a --- /dev/null +++ b/.venv/Lib/site-packages/anyio/abc/_sockets.py @@ -0,0 +1,194 @@ +from __future__ import annotations + +import socket +from abc import abstractmethod +from collections.abc import Callable, Collection, Mapping +from contextlib import AsyncExitStack +from io import IOBase +from ipaddress import IPv4Address, IPv6Address +from socket import AddressFamily +from types import TracebackType +from typing import Any, Tuple, TypeVar, Union + +from .._core._typedattr import ( + TypedAttributeProvider, + TypedAttributeSet, + typed_attribute, +) +from ._streams import ByteStream, Listener, UnreliableObjectStream +from ._tasks import TaskGroup + +IPAddressType = Union[str, IPv4Address, IPv6Address] +IPSockAddrType = Tuple[str, int] +SockAddrType = Union[IPSockAddrType, str] +UDPPacketType = Tuple[bytes, IPSockAddrType] +UNIXDatagramPacketType = Tuple[bytes, str] +T_Retval = TypeVar("T_Retval") + + +class _NullAsyncContextManager: + async def __aenter__(self) -> None: + pass + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + return None + + +class SocketAttribute(TypedAttributeSet): + #: the address family of the underlying socket + family: AddressFamily = typed_attribute() + #: the local socket address of the underlying socket + 
local_address: SockAddrType = typed_attribute() + #: for IP addresses, the local port the underlying socket is bound to + local_port: int = typed_attribute() + #: the underlying stdlib socket object + raw_socket: socket.socket = typed_attribute() + #: the remote address the underlying socket is connected to + remote_address: SockAddrType = typed_attribute() + #: for IP addresses, the remote port the underlying socket is connected to + remote_port: int = typed_attribute() + + +class _SocketProvider(TypedAttributeProvider): + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + from .._core._sockets import convert_ipv6_sockaddr as convert + + attributes: dict[Any, Callable[[], Any]] = { + SocketAttribute.family: lambda: self._raw_socket.family, + SocketAttribute.local_address: lambda: convert( + self._raw_socket.getsockname() + ), + SocketAttribute.raw_socket: lambda: self._raw_socket, + } + try: + peername: tuple[str, int] | None = convert(self._raw_socket.getpeername()) + except OSError: + peername = None + + # Provide the remote address for connected sockets + if peername is not None: + attributes[SocketAttribute.remote_address] = lambda: peername + + # Provide local and remote ports for IP based sockets + if self._raw_socket.family in (AddressFamily.AF_INET, AddressFamily.AF_INET6): + attributes[SocketAttribute.local_port] = ( + lambda: self._raw_socket.getsockname()[1] + ) + if peername is not None: + remote_port = peername[1] + attributes[SocketAttribute.remote_port] = lambda: remote_port + + return attributes + + @property + @abstractmethod + def _raw_socket(self) -> socket.socket: + pass + + +class SocketStream(ByteStream, _SocketProvider): + """ + Transports bytes over a socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. 
+ """ + + +class UNIXSocketStream(SocketStream): + @abstractmethod + async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: + """ + Send file descriptors along with a message to the peer. + + :param message: a non-empty bytestring + :param fds: a collection of files (either numeric file descriptors or open file + or socket objects) + """ + + @abstractmethod + async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: + """ + Receive file descriptors along with a message from the peer. + + :param msglen: length of the message to expect from the peer + :param maxfds: maximum number of file descriptors to expect from the peer + :return: a tuple of (message, file descriptors) + """ + + +class SocketListener(Listener[SocketStream], _SocketProvider): + """ + Listens to incoming socket connections. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + @abstractmethod + async def accept(self) -> SocketStream: + """Accept an incoming connection.""" + + async def serve( + self, + handler: Callable[[SocketStream], Any], + task_group: TaskGroup | None = None, + ) -> None: + from .. import create_task_group + + async with AsyncExitStack() as stack: + if task_group is None: + task_group = await stack.enter_async_context(create_task_group()) + + while True: + stream = await self.accept() + task_group.start_soon(handler, stream) + + +class UDPSocket(UnreliableObjectStream[UDPPacketType], _SocketProvider): + """ + Represents an unconnected UDP socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + async def sendto(self, data: bytes, host: str, port: int) -> None: + """ + Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, (host, port))). + + """ + return await self.send((data, (host, port))) + + +class ConnectedUDPSocket(UnreliableObjectStream[bytes], _SocketProvider): + """ + Represents an connected UDP socket. 
+ + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + +class UNIXDatagramSocket( + UnreliableObjectStream[UNIXDatagramPacketType], _SocketProvider +): + """ + Represents an unconnected Unix datagram socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + async def sendto(self, data: bytes, path: str) -> None: + """Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, path)).""" + return await self.send((data, path)) + + +class ConnectedUNIXDatagramSocket(UnreliableObjectStream[bytes], _SocketProvider): + """ + Represents a connected Unix datagram socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ diff --git a/.venv/Lib/site-packages/anyio/abc/_streams.py b/.venv/Lib/site-packages/anyio/abc/_streams.py new file mode 100644 index 00000000..8c638683 --- /dev/null +++ b/.venv/Lib/site-packages/anyio/abc/_streams.py @@ -0,0 +1,203 @@ +from __future__ import annotations + +from abc import abstractmethod +from collections.abc import Callable +from typing import Any, Generic, TypeVar, Union + +from .._core._exceptions import EndOfStream +from .._core._typedattr import TypedAttributeProvider +from ._resources import AsyncResource +from ._tasks import TaskGroup + +T_Item = TypeVar("T_Item") +T_co = TypeVar("T_co", covariant=True) +T_contra = TypeVar("T_contra", contravariant=True) + + +class UnreliableObjectReceiveStream( + Generic[T_co], AsyncResource, TypedAttributeProvider +): + """ + An interface for receiving objects. + + This interface makes no guarantees that the received messages arrive in the order in + which they were sent, or that no messages are missed. + + Asynchronously iterating over objects of this type will yield objects matching the + given type parameter. 
+ """ + + def __aiter__(self) -> UnreliableObjectReceiveStream[T_co]: + return self + + async def __anext__(self) -> T_co: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration + + @abstractmethod + async def receive(self) -> T_co: + """ + Receive the next item. + + :raises ~anyio.ClosedResourceError: if the receive stream has been explicitly + closed + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectSendStream( + Generic[T_contra], AsyncResource, TypedAttributeProvider +): + """ + An interface for sending objects. + + This interface makes no guarantees that the messages sent will reach the + recipient(s) in the same order in which they were sent, or at all. + """ + + @abstractmethod + async def send(self, item: T_contra) -> None: + """ + Send an item to the peer(s). + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if the send stream has been explicitly + closed + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectStream( + UnreliableObjectReceiveStream[T_Item], UnreliableObjectSendStream[T_Item] +): + """ + A bidirectional message stream which does not guarantee the order or reliability of + message delivery. + """ + + +class ObjectReceiveStream(UnreliableObjectReceiveStream[T_co]): + """ + A receive message stream which guarantees that messages are received in the same + order in which they were sent, and that no messages are missed. + """ + + +class ObjectSendStream(UnreliableObjectSendStream[T_contra]): + """ + A send message stream which guarantees that messages are delivered in the same order + in which they were sent, without missing any messages in the middle. 
+ """ + + +class ObjectStream( + ObjectReceiveStream[T_Item], + ObjectSendStream[T_Item], + UnreliableObjectStream[T_Item], +): + """ + A bidirectional message stream which guarantees the order and reliability of message + delivery. + """ + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this + method. This method is idempotent (does nothing on successive calls). + """ + + +class ByteReceiveStream(AsyncResource, TypedAttributeProvider): + """ + An interface for receiving bytes from a single peer. + + Iterating this byte stream will yield a byte string of arbitrary length, but no more + than 65536 bytes. + """ + + def __aiter__(self) -> ByteReceiveStream: + return self + + async def __anext__(self) -> bytes: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration + + @abstractmethod + async def receive(self, max_bytes: int = 65536) -> bytes: + """ + Receive at most ``max_bytes`` bytes from the peer. + + .. note:: Implementors of this interface should not return an empty + :class:`bytes` object, and users should ignore them. + + :param max_bytes: maximum number of bytes to receive + :return: the received bytes + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + """ + + +class ByteSendStream(AsyncResource, TypedAttributeProvider): + """An interface for sending bytes to a single peer.""" + + @abstractmethod + async def send(self, item: bytes) -> None: + """ + Send the given bytes to the peer. + + :param item: the bytes to send + """ + + +class ByteStream(ByteReceiveStream, ByteSendStream): + """A bidirectional byte stream.""" + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this + method. 
This method is idempotent (does nothing on successive calls). + """ + + +#: Type alias for all unreliable bytes-oriented receive streams. +AnyUnreliableByteReceiveStream = Union[ + UnreliableObjectReceiveStream[bytes], ByteReceiveStream +] +#: Type alias for all unreliable bytes-oriented send streams. +AnyUnreliableByteSendStream = Union[UnreliableObjectSendStream[bytes], ByteSendStream] +#: Type alias for all unreliable bytes-oriented streams. +AnyUnreliableByteStream = Union[UnreliableObjectStream[bytes], ByteStream] +#: Type alias for all bytes-oriented receive streams. +AnyByteReceiveStream = Union[ObjectReceiveStream[bytes], ByteReceiveStream] +#: Type alias for all bytes-oriented send streams. +AnyByteSendStream = Union[ObjectSendStream[bytes], ByteSendStream] +#: Type alias for all bytes-oriented streams. +AnyByteStream = Union[ObjectStream[bytes], ByteStream] + + +class Listener(Generic[T_co], AsyncResource, TypedAttributeProvider): + """An interface for objects that let you accept incoming connections.""" + + @abstractmethod + async def serve( + self, handler: Callable[[T_co], Any], task_group: TaskGroup | None = None + ) -> None: + """ + Accept incoming connections as they come in and start tasks to handle them. 
+ + :param handler: a callable that will be used to handle each accepted connection + :param task_group: the task group that will be used to start tasks for handling + each accepted connection (if omitted, an ad-hoc task group will be created) + """ diff --git a/.venv/Lib/site-packages/anyio/abc/_subprocesses.py b/.venv/Lib/site-packages/anyio/abc/_subprocesses.py new file mode 100644 index 00000000..ce0564ce --- /dev/null +++ b/.venv/Lib/site-packages/anyio/abc/_subprocesses.py @@ -0,0 +1,79 @@ +from __future__ import annotations + +from abc import abstractmethod +from signal import Signals + +from ._resources import AsyncResource +from ._streams import ByteReceiveStream, ByteSendStream + + +class Process(AsyncResource): + """An asynchronous version of :class:`subprocess.Popen`.""" + + @abstractmethod + async def wait(self) -> int: + """ + Wait until the process exits. + + :return: the exit code of the process + """ + + @abstractmethod + def terminate(self) -> None: + """ + Terminates the process, gracefully if possible. + + On Windows, this calls ``TerminateProcess()``. + On POSIX systems, this sends ``SIGTERM`` to the process. + + .. seealso:: :meth:`subprocess.Popen.terminate` + """ + + @abstractmethod + def kill(self) -> None: + """ + Kills the process. + + On Windows, this calls ``TerminateProcess()``. + On POSIX systems, this sends ``SIGKILL`` to the process. + + .. seealso:: :meth:`subprocess.Popen.kill` + """ + + @abstractmethod + def send_signal(self, signal: Signals) -> None: + """ + Send a signal to the subprocess. + + .. seealso:: :meth:`subprocess.Popen.send_signal` + + :param signal: the signal number (e.g. :data:`signal.SIGHUP`) + """ + + @property + @abstractmethod + def pid(self) -> int: + """The process ID of the process.""" + + @property + @abstractmethod + def returncode(self) -> int | None: + """ + The return code of the process. If the process has not yet terminated, this will + be ``None``. 
+ """ + + @property + @abstractmethod + def stdin(self) -> ByteSendStream | None: + """The stream for the standard input of the process.""" + + @property + @abstractmethod + def stdout(self) -> ByteReceiveStream | None: + """The stream for the standard output of the process.""" + + @property + @abstractmethod + def stderr(self) -> ByteReceiveStream | None: + """The stream for the standard error output of the process.""" diff --git a/.venv/Lib/site-packages/anyio/abc/_tasks.py b/.venv/Lib/site-packages/anyio/abc/_tasks.py new file mode 100644 index 00000000..7ad4938c --- /dev/null +++ b/.venv/Lib/site-packages/anyio/abc/_tasks.py @@ -0,0 +1,97 @@ +from __future__ import annotations + +import sys +from abc import ABCMeta, abstractmethod +from collections.abc import Awaitable, Callable +from types import TracebackType +from typing import TYPE_CHECKING, Any, Protocol, TypeVar, overload + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +if TYPE_CHECKING: + from .._core._tasks import CancelScope + +T_Retval = TypeVar("T_Retval") +T_contra = TypeVar("T_contra", contravariant=True) +PosArgsT = TypeVarTuple("PosArgsT") + + +class TaskStatus(Protocol[T_contra]): + @overload + def started(self: TaskStatus[None]) -> None: + ... + + @overload + def started(self, value: T_contra) -> None: + ... + + def started(self, value: T_contra | None = None) -> None: + """ + Signal that the task has started. + + :param value: object passed back to the starter of the task + """ + + +class TaskGroup(metaclass=ABCMeta): + """ + Groups several asynchronous tasks together. 
+ + :ivar cancel_scope: the cancel scope inherited by all child tasks + :vartype cancel_scope: CancelScope + """ + + cancel_scope: CancelScope + + @abstractmethod + def start_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> None: + """ + Start a new task in this task group. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + + .. versionadded:: 3.0 + """ + + @abstractmethod + async def start( + self, + func: Callable[..., Awaitable[Any]], + *args: object, + name: object = None, + ) -> Any: + """ + Start a new task and wait until it signals for readiness. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + :return: the value passed to ``task_status.started()`` + :raises RuntimeError: if the task finishes without calling + ``task_status.started()`` + + .. 
versionadded:: 3.0 + """ + + @abstractmethod + async def __aenter__(self) -> TaskGroup: + """Enter the task group context and allow starting new tasks.""" + + @abstractmethod + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + """Exit the task group context waiting for all tasks to finish.""" diff --git a/.venv/Lib/site-packages/anyio/abc/_testing.py b/.venv/Lib/site-packages/anyio/abc/_testing.py new file mode 100644 index 00000000..4d70b9ec --- /dev/null +++ b/.venv/Lib/site-packages/anyio/abc/_testing.py @@ -0,0 +1,66 @@ +from __future__ import annotations + +import types +from abc import ABCMeta, abstractmethod +from collections.abc import AsyncGenerator, Callable, Coroutine, Iterable +from typing import Any, TypeVar + +_T = TypeVar("_T") + + +class TestRunner(metaclass=ABCMeta): + """ + Encapsulates a running event loop. Every call made through this object will use the + same event loop. + """ + + def __enter__(self) -> TestRunner: + return self + + @abstractmethod + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> bool | None: + ... + + @abstractmethod + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[_T, Any]], + kwargs: dict[str, Any], + ) -> Iterable[_T]: + """ + Run an async generator fixture. + + :param fixture_func: the fixture function + :param kwargs: keyword arguments to call the fixture function with + :return: an iterator yielding the value yielded from the async generator + """ + + @abstractmethod + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, _T]], + kwargs: dict[str, Any], + ) -> _T: + """ + Run an async fixture. 
+ + :param fixture_func: the fixture function + :param kwargs: keyword arguments to call the fixture function with + :return: the return value of the fixture function + """ + + @abstractmethod + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] + ) -> None: + """ + Run an async test function. + + :param test_func: the test function + :param kwargs: keyword arguments to call the test function with + """ diff --git a/.venv/Lib/site-packages/anyio/from_thread.py b/.venv/Lib/site-packages/anyio/from_thread.py new file mode 100644 index 00000000..4a987031 --- /dev/null +++ b/.venv/Lib/site-packages/anyio/from_thread.py @@ -0,0 +1,476 @@ +from __future__ import annotations + +import sys +import threading +from collections.abc import Awaitable, Callable, Generator +from concurrent.futures import FIRST_COMPLETED, Future, ThreadPoolExecutor, wait +from contextlib import AbstractContextManager, contextmanager +from inspect import isawaitable +from types import TracebackType +from typing import ( + Any, + AsyncContextManager, + ContextManager, + Generic, + Iterable, + TypeVar, + cast, + overload, +) + +from ._core import _eventloop +from ._core._eventloop import get_async_backend, get_cancelled_exc_class, threadlocals +from ._core._synchronization import Event +from ._core._tasks import CancelScope, create_task_group +from .abc import AsyncBackend +from .abc._tasks import TaskStatus + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +T_Retval = TypeVar("T_Retval") +T_co = TypeVar("T_co", covariant=True) +PosArgsT = TypeVarTuple("PosArgsT") + + +def run( + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], *args: Unpack[PosArgsT] +) -> T_Retval: + """ + Call a coroutine function from a worker thread. 
+ + :param func: a coroutine function + :param args: positional arguments for the callable + :return: the return value of the coroutine function + + """ + try: + async_backend = threadlocals.current_async_backend + token = threadlocals.current_token + except AttributeError: + raise RuntimeError( + "This function can only be run from an AnyIO worker thread" + ) from None + + return async_backend.run_async_from_thread(func, args, token=token) + + +def run_sync( + func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] +) -> T_Retval: + """ + Call a function in the event loop thread from a worker thread. + + :param func: a callable + :param args: positional arguments for the callable + :return: the return value of the callable + + """ + try: + async_backend = threadlocals.current_async_backend + token = threadlocals.current_token + except AttributeError: + raise RuntimeError( + "This function can only be run from an AnyIO worker thread" + ) from None + + return async_backend.run_sync_from_thread(func, args, token=token) + + +class _BlockingAsyncContextManager(Generic[T_co], AbstractContextManager): + _enter_future: Future[T_co] + _exit_future: Future[bool | None] + _exit_event: Event + _exit_exc_info: tuple[ + type[BaseException] | None, BaseException | None, TracebackType | None + ] = (None, None, None) + + def __init__(self, async_cm: AsyncContextManager[T_co], portal: BlockingPortal): + self._async_cm = async_cm + self._portal = portal + + async def run_async_cm(self) -> bool | None: + try: + self._exit_event = Event() + value = await self._async_cm.__aenter__() + except BaseException as exc: + self._enter_future.set_exception(exc) + raise + else: + self._enter_future.set_result(value) + + try: + # Wait for the sync context manager to exit. + # This next statement can raise `get_cancelled_exc_class()` if + # something went wrong in a task group in this async context + # manager. 
+ await self._exit_event.wait() + finally: + # In case of cancellation, it could be that we end up here before + # `_BlockingAsyncContextManager.__exit__` is called, and an + # `_exit_exc_info` has been set. + result = await self._async_cm.__aexit__(*self._exit_exc_info) + return result + + def __enter__(self) -> T_co: + self._enter_future = Future() + self._exit_future = self._portal.start_task_soon(self.run_async_cm) + return self._enter_future.result() + + def __exit__( + self, + __exc_type: type[BaseException] | None, + __exc_value: BaseException | None, + __traceback: TracebackType | None, + ) -> bool | None: + self._exit_exc_info = __exc_type, __exc_value, __traceback + self._portal.call(self._exit_event.set) + return self._exit_future.result() + + +class _BlockingPortalTaskStatus(TaskStatus): + def __init__(self, future: Future): + self._future = future + + def started(self, value: object = None) -> None: + self._future.set_result(value) + + +class BlockingPortal: + """An object that lets external threads run code in an asynchronous event loop.""" + + def __new__(cls) -> BlockingPortal: + return get_async_backend().create_blocking_portal() + + def __init__(self) -> None: + self._event_loop_thread_id: int | None = threading.get_ident() + self._stop_event = Event() + self._task_group = create_task_group() + self._cancelled_exc_class = get_cancelled_exc_class() + + async def __aenter__(self) -> BlockingPortal: + await self._task_group.__aenter__() + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + await self.stop() + return await self._task_group.__aexit__(exc_type, exc_val, exc_tb) + + def _check_running(self) -> None: + if self._event_loop_thread_id is None: + raise RuntimeError("This portal is not running") + if self._event_loop_thread_id == threading.get_ident(): + raise RuntimeError( + "This method cannot be called from the event loop 
thread" + ) + + async def sleep_until_stopped(self) -> None: + """Sleep until :meth:`stop` is called.""" + await self._stop_event.wait() + + async def stop(self, cancel_remaining: bool = False) -> None: + """ + Signal the portal to shut down. + + This marks the portal as no longer accepting new calls and exits from + :meth:`sleep_until_stopped`. + + :param cancel_remaining: ``True`` to cancel all the remaining tasks, ``False`` + to let them finish before returning + + """ + self._event_loop_thread_id = None + self._stop_event.set() + if cancel_remaining: + self._task_group.cancel_scope.cancel() + + async def _call_func( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + future: Future[T_Retval], + ) -> None: + def callback(f: Future[T_Retval]) -> None: + if f.cancelled() and self._event_loop_thread_id not in ( + None, + threading.get_ident(), + ): + self.call(scope.cancel) + + try: + retval_or_awaitable = func(*args, **kwargs) + if isawaitable(retval_or_awaitable): + with CancelScope() as scope: + if future.cancelled(): + scope.cancel() + else: + future.add_done_callback(callback) + + retval = await retval_or_awaitable + else: + retval = retval_or_awaitable + except self._cancelled_exc_class: + future.cancel() + future.set_running_or_notify_cancel() + except BaseException as exc: + if not future.cancelled(): + future.set_exception(exc) + + # Let base exceptions fall through + if not isinstance(exc, Exception): + raise + else: + if not future.cancelled(): + future.set_result(retval) + finally: + scope = None # type: ignore[assignment] + + def _spawn_task_from_thread( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + name: object, + future: Future[T_Retval], + ) -> None: + """ + Spawn a new task using the given callable. 
+ + Implementors must ensure that the future is resolved when the task finishes. + + :param func: a callable + :param args: positional arguments to be passed to the callable + :param kwargs: keyword arguments to be passed to the callable + :param name: name of the task (will be coerced to a string if not ``None``) + :param future: a future that will resolve to the return value of the callable, + or the exception raised during its execution + + """ + raise NotImplementedError + + @overload + def call( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], + ) -> T_Retval: + ... + + @overload + def call( + self, func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] + ) -> T_Retval: + ... + + def call( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + *args: Unpack[PosArgsT], + ) -> T_Retval: + """ + Call the given function in the event loop thread. + + If the callable returns a coroutine object, it is awaited on. + + :param func: any callable + :raises RuntimeError: if the portal is not running or if this method is called + from within the event loop thread + + """ + return cast(T_Retval, self.start_task_soon(func, *args).result()) + + @overload + def start_task_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> Future[T_Retval]: + ... + + @overload + def start_task_soon( + self, + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + name: object = None, + ) -> Future[T_Retval]: + ... + + def start_task_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + *args: Unpack[PosArgsT], + name: object = None, + ) -> Future[T_Retval]: + """ + Start a task in the portal's task group. + + The task will be run inside a cancel scope which can be cancelled by cancelling + the returned future. 
+ + :param func: the target function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a future that resolves with the return value of the callable if the + task completes successfully, or with the exception raised in the task + :raises RuntimeError: if the portal is not running or if this method is called + from within the event loop thread + :rtype: concurrent.futures.Future[T_Retval] + + .. versionadded:: 3.0 + + """ + self._check_running() + f: Future[T_Retval] = Future() + self._spawn_task_from_thread(func, args, {}, name, f) + return f + + def start_task( + self, + func: Callable[..., Awaitable[T_Retval]], + *args: object, + name: object = None, + ) -> tuple[Future[T_Retval], Any]: + """ + Start a task in the portal's task group and wait until it signals for readiness. + + This method works the same way as :meth:`.abc.TaskGroup.start`. + + :param func: the target function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a tuple of (future, task_status_value) where the ``task_status_value`` + is the value passed to ``task_status.started()`` from within the target + function + :rtype: tuple[concurrent.futures.Future[T_Retval], Any] + + .. 
versionadded:: 3.0 + + """ + + def task_done(future: Future[T_Retval]) -> None: + if not task_status_future.done(): + if future.cancelled(): + task_status_future.cancel() + elif future.exception(): + task_status_future.set_exception(future.exception()) + else: + exc = RuntimeError( + "Task exited without calling task_status.started()" + ) + task_status_future.set_exception(exc) + + self._check_running() + task_status_future: Future = Future() + task_status = _BlockingPortalTaskStatus(task_status_future) + f: Future = Future() + f.add_done_callback(task_done) + self._spawn_task_from_thread(func, args, {"task_status": task_status}, name, f) + return f, task_status_future.result() + + def wrap_async_context_manager( + self, cm: AsyncContextManager[T_co] + ) -> ContextManager[T_co]: + """ + Wrap an async context manager as a synchronous context manager via this portal. + + Spawns a task that will call both ``__aenter__()`` and ``__aexit__()``, stopping + in the middle until the synchronous context manager exits. + + :param cm: an asynchronous context manager + :return: a synchronous context manager + + .. versionadded:: 2.1 + + """ + return _BlockingAsyncContextManager(cm, self) + + +@contextmanager +def start_blocking_portal( + backend: str = "asyncio", backend_options: dict[str, Any] | None = None +) -> Generator[BlockingPortal, Any, None]: + """ + Start a new event loop in a new thread and run a blocking portal in its main task. + + The parameters are the same as for :func:`~anyio.run`. + + :param backend: name of the backend + :param backend_options: backend options + :return: a context manager that yields a blocking portal + + .. versionchanged:: 3.0 + Usage as a context manager is now required. 
+ + """ + + async def run_portal() -> None: + async with BlockingPortal() as portal_: + if future.set_running_or_notify_cancel(): + future.set_result(portal_) + await portal_.sleep_until_stopped() + + future: Future[BlockingPortal] = Future() + with ThreadPoolExecutor(1) as executor: + run_future = executor.submit( + _eventloop.run, # type: ignore[arg-type] + run_portal, + backend=backend, + backend_options=backend_options, + ) + try: + wait( + cast(Iterable[Future], [run_future, future]), + return_when=FIRST_COMPLETED, + ) + except BaseException: + future.cancel() + run_future.cancel() + raise + + if future.done(): + portal = future.result() + cancel_remaining_tasks = False + try: + yield portal + except BaseException: + cancel_remaining_tasks = True + raise + finally: + try: + portal.call(portal.stop, cancel_remaining_tasks) + except RuntimeError: + pass + + run_future.result() + + +def check_cancelled() -> None: + """ + Check if the cancel scope of the host task's running the current worker thread has + been cancelled. + + If the host task's current cancel scope has indeed been cancelled, the + backend-specific cancellation exception will be raised. 
+ + :raises RuntimeError: if the current thread was not spawned by + :func:`.to_thread.run_sync` + + """ + try: + async_backend: AsyncBackend = threadlocals.current_async_backend + except AttributeError: + raise RuntimeError( + "This function can only be run from an AnyIO worker thread" + ) from None + + async_backend.check_cancelled() diff --git a/.venv/Lib/site-packages/anyio/lowlevel.py b/.venv/Lib/site-packages/anyio/lowlevel.py new file mode 100644 index 00000000..a9e10f43 --- /dev/null +++ b/.venv/Lib/site-packages/anyio/lowlevel.py @@ -0,0 +1,163 @@ +from __future__ import annotations + +import enum +from dataclasses import dataclass +from typing import Any, Generic, Literal, TypeVar, overload +from weakref import WeakKeyDictionary + +from ._core._eventloop import get_async_backend + +T = TypeVar("T") +D = TypeVar("D") + + +async def checkpoint() -> None: + """ + Check for cancellation and allow the scheduler to switch to another task. + + Equivalent to (but more efficient than):: + + await checkpoint_if_cancelled() + await cancel_shielded_checkpoint() + + + .. versionadded:: 3.0 + + """ + await get_async_backend().checkpoint() + + +async def checkpoint_if_cancelled() -> None: + """ + Enter a checkpoint if the enclosing cancel scope has been cancelled. + + This does not allow the scheduler to switch to a different task. + + .. versionadded:: 3.0 + + """ + await get_async_backend().checkpoint_if_cancelled() + + +async def cancel_shielded_checkpoint() -> None: + """ + Allow the scheduler to switch to another task but without checking for cancellation. + + Equivalent to (but potentially more efficient than):: + + with CancelScope(shield=True): + await checkpoint() + + + .. versionadded:: 3.0 + + """ + await get_async_backend().cancel_shielded_checkpoint() + + +def current_token() -> object: + """ + Return a backend specific token object that can be used to get back to the event + loop. 
+ + """ + return get_async_backend().current_token() + + +_run_vars: WeakKeyDictionary[Any, dict[str, Any]] = WeakKeyDictionary() +_token_wrappers: dict[Any, _TokenWrapper] = {} + + +@dataclass(frozen=True) +class _TokenWrapper: + __slots__ = "_token", "__weakref__" + _token: object + + +class _NoValueSet(enum.Enum): + NO_VALUE_SET = enum.auto() + + +class RunvarToken(Generic[T]): + __slots__ = "_var", "_value", "_redeemed" + + def __init__(self, var: RunVar[T], value: T | Literal[_NoValueSet.NO_VALUE_SET]): + self._var = var + self._value: T | Literal[_NoValueSet.NO_VALUE_SET] = value + self._redeemed = False + + +class RunVar(Generic[T]): + """ + Like a :class:`~contextvars.ContextVar`, except scoped to the running event loop. + """ + + __slots__ = "_name", "_default" + + NO_VALUE_SET: Literal[_NoValueSet.NO_VALUE_SET] = _NoValueSet.NO_VALUE_SET + + _token_wrappers: set[_TokenWrapper] = set() + + def __init__( + self, name: str, default: T | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET + ): + self._name = name + self._default = default + + @property + def _current_vars(self) -> dict[str, T]: + token = current_token() + try: + return _run_vars[token] + except KeyError: + run_vars = _run_vars[token] = {} + return run_vars + + @overload + def get(self, default: D) -> T | D: + ... + + @overload + def get(self) -> T: + ... 
+ + def get( + self, default: D | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET + ) -> T | D: + try: + return self._current_vars[self._name] + except KeyError: + if default is not RunVar.NO_VALUE_SET: + return default + elif self._default is not RunVar.NO_VALUE_SET: + return self._default + + raise LookupError( + f'Run variable "{self._name}" has no value and no default set' + ) + + def set(self, value: T) -> RunvarToken[T]: + current_vars = self._current_vars + token = RunvarToken(self, current_vars.get(self._name, RunVar.NO_VALUE_SET)) + current_vars[self._name] = value + return token + + def reset(self, token: RunvarToken[T]) -> None: + if token._var is not self: + raise ValueError("This token does not belong to this RunVar") + + if token._redeemed: + raise ValueError("This token has already been used") + + if token._value is _NoValueSet.NO_VALUE_SET: + try: + del self._current_vars[self._name] + except KeyError: + pass + else: + self._current_vars[self._name] = token._value + + token._redeemed = True + + def __repr__(self) -> str: + return f"" diff --git a/.venv/Lib/site-packages/anyio/py.typed b/.venv/Lib/site-packages/anyio/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/anyio/pytest_plugin.py b/.venv/Lib/site-packages/anyio/pytest_plugin.py new file mode 100644 index 00000000..a8dd6f3e --- /dev/null +++ b/.venv/Lib/site-packages/anyio/pytest_plugin.py @@ -0,0 +1,149 @@ +from __future__ import annotations + +from collections.abc import Iterator +from contextlib import ExitStack, contextmanager +from inspect import isasyncgenfunction, iscoroutinefunction +from typing import Any, Dict, Tuple, cast + +import pytest +import sniffio + +from ._core._eventloop import get_all_backends, get_async_backend +from .abc import TestRunner + +_current_runner: TestRunner | None = None +_runner_stack: ExitStack | None = None +_runner_leases = 0 + + +def extract_backend_and_options(backend: object) -> tuple[str, dict[str, Any]]: + 
if isinstance(backend, str): + return backend, {} + elif isinstance(backend, tuple) and len(backend) == 2: + if isinstance(backend[0], str) and isinstance(backend[1], dict): + return cast(Tuple[str, Dict[str, Any]], backend) + + raise TypeError("anyio_backend must be either a string or tuple of (string, dict)") + + +@contextmanager +def get_runner( + backend_name: str, backend_options: dict[str, Any] +) -> Iterator[TestRunner]: + global _current_runner, _runner_leases, _runner_stack + if _current_runner is None: + asynclib = get_async_backend(backend_name) + _runner_stack = ExitStack() + if sniffio.current_async_library_cvar.get(None) is None: + # Since we're in control of the event loop, we can cache the name of the + # async library + token = sniffio.current_async_library_cvar.set(backend_name) + _runner_stack.callback(sniffio.current_async_library_cvar.reset, token) + + backend_options = backend_options or {} + _current_runner = _runner_stack.enter_context( + asynclib.create_test_runner(backend_options) + ) + + _runner_leases += 1 + try: + yield _current_runner + finally: + _runner_leases -= 1 + if not _runner_leases: + assert _runner_stack is not None + _runner_stack.close() + _runner_stack = _current_runner = None + + +def pytest_configure(config: Any) -> None: + config.addinivalue_line( + "markers", + "anyio: mark the (coroutine function) test to be run " + "asynchronously via anyio.", + ) + + +def pytest_fixture_setup(fixturedef: Any, request: Any) -> None: + def wrapper(*args, anyio_backend, **kwargs): # type: ignore[no-untyped-def] + backend_name, backend_options = extract_backend_and_options(anyio_backend) + if has_backend_arg: + kwargs["anyio_backend"] = anyio_backend + + with get_runner(backend_name, backend_options) as runner: + if isasyncgenfunction(func): + yield from runner.run_asyncgen_fixture(func, kwargs) + else: + yield runner.run_fixture(func, kwargs) + + # Only apply this to coroutine functions and async generator functions in requests + # 
that involve the anyio_backend fixture + func = fixturedef.func + if isasyncgenfunction(func) or iscoroutinefunction(func): + if "anyio_backend" in request.fixturenames: + has_backend_arg = "anyio_backend" in fixturedef.argnames + fixturedef.func = wrapper + if not has_backend_arg: + fixturedef.argnames += ("anyio_backend",) + + +@pytest.hookimpl(tryfirst=True) +def pytest_pycollect_makeitem(collector: Any, name: Any, obj: Any) -> None: + if collector.istestfunction(obj, name): + inner_func = obj.hypothesis.inner_test if hasattr(obj, "hypothesis") else obj + if iscoroutinefunction(inner_func): + marker = collector.get_closest_marker("anyio") + own_markers = getattr(obj, "pytestmark", ()) + if marker or any(marker.name == "anyio" for marker in own_markers): + pytest.mark.usefixtures("anyio_backend")(obj) + + +@pytest.hookimpl(tryfirst=True) +def pytest_pyfunc_call(pyfuncitem: Any) -> bool | None: + def run_with_hypothesis(**kwargs: Any) -> None: + with get_runner(backend_name, backend_options) as runner: + runner.run_test(original_func, kwargs) + + backend = pyfuncitem.funcargs.get("anyio_backend") + if backend: + backend_name, backend_options = extract_backend_and_options(backend) + + if hasattr(pyfuncitem.obj, "hypothesis"): + # Wrap the inner test function unless it's already wrapped + original_func = pyfuncitem.obj.hypothesis.inner_test + if original_func.__qualname__ != run_with_hypothesis.__qualname__: + if iscoroutinefunction(original_func): + pyfuncitem.obj.hypothesis.inner_test = run_with_hypothesis + + return None + + if iscoroutinefunction(pyfuncitem.obj): + funcargs = pyfuncitem.funcargs + testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} + with get_runner(backend_name, backend_options) as runner: + runner.run_test(pyfuncitem.obj, testargs) + + return True + + return None + + +@pytest.fixture(scope="module", params=get_all_backends()) +def anyio_backend(request: Any) -> Any: + return request.param + + +@pytest.fixture +def 
anyio_backend_name(anyio_backend: Any) -> str: + if isinstance(anyio_backend, str): + return anyio_backend + else: + return anyio_backend[0] + + +@pytest.fixture +def anyio_backend_options(anyio_backend: Any) -> dict[str, Any]: + if isinstance(anyio_backend, str): + return {} + else: + return anyio_backend[1] diff --git a/.venv/Lib/site-packages/anyio/streams/__init__.py b/.venv/Lib/site-packages/anyio/streams/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/anyio/streams/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/anyio/streams/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..5f8bf477 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/streams/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/streams/__pycache__/buffered.cpython-311.pyc b/.venv/Lib/site-packages/anyio/streams/__pycache__/buffered.cpython-311.pyc new file mode 100644 index 00000000..2c184d36 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/streams/__pycache__/buffered.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/streams/__pycache__/file.cpython-311.pyc b/.venv/Lib/site-packages/anyio/streams/__pycache__/file.cpython-311.pyc new file mode 100644 index 00000000..e6443666 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/streams/__pycache__/file.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/streams/__pycache__/memory.cpython-311.pyc b/.venv/Lib/site-packages/anyio/streams/__pycache__/memory.cpython-311.pyc new file mode 100644 index 00000000..4189abe9 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/streams/__pycache__/memory.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/streams/__pycache__/stapled.cpython-311.pyc b/.venv/Lib/site-packages/anyio/streams/__pycache__/stapled.cpython-311.pyc new file mode 100644 index 00000000..ec7ddef0 Binary files /dev/null and 
b/.venv/Lib/site-packages/anyio/streams/__pycache__/stapled.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/streams/__pycache__/text.cpython-311.pyc b/.venv/Lib/site-packages/anyio/streams/__pycache__/text.cpython-311.pyc new file mode 100644 index 00000000..f2f1e9bb Binary files /dev/null and b/.venv/Lib/site-packages/anyio/streams/__pycache__/text.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/streams/__pycache__/tls.cpython-311.pyc b/.venv/Lib/site-packages/anyio/streams/__pycache__/tls.cpython-311.pyc new file mode 100644 index 00000000..e003eeb5 Binary files /dev/null and b/.venv/Lib/site-packages/anyio/streams/__pycache__/tls.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/anyio/streams/buffered.py b/.venv/Lib/site-packages/anyio/streams/buffered.py new file mode 100644 index 00000000..f5d5e836 --- /dev/null +++ b/.venv/Lib/site-packages/anyio/streams/buffered.py @@ -0,0 +1,119 @@ +from __future__ import annotations + +from collections.abc import Callable, Mapping +from dataclasses import dataclass, field +from typing import Any + +from .. import ClosedResourceError, DelimiterNotFound, EndOfStream, IncompleteRead +from ..abc import AnyByteReceiveStream, ByteReceiveStream + + +@dataclass(eq=False) +class BufferedByteReceiveStream(ByteReceiveStream): + """ + Wraps any bytes-based receive stream and uses a buffer to provide sophisticated + receiving capabilities in the form of a byte stream. 
+ """ + + receive_stream: AnyByteReceiveStream + _buffer: bytearray = field(init=False, default_factory=bytearray) + _closed: bool = field(init=False, default=False) + + async def aclose(self) -> None: + await self.receive_stream.aclose() + self._closed = True + + @property + def buffer(self) -> bytes: + """The bytes currently in the buffer.""" + return bytes(self._buffer) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.receive_stream.extra_attributes + + async def receive(self, max_bytes: int = 65536) -> bytes: + if self._closed: + raise ClosedResourceError + + if self._buffer: + chunk = bytes(self._buffer[:max_bytes]) + del self._buffer[:max_bytes] + return chunk + elif isinstance(self.receive_stream, ByteReceiveStream): + return await self.receive_stream.receive(max_bytes) + else: + # With a bytes-oriented object stream, we need to handle any surplus bytes + # we get from the receive() call + chunk = await self.receive_stream.receive() + if len(chunk) > max_bytes: + # Save the surplus bytes in the buffer + self._buffer.extend(chunk[max_bytes:]) + return chunk[:max_bytes] + else: + return chunk + + async def receive_exactly(self, nbytes: int) -> bytes: + """ + Read exactly the given amount of bytes from the stream. 
+ + :param nbytes: the number of bytes to read + :return: the bytes read + :raises ~anyio.IncompleteRead: if the stream was closed before the requested + amount of bytes could be read from the stream + + """ + while True: + remaining = nbytes - len(self._buffer) + if remaining <= 0: + retval = self._buffer[:nbytes] + del self._buffer[:nbytes] + return bytes(retval) + + try: + if isinstance(self.receive_stream, ByteReceiveStream): + chunk = await self.receive_stream.receive(remaining) + else: + chunk = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + self._buffer.extend(chunk) + + async def receive_until(self, delimiter: bytes, max_bytes: int) -> bytes: + """ + Read from the stream until the delimiter is found or max_bytes have been read. + + :param delimiter: the marker to look for in the stream + :param max_bytes: maximum number of bytes that will be read before raising + :exc:`~anyio.DelimiterNotFound` + :return: the bytes read (not including the delimiter) + :raises ~anyio.IncompleteRead: if the stream was closed before the delimiter + was found + :raises ~anyio.DelimiterNotFound: if the delimiter is not found within the + bytes read up to the maximum allowed + + """ + delimiter_size = len(delimiter) + offset = 0 + while True: + # Check if the delimiter can be found in the current buffer + index = self._buffer.find(delimiter, offset) + if index >= 0: + found = self._buffer[:index] + del self._buffer[: index + len(delimiter) :] + return bytes(found) + + # Check if the buffer is already at or over the limit + if len(self._buffer) >= max_bytes: + raise DelimiterNotFound(max_bytes) + + # Read more data into the buffer from the socket + try: + data = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + # Move the offset forward and add the new data to the buffer + offset = max(len(self._buffer) - delimiter_size + 1, 0) + self._buffer.extend(data) diff --git 
a/.venv/Lib/site-packages/anyio/streams/file.py b/.venv/Lib/site-packages/anyio/streams/file.py new file mode 100644 index 00000000..f4924642 --- /dev/null +++ b/.venv/Lib/site-packages/anyio/streams/file.py @@ -0,0 +1,148 @@ +from __future__ import annotations + +from collections.abc import Callable, Mapping +from io import SEEK_SET, UnsupportedOperation +from os import PathLike +from pathlib import Path +from typing import Any, BinaryIO, cast + +from .. import ( + BrokenResourceError, + ClosedResourceError, + EndOfStream, + TypedAttributeSet, + to_thread, + typed_attribute, +) +from ..abc import ByteReceiveStream, ByteSendStream + + +class FileStreamAttribute(TypedAttributeSet): + #: the open file descriptor + file: BinaryIO = typed_attribute() + #: the path of the file on the file system, if available (file must be a real file) + path: Path = typed_attribute() + #: the file number, if available (file must be a real file or a TTY) + fileno: int = typed_attribute() + + +class _BaseFileStream: + def __init__(self, file: BinaryIO): + self._file = file + + async def aclose(self) -> None: + await to_thread.run_sync(self._file.close) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: dict[Any, Callable[[], Any]] = { + FileStreamAttribute.file: lambda: self._file, + } + + if hasattr(self._file, "name"): + attributes[FileStreamAttribute.path] = lambda: Path(self._file.name) + + try: + self._file.fileno() + except UnsupportedOperation: + pass + else: + attributes[FileStreamAttribute.fileno] = lambda: self._file.fileno() + + return attributes + + +class FileReadStream(_BaseFileStream, ByteReceiveStream): + """ + A byte stream that reads from a file in the file system. + + :param file: a file that has been opened for reading in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path(cls, path: str | PathLike[str]) -> FileReadStream: + """ + Create a file read stream by opening the given file. 
+ + :param path: path of the file to read from + + """ + file = await to_thread.run_sync(Path(path).open, "rb") + return cls(cast(BinaryIO, file)) + + async def receive(self, max_bytes: int = 65536) -> bytes: + try: + data = await to_thread.run_sync(self._file.read, max_bytes) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc + + if data: + return data + else: + raise EndOfStream + + async def seek(self, position: int, whence: int = SEEK_SET) -> int: + """ + Seek the file to the given position. + + .. seealso:: :meth:`io.IOBase.seek` + + .. note:: Not all file descriptors are seekable. + + :param position: position to seek the file to + :param whence: controls how ``position`` is interpreted + :return: the new absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.seek, position, whence) + + async def tell(self) -> int: + """ + Return the current stream position. + + .. note:: Not all file descriptors are seekable. + + :return: the current absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.tell) + + +class FileWriteStream(_BaseFileStream, ByteSendStream): + """ + A byte stream that writes to a file in the file system. + + :param file: a file that has been opened for writing in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path( + cls, path: str | PathLike[str], append: bool = False + ) -> FileWriteStream: + """ + Create a file write stream by opening the given file for writing. 
+ + :param path: path of the file to write to + :param append: if ``True``, open the file for appending; if ``False``, any + existing file at the given path will be truncated + + """ + mode = "ab" if append else "wb" + file = await to_thread.run_sync(Path(path).open, mode) + return cls(cast(BinaryIO, file)) + + async def send(self, item: bytes) -> None: + try: + await to_thread.run_sync(self._file.write, item) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc diff --git a/.venv/Lib/site-packages/anyio/streams/memory.py b/.venv/Lib/site-packages/anyio/streams/memory.py new file mode 100644 index 00000000..bc2425b7 --- /dev/null +++ b/.venv/Lib/site-packages/anyio/streams/memory.py @@ -0,0 +1,283 @@ +from __future__ import annotations + +from collections import OrderedDict, deque +from dataclasses import dataclass, field +from types import TracebackType +from typing import Generic, NamedTuple, TypeVar + +from .. import ( + BrokenResourceError, + ClosedResourceError, + EndOfStream, + WouldBlock, +) +from ..abc import Event, ObjectReceiveStream, ObjectSendStream +from ..lowlevel import checkpoint + +T_Item = TypeVar("T_Item") +T_co = TypeVar("T_co", covariant=True) +T_contra = TypeVar("T_contra", contravariant=True) + + +class MemoryObjectStreamStatistics(NamedTuple): + current_buffer_used: int #: number of items stored in the buffer + #: maximum number of items that can be stored on this stream (or :data:`math.inf`) + max_buffer_size: float + open_send_streams: int #: number of unclosed clones of the send stream + open_receive_streams: int #: number of unclosed clones of the receive stream + #: number of tasks blocked on :meth:`MemoryObjectSendStream.send` + tasks_waiting_send: int + #: number of tasks blocked on :meth:`MemoryObjectReceiveStream.receive` + tasks_waiting_receive: int + + +@dataclass(eq=False) +class MemoryObjectStreamState(Generic[T_Item]): + max_buffer_size: float = field() + 
buffer: deque[T_Item] = field(init=False, default_factory=deque) + open_send_channels: int = field(init=False, default=0) + open_receive_channels: int = field(init=False, default=0) + waiting_receivers: OrderedDict[Event, list[T_Item]] = field( + init=False, default_factory=OrderedDict + ) + waiting_senders: OrderedDict[Event, T_Item] = field( + init=False, default_factory=OrderedDict + ) + + def statistics(self) -> MemoryObjectStreamStatistics: + return MemoryObjectStreamStatistics( + len(self.buffer), + self.max_buffer_size, + self.open_send_channels, + self.open_receive_channels, + len(self.waiting_senders), + len(self.waiting_receivers), + ) + + +@dataclass(eq=False) +class MemoryObjectReceiveStream(Generic[T_co], ObjectReceiveStream[T_co]): + _state: MemoryObjectStreamState[T_co] + _closed: bool = field(init=False, default=False) + + def __post_init__(self) -> None: + self._state.open_receive_channels += 1 + + def receive_nowait(self) -> T_co: + """ + Receive the next item if it can be done without waiting. 
+ + :return: the received item + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.EndOfStream: if the buffer is empty and this stream has been + closed from the sending end + :raises ~anyio.WouldBlock: if there are no items in the buffer and no tasks + waiting to send + + """ + if self._closed: + raise ClosedResourceError + + if self._state.waiting_senders: + # Get the item from the next sender + send_event, item = self._state.waiting_senders.popitem(last=False) + self._state.buffer.append(item) + send_event.set() + + if self._state.buffer: + return self._state.buffer.popleft() + elif not self._state.open_send_channels: + raise EndOfStream + + raise WouldBlock + + async def receive(self) -> T_co: + await checkpoint() + try: + return self.receive_nowait() + except WouldBlock: + # Add ourselves in the queue + receive_event = Event() + container: list[T_co] = [] + self._state.waiting_receivers[receive_event] = container + + try: + await receive_event.wait() + finally: + self._state.waiting_receivers.pop(receive_event, None) + + if container: + return container[0] + else: + raise EndOfStream + + def clone(self) -> MemoryObjectReceiveStream[T_co]: + """ + Create a clone of this receive stream. + + Each clone can be closed separately. Only when all clones have been closed will + the receiving end of the memory stream be considered closed by the sending ends. + + :return: the cloned stream + + """ + if self._closed: + raise ClosedResourceError + + return MemoryObjectReceiveStream(_state=self._state) + + def close(self) -> None: + """ + Close the stream. + + This works the exact same way as :meth:`aclose`, but is provided as a special + case for the benefit of synchronous callbacks. 
+ + """ + if not self._closed: + self._closed = True + self._state.open_receive_channels -= 1 + if self._state.open_receive_channels == 0: + send_events = list(self._state.waiting_senders.keys()) + for event in send_events: + event.set() + + async def aclose(self) -> None: + self.close() + + def statistics(self) -> MemoryObjectStreamStatistics: + """ + Return statistics about the current state of this stream. + + .. versionadded:: 3.0 + """ + return self._state.statistics() + + def __enter__(self) -> MemoryObjectReceiveStream[T_co]: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + +@dataclass(eq=False) +class MemoryObjectSendStream(Generic[T_contra], ObjectSendStream[T_contra]): + _state: MemoryObjectStreamState[T_contra] + _closed: bool = field(init=False, default=False) + + def __post_init__(self) -> None: + self._state.open_send_channels += 1 + + def send_nowait(self, item: T_contra) -> None: + """ + Send an item immediately if it can be done without waiting. + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.BrokenResourceError: if the stream has been closed from the + receiving end + :raises ~anyio.WouldBlock: if the buffer is full and there are no tasks waiting + to receive + + """ + if self._closed: + raise ClosedResourceError + if not self._state.open_receive_channels: + raise BrokenResourceError + + if self._state.waiting_receivers: + receive_event, container = self._state.waiting_receivers.popitem(last=False) + container.append(item) + receive_event.set() + elif len(self._state.buffer) < self._state.max_buffer_size: + self._state.buffer.append(item) + else: + raise WouldBlock + + async def send(self, item: T_contra) -> None: + """ + Send an item to the stream. 
+ + If the buffer is full, this method blocks until there is again room in the + buffer or the item can be sent directly to a receiver. + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.BrokenResourceError: if the stream has been closed from the + receiving end + + """ + await checkpoint() + try: + self.send_nowait(item) + except WouldBlock: + # Wait until there's someone on the receiving end + send_event = Event() + self._state.waiting_senders[send_event] = item + try: + await send_event.wait() + except BaseException: + self._state.waiting_senders.pop(send_event, None) + raise + + if self._state.waiting_senders.pop(send_event, None): + raise BrokenResourceError from None + + def clone(self) -> MemoryObjectSendStream[T_contra]: + """ + Create a clone of this send stream. + + Each clone can be closed separately. Only when all clones have been closed will + the sending end of the memory stream be considered closed by the receiving ends. + + :return: the cloned stream + + """ + if self._closed: + raise ClosedResourceError + + return MemoryObjectSendStream(_state=self._state) + + def close(self) -> None: + """ + Close the stream. + + This works the exact same way as :meth:`aclose`, but is provided as a special + case for the benefit of synchronous callbacks. + + """ + if not self._closed: + self._closed = True + self._state.open_send_channels -= 1 + if self._state.open_send_channels == 0: + receive_events = list(self._state.waiting_receivers.keys()) + self._state.waiting_receivers.clear() + for event in receive_events: + event.set() + + async def aclose(self) -> None: + self.close() + + def statistics(self) -> MemoryObjectStreamStatistics: + """ + Return statistics about the current state of this stream. + + .. 
versionadded:: 3.0 + """ + return self._state.statistics() + + def __enter__(self) -> MemoryObjectSendStream[T_contra]: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() diff --git a/.venv/Lib/site-packages/anyio/streams/stapled.py b/.venv/Lib/site-packages/anyio/streams/stapled.py new file mode 100644 index 00000000..80f64a2e --- /dev/null +++ b/.venv/Lib/site-packages/anyio/streams/stapled.py @@ -0,0 +1,141 @@ +from __future__ import annotations + +from collections.abc import Callable, Mapping, Sequence +from dataclasses import dataclass +from typing import Any, Generic, TypeVar + +from ..abc import ( + ByteReceiveStream, + ByteSendStream, + ByteStream, + Listener, + ObjectReceiveStream, + ObjectSendStream, + ObjectStream, + TaskGroup, +) + +T_Item = TypeVar("T_Item") +T_Stream = TypeVar("T_Stream") + + +@dataclass(eq=False) +class StapledByteStream(ByteStream): + """ + Combines two byte streams into a single, bidirectional byte stream. + + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. 
+ + :param ByteSendStream send_stream: the sending byte stream + :param ByteReceiveStream receive_stream: the receiving byte stream + """ + + send_stream: ByteSendStream + receive_stream: ByteReceiveStream + + async def receive(self, max_bytes: int = 65536) -> bytes: + return await self.receive_stream.receive(max_bytes) + + async def send(self, item: bytes) -> None: + await self.send_stream.send(item) + + async def send_eof(self) -> None: + await self.send_stream.aclose() + + async def aclose(self) -> None: + await self.send_stream.aclose() + await self.receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.send_stream.extra_attributes, + **self.receive_stream.extra_attributes, + } + + +@dataclass(eq=False) +class StapledObjectStream(Generic[T_Item], ObjectStream[T_Item]): + """ + Combines two object streams into a single, bidirectional object stream. + + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. + + :param ObjectSendStream send_stream: the sending object stream + :param ObjectReceiveStream receive_stream: the receiving object stream + """ + + send_stream: ObjectSendStream[T_Item] + receive_stream: ObjectReceiveStream[T_Item] + + async def receive(self) -> T_Item: + return await self.receive_stream.receive() + + async def send(self, item: T_Item) -> None: + await self.send_stream.send(item) + + async def send_eof(self) -> None: + await self.send_stream.aclose() + + async def aclose(self) -> None: + await self.send_stream.aclose() + await self.receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.send_stream.extra_attributes, + **self.receive_stream.extra_attributes, + } + + +@dataclass(eq=False) +class MultiListener(Generic[T_Stream], Listener[T_Stream]): + """ + Combines multiple listeners into one, serving connections from all of them at once. 
+ + Any MultiListeners in the given collection of listeners will have their listeners + moved into this one. + + Extra attributes are provided from each listener, with each successive listener + overriding any conflicting attributes from the previous one. + + :param listeners: listeners to serve + :type listeners: Sequence[Listener[T_Stream]] + """ + + listeners: Sequence[Listener[T_Stream]] + + def __post_init__(self) -> None: + listeners: list[Listener[T_Stream]] = [] + for listener in self.listeners: + if isinstance(listener, MultiListener): + listeners.extend(listener.listeners) + del listener.listeners[:] # type: ignore[attr-defined] + else: + listeners.append(listener) + + self.listeners = listeners + + async def serve( + self, handler: Callable[[T_Stream], Any], task_group: TaskGroup | None = None + ) -> None: + from .. import create_task_group + + async with create_task_group() as tg: + for listener in self.listeners: + tg.start_soon(listener.serve, handler, task_group) + + async def aclose(self) -> None: + for listener in self.listeners: + await listener.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: dict = {} + for listener in self.listeners: + attributes.update(listener.extra_attributes) + + return attributes diff --git a/.venv/Lib/site-packages/anyio/streams/text.py b/.venv/Lib/site-packages/anyio/streams/text.py new file mode 100644 index 00000000..f1a11278 --- /dev/null +++ b/.venv/Lib/site-packages/anyio/streams/text.py @@ -0,0 +1,147 @@ +from __future__ import annotations + +import codecs +from collections.abc import Callable, Mapping +from dataclasses import InitVar, dataclass, field +from typing import Any + +from ..abc import ( + AnyByteReceiveStream, + AnyByteSendStream, + AnyByteStream, + ObjectReceiveStream, + ObjectSendStream, + ObjectStream, +) + + +@dataclass(eq=False) +class TextReceiveStream(ObjectReceiveStream[str]): + """ + Stream wrapper that decodes bytes to strings using the 
given encoding. + + Decoding is done using :class:`~codecs.IncrementalDecoder` which returns any + completely received unicode characters as soon as they come in. + + :param transport_stream: any bytes-based receive stream + :param encoding: character encoding to use for decoding bytes to strings (defaults + to ``utf-8``) + :param errors: handling scheme for decoding errors (defaults to ``strict``; see the + `codecs module documentation`_ for a comprehensive list of options) + + .. _codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteReceiveStream + encoding: InitVar[str] = "utf-8" + errors: InitVar[str] = "strict" + _decoder: codecs.IncrementalDecoder = field(init=False) + + def __post_init__(self, encoding: str, errors: str) -> None: + decoder_class = codecs.getincrementaldecoder(encoding) + self._decoder = decoder_class(errors=errors) + + async def receive(self) -> str: + while True: + chunk = await self.transport_stream.receive() + decoded = self._decoder.decode(chunk) + if decoded: + return decoded + + async def aclose(self) -> None: + await self.transport_stream.aclose() + self._decoder.reset() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.transport_stream.extra_attributes + + +@dataclass(eq=False) +class TextSendStream(ObjectSendStream[str]): + """ + Sends strings to the wrapped stream as bytes using the given encoding. + + :param AnyByteSendStream transport_stream: any bytes-based send stream + :param str encoding: character encoding to use for encoding strings to bytes + (defaults to ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see + the `codecs module documentation`_ for a comprehensive list of options) + + .. 
_codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteSendStream + encoding: InitVar[str] = "utf-8" + errors: str = "strict" + _encoder: Callable[..., tuple[bytes, int]] = field(init=False) + + def __post_init__(self, encoding: str) -> None: + self._encoder = codecs.getencoder(encoding) + + async def send(self, item: str) -> None: + encoded = self._encoder(item, self.errors)[0] + await self.transport_stream.send(encoded) + + async def aclose(self) -> None: + await self.transport_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.transport_stream.extra_attributes + + +@dataclass(eq=False) +class TextStream(ObjectStream[str]): + """ + A bidirectional stream that decodes bytes to strings on receive and encodes strings + to bytes on send. + + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. + + :param AnyByteStream transport_stream: any bytes-based stream + :param str encoding: character encoding to use for encoding/decoding strings to/from + bytes (defaults to ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see + the `codecs module documentation`_ for a comprehensive list of options) + + .. 
_codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteStream + encoding: InitVar[str] = "utf-8" + errors: InitVar[str] = "strict" + _receive_stream: TextReceiveStream = field(init=False) + _send_stream: TextSendStream = field(init=False) + + def __post_init__(self, encoding: str, errors: str) -> None: + self._receive_stream = TextReceiveStream( + self.transport_stream, encoding=encoding, errors=errors + ) + self._send_stream = TextSendStream( + self.transport_stream, encoding=encoding, errors=errors + ) + + async def receive(self) -> str: + return await self._receive_stream.receive() + + async def send(self, item: str) -> None: + await self._send_stream.send(item) + + async def send_eof(self) -> None: + await self.transport_stream.send_eof() + + async def aclose(self) -> None: + await self._send_stream.aclose() + await self._receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self._send_stream.extra_attributes, + **self._receive_stream.extra_attributes, + } diff --git a/.venv/Lib/site-packages/anyio/streams/tls.py b/.venv/Lib/site-packages/anyio/streams/tls.py new file mode 100644 index 00000000..e913eedb --- /dev/null +++ b/.venv/Lib/site-packages/anyio/streams/tls.py @@ -0,0 +1,338 @@ +from __future__ import annotations + +import logging +import re +import ssl +import sys +from collections.abc import Callable, Mapping +from dataclasses import dataclass +from functools import wraps +from typing import Any, Tuple, TypeVar + +from .. 
import ( + BrokenResourceError, + EndOfStream, + aclose_forcefully, + get_cancelled_exc_class, +) +from .._core._typedattr import TypedAttributeSet, typed_attribute +from ..abc import AnyByteStream, ByteStream, Listener, TaskGroup + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") +_PCTRTT = Tuple[Tuple[str, str], ...] +_PCTRTTT = Tuple[_PCTRTT, ...] + + +class TLSAttribute(TypedAttributeSet): + """Contains Transport Layer Security related attributes.""" + + #: the selected ALPN protocol + alpn_protocol: str | None = typed_attribute() + #: the channel binding for type ``tls-unique`` + channel_binding_tls_unique: bytes = typed_attribute() + #: the selected cipher + cipher: tuple[str, str, int] = typed_attribute() + #: the peer certificate in dictionary form (see :meth:`ssl.SSLSocket.getpeercert` + # for more information) + peer_certificate: None | (dict[str, str | _PCTRTTT | _PCTRTT]) = typed_attribute() + #: the peer certificate in binary form + peer_certificate_binary: bytes | None = typed_attribute() + #: ``True`` if this is the server side of the connection + server_side: bool = typed_attribute() + #: ciphers shared by the client during the TLS handshake (``None`` if this is the + #: client side) + shared_ciphers: list[tuple[str, str, int]] | None = typed_attribute() + #: the :class:`~ssl.SSLObject` used for encryption + ssl_object: ssl.SSLObject = typed_attribute() + #: ``True`` if this stream does (and expects) a closing TLS handshake when the + #: stream is being closed + standard_compatible: bool = typed_attribute() + #: the TLS protocol version (e.g. ``TLSv1.2``) + tls_version: str = typed_attribute() + + +@dataclass(eq=False) +class TLSStream(ByteStream): + """ + A stream wrapper that encrypts all sent data and decrypts received data. + + This class has no public initializer; use :meth:`wrap` instead. 
+ All extra attributes from :class:`~TLSAttribute` are supported. + + :var AnyByteStream transport_stream: the wrapped stream + + """ + + transport_stream: AnyByteStream + standard_compatible: bool + _ssl_object: ssl.SSLObject + _read_bio: ssl.MemoryBIO + _write_bio: ssl.MemoryBIO + + @classmethod + async def wrap( + cls, + transport_stream: AnyByteStream, + *, + server_side: bool | None = None, + hostname: str | None = None, + ssl_context: ssl.SSLContext | None = None, + standard_compatible: bool = True, + ) -> TLSStream: + """ + Wrap an existing stream with Transport Layer Security. + + This performs a TLS handshake with the peer. + + :param transport_stream: a bytes-transporting stream to wrap + :param server_side: ``True`` if this is the server side of the connection, + ``False`` if this is the client side (if omitted, will be set to ``False`` + if ``hostname`` has been provided, ``False`` otherwise). Used only to create + a default context when an explicit context has not been provided. 
+ :param hostname: host name of the peer (if host name checking is desired) + :param ssl_context: the SSLContext object to use (if not provided, a secure + default will be created) + :param standard_compatible: if ``False``, skip the closing handshake when + closing the connection, and don't raise an exception if the peer does the + same + :raises ~ssl.SSLError: if the TLS handshake fails + + """ + if server_side is None: + server_side = not hostname + + if not ssl_context: + purpose = ( + ssl.Purpose.CLIENT_AUTH if server_side else ssl.Purpose.SERVER_AUTH + ) + ssl_context = ssl.create_default_context(purpose) + + # Re-enable detection of unexpected EOFs if it was disabled by Python + if hasattr(ssl, "OP_IGNORE_UNEXPECTED_EOF"): + ssl_context.options &= ~ssl.OP_IGNORE_UNEXPECTED_EOF + + bio_in = ssl.MemoryBIO() + bio_out = ssl.MemoryBIO() + ssl_object = ssl_context.wrap_bio( + bio_in, bio_out, server_side=server_side, server_hostname=hostname + ) + wrapper = cls( + transport_stream=transport_stream, + standard_compatible=standard_compatible, + _ssl_object=ssl_object, + _read_bio=bio_in, + _write_bio=bio_out, + ) + await wrapper._call_sslobject_method(ssl_object.do_handshake) + return wrapper + + async def _call_sslobject_method( + self, func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] + ) -> T_Retval: + while True: + try: + result = func(*args) + except ssl.SSLWantReadError: + try: + # Flush any pending writes first + if self._write_bio.pending: + await self.transport_stream.send(self._write_bio.read()) + + data = await self.transport_stream.receive() + except EndOfStream: + self._read_bio.write_eof() + except OSError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + raise BrokenResourceError from exc + else: + self._read_bio.write(data) + except ssl.SSLWantWriteError: + await self.transport_stream.send(self._write_bio.read()) + except ssl.SSLSyscallError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + 
raise BrokenResourceError from exc + except ssl.SSLError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + if ( + isinstance(exc, ssl.SSLEOFError) + or "UNEXPECTED_EOF_WHILE_READING" in exc.strerror + ): + if self.standard_compatible: + raise BrokenResourceError from exc + else: + raise EndOfStream from None + + raise + else: + # Flush any pending writes first + if self._write_bio.pending: + await self.transport_stream.send(self._write_bio.read()) + + return result + + async def unwrap(self) -> tuple[AnyByteStream, bytes]: + """ + Does the TLS closing handshake. + + :return: a tuple of (wrapped byte stream, bytes left in the read buffer) + + """ + await self._call_sslobject_method(self._ssl_object.unwrap) + self._read_bio.write_eof() + self._write_bio.write_eof() + return self.transport_stream, self._read_bio.read() + + async def aclose(self) -> None: + if self.standard_compatible: + try: + await self.unwrap() + except BaseException: + await aclose_forcefully(self.transport_stream) + raise + + await self.transport_stream.aclose() + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._call_sslobject_method(self._ssl_object.read, max_bytes) + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + await self._call_sslobject_method(self._ssl_object.write, item) + + async def send_eof(self) -> None: + tls_version = self.extra(TLSAttribute.tls_version) + match = re.match(r"TLSv(\d+)(?:\.(\d+))?", tls_version) + if match: + major, minor = int(match.group(1)), int(match.group(2) or 0) + if (major, minor) < (1, 3): + raise NotImplementedError( + f"send_eof() requires at least TLSv1.3; current " + f"session uses {tls_version}" + ) + + raise NotImplementedError( + "send_eof() has not yet been implemented for TLS streams" + ) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.transport_stream.extra_attributes, + TLSAttribute.alpn_protocol: 
self._ssl_object.selected_alpn_protocol, + TLSAttribute.channel_binding_tls_unique: ( + self._ssl_object.get_channel_binding + ), + TLSAttribute.cipher: self._ssl_object.cipher, + TLSAttribute.peer_certificate: lambda: self._ssl_object.getpeercert(False), + TLSAttribute.peer_certificate_binary: lambda: self._ssl_object.getpeercert( + True + ), + TLSAttribute.server_side: lambda: self._ssl_object.server_side, + TLSAttribute.shared_ciphers: lambda: self._ssl_object.shared_ciphers() + if self._ssl_object.server_side + else None, + TLSAttribute.standard_compatible: lambda: self.standard_compatible, + TLSAttribute.ssl_object: lambda: self._ssl_object, + TLSAttribute.tls_version: self._ssl_object.version, + } + + +@dataclass(eq=False) +class TLSListener(Listener[TLSStream]): + """ + A convenience listener that wraps another listener and auto-negotiates a TLS session + on every accepted connection. + + If the TLS handshake times out or raises an exception, + :meth:`handle_handshake_error` is called to do whatever post-mortem processing is + deemed necessary. + + Supports only the :attr:`~TLSAttribute.standard_compatible` extra attribute. + + :param Listener listener: the listener to wrap + :param ssl_context: the SSL context object + :param standard_compatible: a flag passed through to :meth:`TLSStream.wrap` + :param handshake_timeout: time limit for the TLS handshake + (passed to :func:`~anyio.fail_after`) + """ + + listener: Listener[Any] + ssl_context: ssl.SSLContext + standard_compatible: bool = True + handshake_timeout: float = 30 + + @staticmethod + async def handle_handshake_error(exc: BaseException, stream: AnyByteStream) -> None: + """ + Handle an exception raised during the TLS handshake. + + This method does 3 things: + + #. Forcefully closes the original stream + #. Logs the exception (unless it was a cancellation exception) using the + ``anyio.streams.tls`` logger + #. 
Reraises the exception if it was a base exception or a cancellation exception + + :param exc: the exception + :param stream: the original stream + + """ + await aclose_forcefully(stream) + + # Log all except cancellation exceptions + if not isinstance(exc, get_cancelled_exc_class()): + # CPython (as of 3.11.5) returns incorrect `sys.exc_info()` here when using + # any asyncio implementation, so we explicitly pass the exception to log + # (https://github.com/python/cpython/issues/108668). Trio does not have this + # issue because it works around the CPython bug. + logging.getLogger(__name__).exception( + "Error during TLS handshake", exc_info=exc + ) + + # Only reraise base exceptions and cancellation exceptions + if not isinstance(exc, Exception) or isinstance(exc, get_cancelled_exc_class()): + raise + + async def serve( + self, + handler: Callable[[TLSStream], Any], + task_group: TaskGroup | None = None, + ) -> None: + @wraps(handler) + async def handler_wrapper(stream: AnyByteStream) -> None: + from .. 
import fail_after + + try: + with fail_after(self.handshake_timeout): + wrapped_stream = await TLSStream.wrap( + stream, + ssl_context=self.ssl_context, + standard_compatible=self.standard_compatible, + ) + except BaseException as exc: + await self.handle_handshake_error(exc, stream) + else: + await handler(wrapped_stream) + + await self.listener.serve(handler_wrapper, task_group) + + async def aclose(self) -> None: + await self.listener.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + TLSAttribute.standard_compatible: lambda: self.standard_compatible, + } diff --git a/.venv/Lib/site-packages/anyio/to_process.py b/.venv/Lib/site-packages/anyio/to_process.py new file mode 100644 index 00000000..1ff06f0b --- /dev/null +++ b/.venv/Lib/site-packages/anyio/to_process.py @@ -0,0 +1,259 @@ +from __future__ import annotations + +import os +import pickle +import subprocess +import sys +from collections import deque +from collections.abc import Callable +from importlib.util import module_from_spec, spec_from_file_location +from typing import TypeVar, cast + +from ._core._eventloop import current_time, get_async_backend, get_cancelled_exc_class +from ._core._exceptions import BrokenWorkerProcess +from ._core._subprocesses import open_process +from ._core._synchronization import CapacityLimiter +from ._core._tasks import CancelScope, fail_after +from .abc import ByteReceiveStream, ByteSendStream, Process +from .lowlevel import RunVar, checkpoint_if_cancelled +from .streams.buffered import BufferedByteReceiveStream + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +WORKER_MAX_IDLE_TIME = 300 # 5 minutes + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + +_process_pool_workers: RunVar[set[Process]] = RunVar("_process_pool_workers") +_process_pool_idle_workers: RunVar[deque[tuple[Process, float]]] = RunVar( + 
"_process_pool_idle_workers" +) +_default_process_limiter: RunVar[CapacityLimiter] = RunVar("_default_process_limiter") + + +async def run_sync( + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + cancellable: bool = False, + limiter: CapacityLimiter | None = None, +) -> T_Retval: + """ + Call the given function with the given arguments in a worker process. + + If the ``cancellable`` option is enabled and the task waiting for its completion is + cancelled, the worker process running it will be abruptly terminated using SIGKILL + (or ``terminateProcess()`` on Windows). + + :param func: a callable + :param args: positional arguments for the callable + :param cancellable: ``True`` to allow cancellation of the operation while it's + running + :param limiter: capacity limiter to use to limit the total amount of processes + running (if omitted, the default limiter is used) + :return: an awaitable that yields the return value of the function. + + """ + + async def send_raw_command(pickled_cmd: bytes) -> object: + try: + await stdin.send(pickled_cmd) + response = await buffered.receive_until(b"\n", 50) + status, length = response.split(b" ") + if status not in (b"RETURN", b"EXCEPTION"): + raise RuntimeError( + f"Worker process returned unexpected response: {response!r}" + ) + + pickled_response = await buffered.receive_exactly(int(length)) + except BaseException as exc: + workers.discard(process) + try: + process.kill() + with CancelScope(shield=True): + await process.aclose() + except ProcessLookupError: + pass + + if isinstance(exc, get_cancelled_exc_class()): + raise + else: + raise BrokenWorkerProcess from exc + + retval = pickle.loads(pickled_response) + if status == b"EXCEPTION": + assert isinstance(retval, BaseException) + raise retval + else: + return retval + + # First pickle the request before trying to reserve a worker process + await checkpoint_if_cancelled() + request = pickle.dumps(("run", func, args), 
protocol=pickle.HIGHEST_PROTOCOL) + + # If this is the first run in this event loop thread, set up the necessary variables + try: + workers = _process_pool_workers.get() + idle_workers = _process_pool_idle_workers.get() + except LookupError: + workers = set() + idle_workers = deque() + _process_pool_workers.set(workers) + _process_pool_idle_workers.set(idle_workers) + get_async_backend().setup_process_pool_exit_at_shutdown(workers) + + async with limiter or current_default_process_limiter(): + # Pop processes from the pool (starting from the most recently used) until we + # find one that hasn't exited yet + process: Process + while idle_workers: + process, idle_since = idle_workers.pop() + if process.returncode is None: + stdin = cast(ByteSendStream, process.stdin) + buffered = BufferedByteReceiveStream( + cast(ByteReceiveStream, process.stdout) + ) + + # Prune any other workers that have been idle for WORKER_MAX_IDLE_TIME + # seconds or longer + now = current_time() + killed_processes: list[Process] = [] + while idle_workers: + if now - idle_workers[0][1] < WORKER_MAX_IDLE_TIME: + break + + process_to_kill, idle_since = idle_workers.popleft() + process_to_kill.kill() + workers.remove(process_to_kill) + killed_processes.append(process_to_kill) + + with CancelScope(shield=True): + for killed_process in killed_processes: + await killed_process.aclose() + + break + + workers.remove(process) + else: + command = [sys.executable, "-u", "-m", __name__] + process = await open_process( + command, stdin=subprocess.PIPE, stdout=subprocess.PIPE + ) + try: + stdin = cast(ByteSendStream, process.stdin) + buffered = BufferedByteReceiveStream( + cast(ByteReceiveStream, process.stdout) + ) + with fail_after(20): + message = await buffered.receive(6) + + if message != b"READY\n": + raise BrokenWorkerProcess( + f"Worker process returned unexpected response: {message!r}" + ) + + main_module_path = getattr(sys.modules["__main__"], "__file__", None) + pickled = pickle.dumps( + ("init", 
sys.path, main_module_path), + protocol=pickle.HIGHEST_PROTOCOL, + ) + await send_raw_command(pickled) + except (BrokenWorkerProcess, get_cancelled_exc_class()): + raise + except BaseException as exc: + process.kill() + raise BrokenWorkerProcess( + "Error during worker process initialization" + ) from exc + + workers.add(process) + + with CancelScope(shield=not cancellable): + try: + return cast(T_Retval, await send_raw_command(request)) + finally: + if process in workers: + idle_workers.append((process, current_time())) + + +def current_default_process_limiter() -> CapacityLimiter: + """ + Return the capacity limiter that is used by default to limit the number of worker + processes. + + :return: a capacity limiter object + + """ + try: + return _default_process_limiter.get() + except LookupError: + limiter = CapacityLimiter(os.cpu_count() or 2) + _default_process_limiter.set(limiter) + return limiter + + +def process_worker() -> None: + # Redirect standard streams to os.devnull so that user code won't interfere with the + # parent-worker communication + stdin = sys.stdin + stdout = sys.stdout + sys.stdin = open(os.devnull) + sys.stdout = open(os.devnull, "w") + + stdout.buffer.write(b"READY\n") + while True: + retval = exception = None + try: + command, *args = pickle.load(stdin.buffer) + except EOFError: + return + except BaseException as exc: + exception = exc + else: + if command == "run": + func, args = args + try: + retval = func(*args) + except BaseException as exc: + exception = exc + elif command == "init": + main_module_path: str | None + sys.path, main_module_path = args + del sys.modules["__main__"] + if main_module_path: + # Load the parent's main module but as __mp_main__ instead of + # __main__ (like multiprocessing does) to avoid infinite recursion + try: + spec = spec_from_file_location("__mp_main__", main_module_path) + if spec and spec.loader: + main = module_from_spec(spec) + spec.loader.exec_module(main) + sys.modules["__main__"] = main + 
except BaseException as exc: + exception = exc + + try: + if exception is not None: + status = b"EXCEPTION" + pickled = pickle.dumps(exception, pickle.HIGHEST_PROTOCOL) + else: + status = b"RETURN" + pickled = pickle.dumps(retval, pickle.HIGHEST_PROTOCOL) + except BaseException as exc: + exception = exc + status = b"EXCEPTION" + pickled = pickle.dumps(exc, pickle.HIGHEST_PROTOCOL) + + stdout.buffer.write(b"%s %d\n" % (status, len(pickled))) + stdout.buffer.write(pickled) + + # Respect SIGTERM + if isinstance(exception, SystemExit): + raise exception + + +if __name__ == "__main__": + process_worker() diff --git a/.venv/Lib/site-packages/anyio/to_thread.py b/.venv/Lib/site-packages/anyio/to_thread.py new file mode 100644 index 00000000..5070516e --- /dev/null +++ b/.venv/Lib/site-packages/anyio/to_thread.py @@ -0,0 +1,69 @@ +from __future__ import annotations + +import sys +from collections.abc import Callable +from typing import TypeVar +from warnings import warn + +from ._core._eventloop import get_async_backend +from .abc import CapacityLimiter + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + + +async def run_sync( + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + abandon_on_cancel: bool = False, + cancellable: bool | None = None, + limiter: CapacityLimiter | None = None, +) -> T_Retval: + """ + Call the given function with the given arguments in a worker thread. + + If the ``cancellable`` option is enabled and the task waiting for its completion is + cancelled, the thread will still run its course but its return value (or any raised + exception) will be ignored. 
+ + :param func: a callable + :param args: positional arguments for the callable + :param abandon_on_cancel: ``True`` to abandon the thread (leaving it to run + unchecked on own) if the host task is cancelled, ``False`` to ignore + cancellations in the host task until the operation has completed in the worker + thread + :param cancellable: deprecated alias of ``abandon_on_cancel``; will override + ``abandon_on_cancel`` if both parameters are passed + :param limiter: capacity limiter to use to limit the total amount of threads running + (if omitted, the default limiter is used) + :return: an awaitable that yields the return value of the function. + + """ + if cancellable is not None: + abandon_on_cancel = cancellable + warn( + "The `cancellable=` keyword argument to `anyio.to_thread.run_sync` is " + "deprecated since AnyIO 4.1.0; use `abandon_on_cancel=` instead", + DeprecationWarning, + stacklevel=2, + ) + + return await get_async_backend().run_sync_in_worker_thread( + func, args, abandon_on_cancel=abandon_on_cancel, limiter=limiter + ) + + +def current_default_thread_limiter() -> CapacityLimiter: + """ + Return the capacity limiter that is used by default to limit the number of + concurrent threads. 
+ + :return: a capacity limiter object + + """ + return get_async_backend().current_default_thread_limiter() diff --git a/.venv/Lib/site-packages/attr/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/__init__.cpython-311.pyc index 5ecced3d..5634e949 100644 Binary files a/.venv/Lib/site-packages/attr/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/attr/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/_cmp.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/_cmp.cpython-311.pyc index d4283ff5..cb9b92db 100644 Binary files a/.venv/Lib/site-packages/attr/__pycache__/_cmp.cpython-311.pyc and b/.venv/Lib/site-packages/attr/__pycache__/_cmp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/_compat.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/_compat.cpython-311.pyc index 6053fc78..60edb5aa 100644 Binary files a/.venv/Lib/site-packages/attr/__pycache__/_compat.cpython-311.pyc and b/.venv/Lib/site-packages/attr/__pycache__/_compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/_config.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/_config.cpython-311.pyc index 6869acf8..0f77978c 100644 Binary files a/.venv/Lib/site-packages/attr/__pycache__/_config.cpython-311.pyc and b/.venv/Lib/site-packages/attr/__pycache__/_config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/_funcs.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/_funcs.cpython-311.pyc index 518baa41..e9a36243 100644 Binary files a/.venv/Lib/site-packages/attr/__pycache__/_funcs.cpython-311.pyc and b/.venv/Lib/site-packages/attr/__pycache__/_funcs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/_make.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/_make.cpython-311.pyc index aa95eb7f..f328931f 100644 Binary files 
a/.venv/Lib/site-packages/attr/__pycache__/_make.cpython-311.pyc and b/.venv/Lib/site-packages/attr/__pycache__/_make.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/_next_gen.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/_next_gen.cpython-311.pyc index c6403e75..0e2824e3 100644 Binary files a/.venv/Lib/site-packages/attr/__pycache__/_next_gen.cpython-311.pyc and b/.venv/Lib/site-packages/attr/__pycache__/_next_gen.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/_version_info.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/_version_info.cpython-311.pyc index b284217e..a2bc9812 100644 Binary files a/.venv/Lib/site-packages/attr/__pycache__/_version_info.cpython-311.pyc and b/.venv/Lib/site-packages/attr/__pycache__/_version_info.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/converters.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/converters.cpython-311.pyc index dfe5e14f..ed0aaaef 100644 Binary files a/.venv/Lib/site-packages/attr/__pycache__/converters.cpython-311.pyc and b/.venv/Lib/site-packages/attr/__pycache__/converters.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/exceptions.cpython-311.pyc index 144e1eda..2f74a614 100644 Binary files a/.venv/Lib/site-packages/attr/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/attr/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/filters.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/filters.cpython-311.pyc index 38c927ab..726cd178 100644 Binary files a/.venv/Lib/site-packages/attr/__pycache__/filters.cpython-311.pyc and b/.venv/Lib/site-packages/attr/__pycache__/filters.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/setters.cpython-311.pyc 
b/.venv/Lib/site-packages/attr/__pycache__/setters.cpython-311.pyc index 203c8cff..23858505 100644 Binary files a/.venv/Lib/site-packages/attr/__pycache__/setters.cpython-311.pyc and b/.venv/Lib/site-packages/attr/__pycache__/setters.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/validators.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/validators.cpython-311.pyc index 1e674ca7..a0569fff 100644 Binary files a/.venv/Lib/site-packages/attr/__pycache__/validators.cpython-311.pyc and b/.venv/Lib/site-packages/attr/__pycache__/validators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/audioread/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/audioread/__pycache__/__init__.cpython-311.pyc index c8de2192..e17dee93 100644 Binary files a/.venv/Lib/site-packages/audioread/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/audioread/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/audioread/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/audioread/__pycache__/base.cpython-311.pyc index 338399e9..0c345dad 100644 Binary files a/.venv/Lib/site-packages/audioread/__pycache__/base.cpython-311.pyc and b/.venv/Lib/site-packages/audioread/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/audioread/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/audioread/__pycache__/exceptions.cpython-311.pyc index dd281414..b3c9c51e 100644 Binary files a/.venv/Lib/site-packages/audioread/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/audioread/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/audioread/__pycache__/ffdec.cpython-311.pyc b/.venv/Lib/site-packages/audioread/__pycache__/ffdec.cpython-311.pyc index 25865ef0..88aba3bf 100644 Binary files a/.venv/Lib/site-packages/audioread/__pycache__/ffdec.cpython-311.pyc and 
b/.venv/Lib/site-packages/audioread/__pycache__/ffdec.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/audioread/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/audioread/__pycache__/version.cpython-311.pyc index 310d179f..fdeefeb0 100644 Binary files a/.venv/Lib/site-packages/audioread/__pycache__/version.cpython-311.pyc and b/.venv/Lib/site-packages/audioread/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/babel/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/babel/__pycache__/__init__.cpython-311.pyc index 8e753420..64196ec4 100644 Binary files a/.venv/Lib/site-packages/babel/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/babel/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/babel/__pycache__/core.cpython-311.pyc b/.venv/Lib/site-packages/babel/__pycache__/core.cpython-311.pyc index 23ae80b9..9288fff0 100644 Binary files a/.venv/Lib/site-packages/babel/__pycache__/core.cpython-311.pyc and b/.venv/Lib/site-packages/babel/__pycache__/core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/babel/__pycache__/localedata.cpython-311.pyc b/.venv/Lib/site-packages/babel/__pycache__/localedata.cpython-311.pyc index 3a5b9c37..7413c8a7 100644 Binary files a/.venv/Lib/site-packages/babel/__pycache__/localedata.cpython-311.pyc and b/.venv/Lib/site-packages/babel/__pycache__/localedata.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/babel/__pycache__/numbers.cpython-311.pyc b/.venv/Lib/site-packages/babel/__pycache__/numbers.cpython-311.pyc index 71e379d0..da527c40 100644 Binary files a/.venv/Lib/site-packages/babel/__pycache__/numbers.cpython-311.pyc and b/.venv/Lib/site-packages/babel/__pycache__/numbers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/babel/__pycache__/plural.cpython-311.pyc b/.venv/Lib/site-packages/babel/__pycache__/plural.cpython-311.pyc index 8667b597..1363f3b2 100644 Binary files 
a/.venv/Lib/site-packages/babel/__pycache__/plural.cpython-311.pyc and b/.venv/Lib/site-packages/babel/__pycache__/plural.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/bangla/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/bangla/__pycache__/__init__.cpython-311.pyc index c6b4477c..8e87b1c9 100644 Binary files a/.venv/Lib/site-packages/bangla/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/bangla/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/blis/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/blis/__pycache__/__init__.cpython-311.pyc index e59161b6..0568f9ce 100644 Binary files a/.venv/Lib/site-packages/blis/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/blis/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/bnnumerizer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/bnnumerizer/__pycache__/__init__.cpython-311.pyc index 9e1405cb..4a998236 100644 Binary files a/.venv/Lib/site-packages/bnnumerizer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/bnnumerizer/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/bnnumerizer/__pycache__/numerizer.cpython-311.pyc b/.venv/Lib/site-packages/bnnumerizer/__pycache__/numerizer.cpython-311.pyc index a7e1b06d..38247774 100644 Binary files a/.venv/Lib/site-packages/bnnumerizer/__pycache__/numerizer.cpython-311.pyc and b/.venv/Lib/site-packages/bnnumerizer/__pycache__/numerizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/bnnumerizer/__pycache__/words.cpython-311.pyc b/.venv/Lib/site-packages/bnnumerizer/__pycache__/words.cpython-311.pyc index beac3554..8b3df50a 100644 Binary files a/.venv/Lib/site-packages/bnnumerizer/__pycache__/words.cpython-311.pyc and b/.venv/Lib/site-packages/bnnumerizer/__pycache__/words.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/bnunicodenormalizer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/bnunicodenormalizer/__pycache__/__init__.cpython-311.pyc index ff7f49c8..0e7cd574 100644 Binary files a/.venv/Lib/site-packages/bnunicodenormalizer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/bnunicodenormalizer/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/bnunicodenormalizer/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/bnunicodenormalizer/__pycache__/base.cpython-311.pyc index 577b2c61..685f2e90 100644 Binary files a/.venv/Lib/site-packages/bnunicodenormalizer/__pycache__/base.cpython-311.pyc and b/.venv/Lib/site-packages/bnunicodenormalizer/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/bnunicodenormalizer/__pycache__/indic.cpython-311.pyc b/.venv/Lib/site-packages/bnunicodenormalizer/__pycache__/indic.cpython-311.pyc index 4f48d1a1..b498b812 100644 Binary files a/.venv/Lib/site-packages/bnunicodenormalizer/__pycache__/indic.cpython-311.pyc and b/.venv/Lib/site-packages/bnunicodenormalizer/__pycache__/indic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/bnunicodenormalizer/__pycache__/langs.cpython-311.pyc b/.venv/Lib/site-packages/bnunicodenormalizer/__pycache__/langs.cpython-311.pyc index b054c2fd..4474bdf4 100644 Binary files a/.venv/Lib/site-packages/bnunicodenormalizer/__pycache__/langs.cpython-311.pyc and b/.venv/Lib/site-packages/bnunicodenormalizer/__pycache__/langs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/bnunicodenormalizer/__pycache__/normalizer.cpython-311.pyc b/.venv/Lib/site-packages/bnunicodenormalizer/__pycache__/normalizer.cpython-311.pyc index ade88c2d..06ea70d9 100644 Binary files a/.venv/Lib/site-packages/bnunicodenormalizer/__pycache__/normalizer.cpython-311.pyc and b/.venv/Lib/site-packages/bnunicodenormalizer/__pycache__/normalizer.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/catalogue/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/catalogue/__pycache__/__init__.cpython-311.pyc index 3091b7fa..171358b5 100644 Binary files a/.venv/Lib/site-packages/catalogue/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/catalogue/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/certifi/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/certifi/__pycache__/__init__.cpython-311.pyc index 6e35f038..3c016295 100644 Binary files a/.venv/Lib/site-packages/certifi/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/certifi/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/certifi/__pycache__/core.cpython-311.pyc b/.venv/Lib/site-packages/certifi/__pycache__/core.cpython-311.pyc index 8c30ef13..1f58ce7a 100644 Binary files a/.venv/Lib/site-packages/certifi/__pycache__/core.cpython-311.pyc and b/.venv/Lib/site-packages/certifi/__pycache__/core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/cffi/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/cffi/__pycache__/__init__.cpython-311.pyc index 78595e24..4deb442e 100644 Binary files a/.venv/Lib/site-packages/cffi/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/cffi/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/cffi/__pycache__/api.cpython-311.pyc b/.venv/Lib/site-packages/cffi/__pycache__/api.cpython-311.pyc index 1f3f006f..087df36d 100644 Binary files a/.venv/Lib/site-packages/cffi/__pycache__/api.cpython-311.pyc and b/.venv/Lib/site-packages/cffi/__pycache__/api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/cffi/__pycache__/commontypes.cpython-311.pyc b/.venv/Lib/site-packages/cffi/__pycache__/commontypes.cpython-311.pyc index 3466c21a..d5abb8ae 100644 Binary files a/.venv/Lib/site-packages/cffi/__pycache__/commontypes.cpython-311.pyc and 
b/.venv/Lib/site-packages/cffi/__pycache__/commontypes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/cffi/__pycache__/cparser.cpython-311.pyc b/.venv/Lib/site-packages/cffi/__pycache__/cparser.cpython-311.pyc index e8a9848e..e74c01ac 100644 Binary files a/.venv/Lib/site-packages/cffi/__pycache__/cparser.cpython-311.pyc and b/.venv/Lib/site-packages/cffi/__pycache__/cparser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/cffi/__pycache__/error.cpython-311.pyc b/.venv/Lib/site-packages/cffi/__pycache__/error.cpython-311.pyc index 2b83bc84..460d47ac 100644 Binary files a/.venv/Lib/site-packages/cffi/__pycache__/error.cpython-311.pyc and b/.venv/Lib/site-packages/cffi/__pycache__/error.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/cffi/__pycache__/lock.cpython-311.pyc b/.venv/Lib/site-packages/cffi/__pycache__/lock.cpython-311.pyc index fa2d0dd7..5185bf3d 100644 Binary files a/.venv/Lib/site-packages/cffi/__pycache__/lock.cpython-311.pyc and b/.venv/Lib/site-packages/cffi/__pycache__/lock.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/cffi/__pycache__/model.cpython-311.pyc b/.venv/Lib/site-packages/cffi/__pycache__/model.cpython-311.pyc index 390c1af0..c7e407a0 100644 Binary files a/.venv/Lib/site-packages/cffi/__pycache__/model.cpython-311.pyc and b/.venv/Lib/site-packages/cffi/__pycache__/model.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/charset_normalizer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/charset_normalizer/__pycache__/__init__.cpython-311.pyc index fcfb4c00..9c5c7e20 100644 Binary files a/.venv/Lib/site-packages/charset_normalizer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/charset_normalizer/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/charset_normalizer/__pycache__/api.cpython-311.pyc b/.venv/Lib/site-packages/charset_normalizer/__pycache__/api.cpython-311.pyc index b1088c8e..e4b5f1bd 100644 Binary files 
a/.venv/Lib/site-packages/charset_normalizer/__pycache__/api.cpython-311.pyc and b/.venv/Lib/site-packages/charset_normalizer/__pycache__/api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/charset_normalizer/__pycache__/cd.cpython-311.pyc b/.venv/Lib/site-packages/charset_normalizer/__pycache__/cd.cpython-311.pyc index eb65af7b..966548c9 100644 Binary files a/.venv/Lib/site-packages/charset_normalizer/__pycache__/cd.cpython-311.pyc and b/.venv/Lib/site-packages/charset_normalizer/__pycache__/cd.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/charset_normalizer/__pycache__/constant.cpython-311.pyc b/.venv/Lib/site-packages/charset_normalizer/__pycache__/constant.cpython-311.pyc index 410b5036..a3a9f485 100644 Binary files a/.venv/Lib/site-packages/charset_normalizer/__pycache__/constant.cpython-311.pyc and b/.venv/Lib/site-packages/charset_normalizer/__pycache__/constant.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/charset_normalizer/__pycache__/legacy.cpython-311.pyc b/.venv/Lib/site-packages/charset_normalizer/__pycache__/legacy.cpython-311.pyc index 8bf34b57..d9911953 100644 Binary files a/.venv/Lib/site-packages/charset_normalizer/__pycache__/legacy.cpython-311.pyc and b/.venv/Lib/site-packages/charset_normalizer/__pycache__/legacy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/charset_normalizer/__pycache__/models.cpython-311.pyc b/.venv/Lib/site-packages/charset_normalizer/__pycache__/models.cpython-311.pyc index 1b6869fc..9accea59 100644 Binary files a/.venv/Lib/site-packages/charset_normalizer/__pycache__/models.cpython-311.pyc and b/.venv/Lib/site-packages/charset_normalizer/__pycache__/models.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/charset_normalizer/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/charset_normalizer/__pycache__/utils.cpython-311.pyc index dcd5a612..bfee1b70 100644 Binary files a/.venv/Lib/site-packages/charset_normalizer/__pycache__/utils.cpython-311.pyc 
and b/.venv/Lib/site-packages/charset_normalizer/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/charset_normalizer/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/charset_normalizer/__pycache__/version.cpython-311.pyc index d112e9d3..9a62be3c 100644 Binary files a/.venv/Lib/site-packages/charset_normalizer/__pycache__/version.cpython-311.pyc and b/.venv/Lib/site-packages/charset_normalizer/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/click/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/click/__pycache__/__init__.cpython-311.pyc index ecec0111..57a84d3c 100644 Binary files a/.venv/Lib/site-packages/click/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/click/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/click/__pycache__/_compat.cpython-311.pyc b/.venv/Lib/site-packages/click/__pycache__/_compat.cpython-311.pyc index 3445d1a5..7bf6a031 100644 Binary files a/.venv/Lib/site-packages/click/__pycache__/_compat.cpython-311.pyc and b/.venv/Lib/site-packages/click/__pycache__/_compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/click/__pycache__/_winconsole.cpython-311.pyc b/.venv/Lib/site-packages/click/__pycache__/_winconsole.cpython-311.pyc index 701d3256..7b997e38 100644 Binary files a/.venv/Lib/site-packages/click/__pycache__/_winconsole.cpython-311.pyc and b/.venv/Lib/site-packages/click/__pycache__/_winconsole.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/click/__pycache__/core.cpython-311.pyc b/.venv/Lib/site-packages/click/__pycache__/core.cpython-311.pyc index 5ced62f4..eb957335 100644 Binary files a/.venv/Lib/site-packages/click/__pycache__/core.cpython-311.pyc and b/.venv/Lib/site-packages/click/__pycache__/core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/click/__pycache__/decorators.cpython-311.pyc b/.venv/Lib/site-packages/click/__pycache__/decorators.cpython-311.pyc 
index b5eedd45..74cee9f9 100644 Binary files a/.venv/Lib/site-packages/click/__pycache__/decorators.cpython-311.pyc and b/.venv/Lib/site-packages/click/__pycache__/decorators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/click/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/click/__pycache__/exceptions.cpython-311.pyc index 1481a018..cba1586c 100644 Binary files a/.venv/Lib/site-packages/click/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/click/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/click/__pycache__/formatting.cpython-311.pyc b/.venv/Lib/site-packages/click/__pycache__/formatting.cpython-311.pyc index 59834959..43e43cf1 100644 Binary files a/.venv/Lib/site-packages/click/__pycache__/formatting.cpython-311.pyc and b/.venv/Lib/site-packages/click/__pycache__/formatting.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/click/__pycache__/globals.cpython-311.pyc b/.venv/Lib/site-packages/click/__pycache__/globals.cpython-311.pyc index a45248bd..c825adbe 100644 Binary files a/.venv/Lib/site-packages/click/__pycache__/globals.cpython-311.pyc and b/.venv/Lib/site-packages/click/__pycache__/globals.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/click/__pycache__/parser.cpython-311.pyc b/.venv/Lib/site-packages/click/__pycache__/parser.cpython-311.pyc index 8511694e..a748c8ba 100644 Binary files a/.venv/Lib/site-packages/click/__pycache__/parser.cpython-311.pyc and b/.venv/Lib/site-packages/click/__pycache__/parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/click/__pycache__/termui.cpython-311.pyc b/.venv/Lib/site-packages/click/__pycache__/termui.cpython-311.pyc index 39cba918..1b7856f5 100644 Binary files a/.venv/Lib/site-packages/click/__pycache__/termui.cpython-311.pyc and b/.venv/Lib/site-packages/click/__pycache__/termui.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/click/__pycache__/types.cpython-311.pyc 
b/.venv/Lib/site-packages/click/__pycache__/types.cpython-311.pyc index 94c42400..741d538b 100644 Binary files a/.venv/Lib/site-packages/click/__pycache__/types.cpython-311.pyc and b/.venv/Lib/site-packages/click/__pycache__/types.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/click/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/click/__pycache__/utils.cpython-311.pyc index 9d8ce1c6..47272e8a 100644 Binary files a/.venv/Lib/site-packages/click/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/click/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/colorama/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/colorama/__pycache__/__init__.cpython-311.pyc index 1bdd8cd6..cf3d1d60 100644 Binary files a/.venv/Lib/site-packages/colorama/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/colorama/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/colorama/__pycache__/ansi.cpython-311.pyc b/.venv/Lib/site-packages/colorama/__pycache__/ansi.cpython-311.pyc index 4b2d320b..2d4cd644 100644 Binary files a/.venv/Lib/site-packages/colorama/__pycache__/ansi.cpython-311.pyc and b/.venv/Lib/site-packages/colorama/__pycache__/ansi.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/colorama/__pycache__/ansitowin32.cpython-311.pyc b/.venv/Lib/site-packages/colorama/__pycache__/ansitowin32.cpython-311.pyc index 53b1686b..4f59b8bc 100644 Binary files a/.venv/Lib/site-packages/colorama/__pycache__/ansitowin32.cpython-311.pyc and b/.venv/Lib/site-packages/colorama/__pycache__/ansitowin32.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/colorama/__pycache__/initialise.cpython-311.pyc b/.venv/Lib/site-packages/colorama/__pycache__/initialise.cpython-311.pyc index 93b979c5..6ca720c4 100644 Binary files a/.venv/Lib/site-packages/colorama/__pycache__/initialise.cpython-311.pyc and 
b/.venv/Lib/site-packages/colorama/__pycache__/initialise.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/colorama/__pycache__/win32.cpython-311.pyc b/.venv/Lib/site-packages/colorama/__pycache__/win32.cpython-311.pyc index 22989d6b..432570b1 100644 Binary files a/.venv/Lib/site-packages/colorama/__pycache__/win32.cpython-311.pyc and b/.venv/Lib/site-packages/colorama/__pycache__/win32.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/colorama/__pycache__/winterm.cpython-311.pyc b/.venv/Lib/site-packages/colorama/__pycache__/winterm.cpython-311.pyc index e628490e..cca98bf5 100644 Binary files a/.venv/Lib/site-packages/colorama/__pycache__/winterm.cpython-311.pyc and b/.venv/Lib/site-packages/colorama/__pycache__/winterm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/confection/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/confection/__pycache__/__init__.cpython-311.pyc index 970150be..58ff5efd 100644 Binary files a/.venv/Lib/site-packages/confection/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/confection/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/confection/__pycache__/util.cpython-311.pyc b/.venv/Lib/site-packages/confection/__pycache__/util.cpython-311.pyc index 794667d2..45322871 100644 Binary files a/.venv/Lib/site-packages/confection/__pycache__/util.cpython-311.pyc and b/.venv/Lib/site-packages/confection/__pycache__/util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/coqpit/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/coqpit/__pycache__/__init__.cpython-311.pyc index 6c4ea9ba..beb2c5c3 100644 Binary files a/.venv/Lib/site-packages/coqpit/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/coqpit/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/coqpit/__pycache__/coqpit.cpython-311.pyc b/.venv/Lib/site-packages/coqpit/__pycache__/coqpit.cpython-311.pyc index 
44693a11..1cff2d34 100644 Binary files a/.venv/Lib/site-packages/coqpit/__pycache__/coqpit.cpython-311.pyc and b/.venv/Lib/site-packages/coqpit/__pycache__/coqpit.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/cycler/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/cycler/__pycache__/__init__.cpython-311.pyc index 2e33fc23..6be7c746 100644 Binary files a/.venv/Lib/site-packages/cycler/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/cycler/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/cymem/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/cymem/__pycache__/__init__.cpython-311.pyc index 0d16b5c6..9be72ee6 100644 Binary files a/.venv/Lib/site-packages/cymem/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/cymem/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/cymem/__pycache__/about.cpython-311.pyc b/.venv/Lib/site-packages/cymem/__pycache__/about.cpython-311.pyc index 212aedca..f83418da 100644 Binary files a/.venv/Lib/site-packages/cymem/__pycache__/about.cpython-311.pyc and b/.venv/Lib/site-packages/cymem/__pycache__/about.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateparser/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/dateparser/__pycache__/__init__.cpython-311.pyc index 72f32ba1..cd65dfa8 100644 Binary files a/.venv/Lib/site-packages/dateparser/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/dateparser/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateparser/__pycache__/conf.cpython-311.pyc b/.venv/Lib/site-packages/dateparser/__pycache__/conf.cpython-311.pyc index 273b04bd..ca02d032 100644 Binary files a/.venv/Lib/site-packages/dateparser/__pycache__/conf.cpython-311.pyc and b/.venv/Lib/site-packages/dateparser/__pycache__/conf.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateparser/__pycache__/date.cpython-311.pyc 
b/.venv/Lib/site-packages/dateparser/__pycache__/date.cpython-311.pyc index 7f39a437..9bd24b73 100644 Binary files a/.venv/Lib/site-packages/dateparser/__pycache__/date.cpython-311.pyc and b/.venv/Lib/site-packages/dateparser/__pycache__/date.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateparser/__pycache__/date_parser.cpython-311.pyc b/.venv/Lib/site-packages/dateparser/__pycache__/date_parser.cpython-311.pyc index baaa5b2f..e13e30e4 100644 Binary files a/.venv/Lib/site-packages/dateparser/__pycache__/date_parser.cpython-311.pyc and b/.venv/Lib/site-packages/dateparser/__pycache__/date_parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateparser/__pycache__/freshness_date_parser.cpython-311.pyc b/.venv/Lib/site-packages/dateparser/__pycache__/freshness_date_parser.cpython-311.pyc index 845bc2c3..7465a3cb 100644 Binary files a/.venv/Lib/site-packages/dateparser/__pycache__/freshness_date_parser.cpython-311.pyc and b/.venv/Lib/site-packages/dateparser/__pycache__/freshness_date_parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateparser/__pycache__/parser.cpython-311.pyc b/.venv/Lib/site-packages/dateparser/__pycache__/parser.cpython-311.pyc index d49700ce..ecf2c816 100644 Binary files a/.venv/Lib/site-packages/dateparser/__pycache__/parser.cpython-311.pyc and b/.venv/Lib/site-packages/dateparser/__pycache__/parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateparser/__pycache__/timezone_parser.cpython-311.pyc b/.venv/Lib/site-packages/dateparser/__pycache__/timezone_parser.cpython-311.pyc index 3d25c2d4..20c1d600 100644 Binary files a/.venv/Lib/site-packages/dateparser/__pycache__/timezone_parser.cpython-311.pyc and b/.venv/Lib/site-packages/dateparser/__pycache__/timezone_parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateparser/__pycache__/timezones.cpython-311.pyc b/.venv/Lib/site-packages/dateparser/__pycache__/timezones.cpython-311.pyc index 412653a2..78e9f7a6 100644 
Binary files a/.venv/Lib/site-packages/dateparser/__pycache__/timezones.cpython-311.pyc and b/.venv/Lib/site-packages/dateparser/__pycache__/timezones.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateparser/custom_language_detection/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/dateparser/custom_language_detection/__pycache__/__init__.cpython-311.pyc index 539376b1..0f443eda 100644 Binary files a/.venv/Lib/site-packages/dateparser/custom_language_detection/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/dateparser/custom_language_detection/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateparser/custom_language_detection/__pycache__/language_mapping.cpython-311.pyc b/.venv/Lib/site-packages/dateparser/custom_language_detection/__pycache__/language_mapping.cpython-311.pyc index d9a855c9..ee6627b7 100644 Binary files a/.venv/Lib/site-packages/dateparser/custom_language_detection/__pycache__/language_mapping.cpython-311.pyc and b/.venv/Lib/site-packages/dateparser/custom_language_detection/__pycache__/language_mapping.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateparser/data/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/dateparser/data/__pycache__/__init__.cpython-311.pyc index f2d87ddc..c98e6026 100644 Binary files a/.venv/Lib/site-packages/dateparser/data/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/dateparser/data/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateparser/data/__pycache__/languages_info.cpython-311.pyc b/.venv/Lib/site-packages/dateparser/data/__pycache__/languages_info.cpython-311.pyc index 6ea094c0..dd130950 100644 Binary files a/.venv/Lib/site-packages/dateparser/data/__pycache__/languages_info.cpython-311.pyc and b/.venv/Lib/site-packages/dateparser/data/__pycache__/languages_info.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/dateparser/data/date_translation_data/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/dateparser/data/date_translation_data/__pycache__/__init__.cpython-311.pyc index 77b59758..8e209438 100644 Binary files a/.venv/Lib/site-packages/dateparser/data/date_translation_data/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/dateparser/data/date_translation_data/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateparser/languages/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/dateparser/languages/__pycache__/__init__.cpython-311.pyc index f72613ac..8102e7de 100644 Binary files a/.venv/Lib/site-packages/dateparser/languages/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/dateparser/languages/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateparser/languages/__pycache__/dictionary.cpython-311.pyc b/.venv/Lib/site-packages/dateparser/languages/__pycache__/dictionary.cpython-311.pyc index 8510de20..cab53e5e 100644 Binary files a/.venv/Lib/site-packages/dateparser/languages/__pycache__/dictionary.cpython-311.pyc and b/.venv/Lib/site-packages/dateparser/languages/__pycache__/dictionary.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateparser/languages/__pycache__/loader.cpython-311.pyc b/.venv/Lib/site-packages/dateparser/languages/__pycache__/loader.cpython-311.pyc index 31d9fa64..bea61b23 100644 Binary files a/.venv/Lib/site-packages/dateparser/languages/__pycache__/loader.cpython-311.pyc and b/.venv/Lib/site-packages/dateparser/languages/__pycache__/loader.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateparser/languages/__pycache__/locale.cpython-311.pyc b/.venv/Lib/site-packages/dateparser/languages/__pycache__/locale.cpython-311.pyc index fdac0716..733fe59b 100644 Binary files a/.venv/Lib/site-packages/dateparser/languages/__pycache__/locale.cpython-311.pyc and 
b/.venv/Lib/site-packages/dateparser/languages/__pycache__/locale.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateparser/utils/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/dateparser/utils/__pycache__/__init__.cpython-311.pyc index 29b02e7a..373973a2 100644 Binary files a/.venv/Lib/site-packages/dateparser/utils/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/dateparser/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateparser/utils/__pycache__/strptime.cpython-311.pyc b/.venv/Lib/site-packages/dateparser/utils/__pycache__/strptime.cpython-311.pyc index 65f246b8..5d239ae7 100644 Binary files a/.venv/Lib/site-packages/dateparser/utils/__pycache__/strptime.cpython-311.pyc and b/.venv/Lib/site-packages/dateparser/utils/__pycache__/strptime.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateparser_data/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/dateparser_data/__pycache__/__init__.cpython-311.pyc index 81288215..878a37c7 100644 Binary files a/.venv/Lib/site-packages/dateparser_data/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/dateparser_data/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateparser_data/__pycache__/settings.cpython-311.pyc b/.venv/Lib/site-packages/dateparser_data/__pycache__/settings.cpython-311.pyc index b9611c64..9d57a5cd 100644 Binary files a/.venv/Lib/site-packages/dateparser_data/__pycache__/settings.cpython-311.pyc and b/.venv/Lib/site-packages/dateparser_data/__pycache__/settings.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateutil/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/dateutil/__pycache__/__init__.cpython-311.pyc index 0e02bf9e..e3513072 100644 Binary files a/.venv/Lib/site-packages/dateutil/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/dateutil/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/dateutil/__pycache__/_common.cpython-311.pyc b/.venv/Lib/site-packages/dateutil/__pycache__/_common.cpython-311.pyc index ce4171fd..a534719d 100644 Binary files a/.venv/Lib/site-packages/dateutil/__pycache__/_common.cpython-311.pyc and b/.venv/Lib/site-packages/dateutil/__pycache__/_common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateutil/__pycache__/_version.cpython-311.pyc b/.venv/Lib/site-packages/dateutil/__pycache__/_version.cpython-311.pyc index b8e7d554..bccd276f 100644 Binary files a/.venv/Lib/site-packages/dateutil/__pycache__/_version.cpython-311.pyc and b/.venv/Lib/site-packages/dateutil/__pycache__/_version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateutil/__pycache__/easter.cpython-311.pyc b/.venv/Lib/site-packages/dateutil/__pycache__/easter.cpython-311.pyc index 53d00754..18928ca5 100644 Binary files a/.venv/Lib/site-packages/dateutil/__pycache__/easter.cpython-311.pyc and b/.venv/Lib/site-packages/dateutil/__pycache__/easter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateutil/__pycache__/relativedelta.cpython-311.pyc b/.venv/Lib/site-packages/dateutil/__pycache__/relativedelta.cpython-311.pyc index b06024a4..31e0f9d2 100644 Binary files a/.venv/Lib/site-packages/dateutil/__pycache__/relativedelta.cpython-311.pyc and b/.venv/Lib/site-packages/dateutil/__pycache__/relativedelta.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateutil/__pycache__/rrule.cpython-311.pyc b/.venv/Lib/site-packages/dateutil/__pycache__/rrule.cpython-311.pyc index 2e48aa20..2cf025aa 100644 Binary files a/.venv/Lib/site-packages/dateutil/__pycache__/rrule.cpython-311.pyc and b/.venv/Lib/site-packages/dateutil/__pycache__/rrule.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateutil/parser/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/dateutil/parser/__pycache__/__init__.cpython-311.pyc index e0599e2b..632e5726 100644 Binary files 
a/.venv/Lib/site-packages/dateutil/parser/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/dateutil/parser/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateutil/parser/__pycache__/_parser.cpython-311.pyc b/.venv/Lib/site-packages/dateutil/parser/__pycache__/_parser.cpython-311.pyc index 3c79a42e..0530e168 100644 Binary files a/.venv/Lib/site-packages/dateutil/parser/__pycache__/_parser.cpython-311.pyc and b/.venv/Lib/site-packages/dateutil/parser/__pycache__/_parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateutil/parser/__pycache__/isoparser.cpython-311.pyc b/.venv/Lib/site-packages/dateutil/parser/__pycache__/isoparser.cpython-311.pyc index ed0f529b..c9e20a9c 100644 Binary files a/.venv/Lib/site-packages/dateutil/parser/__pycache__/isoparser.cpython-311.pyc and b/.venv/Lib/site-packages/dateutil/parser/__pycache__/isoparser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateutil/tz/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/dateutil/tz/__pycache__/__init__.cpython-311.pyc index 6e4e384e..a02dac13 100644 Binary files a/.venv/Lib/site-packages/dateutil/tz/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/dateutil/tz/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateutil/tz/__pycache__/_common.cpython-311.pyc b/.venv/Lib/site-packages/dateutil/tz/__pycache__/_common.cpython-311.pyc index dc0dbdf3..f40cfc12 100644 Binary files a/.venv/Lib/site-packages/dateutil/tz/__pycache__/_common.cpython-311.pyc and b/.venv/Lib/site-packages/dateutil/tz/__pycache__/_common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateutil/tz/__pycache__/_factories.cpython-311.pyc b/.venv/Lib/site-packages/dateutil/tz/__pycache__/_factories.cpython-311.pyc index ad466684..f761adbb 100644 Binary files a/.venv/Lib/site-packages/dateutil/tz/__pycache__/_factories.cpython-311.pyc and 
b/.venv/Lib/site-packages/dateutil/tz/__pycache__/_factories.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateutil/tz/__pycache__/tz.cpython-311.pyc b/.venv/Lib/site-packages/dateutil/tz/__pycache__/tz.cpython-311.pyc index 8176e9b0..9673829d 100644 Binary files a/.venv/Lib/site-packages/dateutil/tz/__pycache__/tz.cpython-311.pyc and b/.venv/Lib/site-packages/dateutil/tz/__pycache__/tz.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/dateutil/tz/__pycache__/win.cpython-311.pyc b/.venv/Lib/site-packages/dateutil/tz/__pycache__/win.cpython-311.pyc index 4ad1cb82..45a23aaf 100644 Binary files a/.venv/Lib/site-packages/dateutil/tz/__pycache__/win.cpython-311.pyc and b/.venv/Lib/site-packages/dateutil/tz/__pycache__/win.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/decorator-4.4.2.dist-info/INSTALLER b/.venv/Lib/site-packages/decorator-4.4.2.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/Lib/site-packages/decorator-4.4.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/Lib/site-packages/decorator-5.1.1.dist-info/LICENSE.txt b/.venv/Lib/site-packages/decorator-4.4.2.dist-info/LICENSE.txt similarity index 100% rename from .venv/Lib/site-packages/decorator-5.1.1.dist-info/LICENSE.txt rename to .venv/Lib/site-packages/decorator-4.4.2.dist-info/LICENSE.txt diff --git a/.venv/Lib/site-packages/decorator-5.1.1.dist-info/METADATA b/.venv/Lib/site-packages/decorator-4.4.2.dist-info/METADATA similarity index 90% rename from .venv/Lib/site-packages/decorator-5.1.1.dist-info/METADATA rename to .venv/Lib/site-packages/decorator-4.4.2.dist-info/METADATA index df407f80..fd12277a 100644 --- a/.venv/Lib/site-packages/decorator-5.1.1.dist-info/METADATA +++ b/.venv/Lib/site-packages/decorator-4.4.2.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: decorator -Version: 5.1.1 +Version: 4.4.2 Summary: Decorators for Humans Home-page: https://github.com/micheles/decorator Author: Michele 
Simionato @@ -14,16 +14,20 @@ Classifier: License :: OSI Approved :: BSD License Classifier: Natural Language :: English Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Topic :: Software Development :: Libraries Classifier: Topic :: Utilities -Requires-Python: >=3.5 +Requires-Python: >=2.6, !=3.0.*, !=3.1.* Decorators for Humans ===================== diff --git a/.venv/Lib/site-packages/decorator-4.4.2.dist-info/RECORD b/.venv/Lib/site-packages/decorator-4.4.2.dist-info/RECORD new file mode 100644 index 00000000..4cbe963d --- /dev/null +++ b/.venv/Lib/site-packages/decorator-4.4.2.dist-info/RECORD @@ -0,0 +1,9 @@ +__pycache__/decorator.cpython-311.pyc,, +decorator-4.4.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +decorator-4.4.2.dist-info/LICENSE.txt,sha256=_RFmDKvwUyCCxFcGhi-vwpSQfsf44heBgkCkmZgGeC4,1309 +decorator-4.4.2.dist-info/METADATA,sha256=RYLh5Qy8XzYOcgCT6RsI_cTXG_PE1QvoAVT-u2vus80,4168 +decorator-4.4.2.dist-info/RECORD,, +decorator-4.4.2.dist-info/WHEEL,sha256=h_aVn5OB2IERUjMbi2pucmR_zzWJtk303YXvhh60NJ8,110 +decorator-4.4.2.dist-info/pbr.json,sha256=AL84oUUWQHwkd8OCPhLRo2NJjU5MDdmXMqRHv-posqs,47 
+decorator-4.4.2.dist-info/top_level.txt,sha256=Kn6eQjo83ctWxXVyBMOYt0_YpjRjBznKYVuNyuC_DSI,10 +decorator.py,sha256=aQ8Ozc-EK26xBTOXVR5A-8Szgx99_bhaexZSGNn38Yc,17222 diff --git a/.venv/Lib/site-packages/decorator-4.4.2.dist-info/WHEEL b/.venv/Lib/site-packages/decorator-4.4.2.dist-info/WHEEL new file mode 100644 index 00000000..78e6f69d --- /dev/null +++ b/.venv/Lib/site-packages/decorator-4.4.2.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.4) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/.venv/Lib/site-packages/decorator-5.1.1.dist-info/pbr.json b/.venv/Lib/site-packages/decorator-4.4.2.dist-info/pbr.json similarity index 100% rename from .venv/Lib/site-packages/decorator-5.1.1.dist-info/pbr.json rename to .venv/Lib/site-packages/decorator-4.4.2.dist-info/pbr.json diff --git a/.venv/Lib/site-packages/decorator-5.1.1.dist-info/top_level.txt b/.venv/Lib/site-packages/decorator-4.4.2.dist-info/top_level.txt similarity index 100% rename from .venv/Lib/site-packages/decorator-5.1.1.dist-info/top_level.txt rename to .venv/Lib/site-packages/decorator-4.4.2.dist-info/top_level.txt diff --git a/.venv/Lib/site-packages/decorator-5.1.1.dist-info/RECORD b/.venv/Lib/site-packages/decorator-5.1.1.dist-info/RECORD deleted file mode 100644 index be0b0208..00000000 --- a/.venv/Lib/site-packages/decorator-5.1.1.dist-info/RECORD +++ /dev/null @@ -1,9 +0,0 @@ -__pycache__/decorator.cpython-311.pyc,, -decorator-5.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -decorator-5.1.1.dist-info/LICENSE.txt,sha256=_RFmDKvwUyCCxFcGhi-vwpSQfsf44heBgkCkmZgGeC4,1309 -decorator-5.1.1.dist-info/METADATA,sha256=XAr2zbYpRxCkcPbsmg1oaiS5ea7mhTq-j-wb0XjuVho,3955 -decorator-5.1.1.dist-info/RECORD,, -decorator-5.1.1.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 -decorator-5.1.1.dist-info/pbr.json,sha256=AL84oUUWQHwkd8OCPhLRo2NJjU5MDdmXMqRHv-posqs,47 
-decorator-5.1.1.dist-info/top_level.txt,sha256=Kn6eQjo83ctWxXVyBMOYt0_YpjRjBznKYVuNyuC_DSI,10 -decorator.py,sha256=el5cAEgoTEpRQN65tOxGhElue-CccMv0xol-J2MwOc0,16752 diff --git a/.venv/Lib/site-packages/decorator.py b/.venv/Lib/site-packages/decorator.py index 2479b6f7..b1f8b567 100644 --- a/.venv/Lib/site-packages/decorator.py +++ b/.venv/Lib/site-packages/decorator.py @@ -1,6 +1,6 @@ # ######################### LICENSE ############################ # -# Copyright (c) 2005-2021, Michele Simionato +# Copyright (c) 2005-2018, Michele Simionato # All rights reserved. # Redistribution and use in source and binary forms, with or without @@ -28,26 +28,55 @@ # DAMAGE. """ -Decorator module, see -https://github.com/micheles/decorator/blob/master/docs/documentation.md +Decorator module, see http://pypi.python.org/pypi/decorator for the documentation. """ +from __future__ import print_function + import re import sys import inspect import operator import itertools -from contextlib import _GeneratorContextManager -from inspect import getfullargspec, iscoroutinefunction, isgeneratorfunction +import collections + +__version__ = '4.4.2' + +if sys.version_info >= (3,): + from inspect import getfullargspec + + def get_init(cls): + return cls.__init__ +else: + FullArgSpec = collections.namedtuple( + 'FullArgSpec', 'args varargs varkw defaults ' + 'kwonlyargs kwonlydefaults annotations') + + def getfullargspec(f): + "A quick and dirty replacement for getfullargspec for Python 2.X" + return FullArgSpec._make(inspect.getargspec(f) + ([], None, {})) + + def get_init(cls): + return cls.__init__.__func__ + +try: + iscoroutinefunction = inspect.iscoroutinefunction +except AttributeError: + # let's assume there are no coroutine functions in old Python + def iscoroutinefunction(f): + return False +try: + from inspect import isgeneratorfunction +except ImportError: + # assume no generator function in old Python versions + def isgeneratorfunction(caller): + return False -__version__ = '5.1.1' 
DEF = re.compile(r'\s*def\s*([_\w][_\w\d]*)\s*\(') -POS = inspect.Parameter.POSITIONAL_OR_KEYWORD -EMPTY = inspect.Parameter.empty -# this is not used anymore in the core, but kept for backward compatibility +# basic functionality class FunctionMaker(object): """ An object with the ability to create functions with a given signature. @@ -71,7 +100,7 @@ class FunctionMaker(object): self.name = '_lambda_' self.doc = func.__doc__ self.module = func.__module__ - if inspect.isroutine(func): + if inspect.isfunction(func): argspec = getfullargspec(func) self.annotations = getattr(func, '__annotations__', {}) for a in ('args', 'varargs', 'varkw', 'defaults', 'kwonlyargs', @@ -114,9 +143,7 @@ class FunctionMaker(object): raise TypeError('You are decorating a non function: %s' % func) def update(self, func, **kw): - """ - Update the signature of func with the data in self - """ + "Update the signature of func with the data in self" func.__name__ = self.name func.__doc__ = getattr(self, 'doc', None) func.__dict__ = getattr(self, 'dict', {}) @@ -133,9 +160,7 @@ class FunctionMaker(object): func.__dict__.update(kw) def make(self, src_templ, evaldict=None, addsource=False, **attrs): - """ - Make a new function from a given template and update the signature - """ + "Make a new function from a given template and update the signature" src = src_templ % vars(self) # expand name and signature evaldict = evaldict or {} mo = DEF.search(src) @@ -196,128 +221,106 @@ class FunctionMaker(object): return self.make(body, evaldict, addsource, **attrs) -def fix(args, kwargs, sig): +def decorate(func, caller, extras=()): """ - Fix args and kwargs to be consistent with the signature + decorate(func, caller) decorates a function using a caller. + If the caller is a generator function, the resulting function + will be a generator function. 
""" - ba = sig.bind(*args, **kwargs) - ba.apply_defaults() # needed for test_dan_schult - return ba.args, ba.kwargs + evaldict = dict(_call_=caller, _func_=func) + es = '' + for i, extra in enumerate(extras): + ex = '_e%d_' % i + evaldict[ex] = extra + es += ex + ', ' - -def decorate(func, caller, extras=(), kwsyntax=False): - """ - Decorates a function/generator/coroutine using a caller. - If kwsyntax is True calling the decorated functions with keyword - syntax will pass the named arguments inside the ``kw`` dictionary, - even if such argument are positional, similarly to what functools.wraps - does. By default kwsyntax is False and the the arguments are untouched. - """ - sig = inspect.signature(func) - if iscoroutinefunction(caller): - async def fun(*args, **kw): - if not kwsyntax: - args, kw = fix(args, kw, sig) - return await caller(func, *(extras + args), **kw) - elif isgeneratorfunction(caller): - def fun(*args, **kw): - if not kwsyntax: - args, kw = fix(args, kw, sig) - for res in caller(func, *(extras + args), **kw): - yield res + if '3.5' <= sys.version < '3.6': + # with Python 3.5 isgeneratorfunction returns True for all coroutines + # however we know that it is NOT possible to have a generator + # coroutine in python 3.5: PEP525 was not there yet + generatorcaller = isgeneratorfunction( + caller) and not iscoroutinefunction(caller) else: - def fun(*args, **kw): - if not kwsyntax: - args, kw = fix(args, kw, sig) - return caller(func, *(extras + args), **kw) - fun.__name__ = func.__name__ - fun.__doc__ = func.__doc__ - fun.__wrapped__ = func - fun.__signature__ = sig - fun.__qualname__ = func.__qualname__ - # builtin functions like defaultdict.__setitem__ lack many attributes - try: - fun.__defaults__ = func.__defaults__ - except AttributeError: - pass - try: - fun.__kwdefaults__ = func.__kwdefaults__ - except AttributeError: - pass - try: - fun.__annotations__ = func.__annotations__ - except AttributeError: - pass - try: - fun.__module__ = 
func.__module__ - except AttributeError: - pass - try: - fun.__dict__.update(func.__dict__) - except AttributeError: - pass + generatorcaller = isgeneratorfunction(caller) + if generatorcaller: + fun = FunctionMaker.create( + func, "for res in _call_(_func_, %s%%(shortsignature)s):\n" + " yield res" % es, evaldict, __wrapped__=func) + else: + fun = FunctionMaker.create( + func, "return _call_(_func_, %s%%(shortsignature)s)" % es, + evaldict, __wrapped__=func) + if hasattr(func, '__qualname__'): + fun.__qualname__ = func.__qualname__ return fun -def decoratorx(caller): - """ - A version of "decorator" implemented via "exec" and not via the - Signature object. Use this if you are want to preserve the `.__code__` - object properties (https://github.com/micheles/decorator/issues/129). - """ - def dec(func): - return FunctionMaker.create( - func, - "return _call_(_func_, %(shortsignature)s)", - dict(_call_=caller, _func_=func), - __wrapped__=func, __qualname__=func.__qualname__) - return dec - - -def decorator(caller, _func=None, kwsyntax=False): - """ - decorator(caller) converts a caller function into a decorator - """ +def decorator(caller, _func=None): + """decorator(caller) converts a caller function into a decorator""" if _func is not None: # return a decorated function # this is obsolete behavior; you should use decorate instead - return decorate(_func, caller, (), kwsyntax) + return decorate(_func, caller) # else return a decorator function - sig = inspect.signature(caller) - dec_params = [p for p in sig.parameters.values() if p.kind is POS] - - def dec(func=None, *args, **kw): - na = len(args) + 1 - extras = args + tuple(kw.get(p.name, p.default) - for p in dec_params[na:] - if p.default is not EMPTY) - if func is None: - return lambda func: decorate(func, caller, extras, kwsyntax) + defaultargs, defaults = '', () + if inspect.isclass(caller): + name = caller.__name__.lower() + doc = 'decorator(%s) converts functions/generators into ' \ + 'factories of %s 
objects' % (caller.__name__, caller.__name__) + elif inspect.isfunction(caller): + if caller.__name__ == '': + name = '_lambda_' else: - return decorate(func, caller, extras, kwsyntax) - dec.__signature__ = sig.replace(parameters=dec_params) - dec.__name__ = caller.__name__ - dec.__doc__ = caller.__doc__ - dec.__wrapped__ = caller - dec.__qualname__ = caller.__qualname__ - dec.__kwdefaults__ = getattr(caller, '__kwdefaults__', None) - dec.__dict__.update(caller.__dict__) + name = caller.__name__ + doc = caller.__doc__ + nargs = caller.__code__.co_argcount + ndefs = len(caller.__defaults__ or ()) + defaultargs = ', '.join(caller.__code__.co_varnames[nargs-ndefs:nargs]) + if defaultargs: + defaultargs += ',' + defaults = caller.__defaults__ + else: # assume caller is an object with a __call__ method + name = caller.__class__.__name__.lower() + doc = caller.__call__.__doc__ + evaldict = dict(_call=caller, _decorate_=decorate) + dec = FunctionMaker.create( + '%s(func, %s)' % (name, defaultargs), + 'if func is None: return lambda func: _decorate_(func, _call, (%s))\n' + 'return _decorate_(func, _call, (%s))' % (defaultargs, defaultargs), + evaldict, doc=doc, module=caller.__module__, __wrapped__=caller) + if defaults: + dec.__defaults__ = (None,) + defaults return dec # ####################### contextmanager ####################### # +try: # Python >= 3.2 + from contextlib import _GeneratorContextManager +except ImportError: # Python >= 2.5 + from contextlib import GeneratorContextManager as _GeneratorContextManager + class ContextManager(_GeneratorContextManager): - def __init__(self, g, *a, **k): - _GeneratorContextManager.__init__(self, g, a, k) - def __call__(self, func): - def caller(f, *a, **k): - with self.__class__(self.func, *self.args, **self.kwds): - return f(*a, **k) - return decorate(func, caller) + """Context manager decorator""" + return FunctionMaker.create( + func, "with _self_: return _func_(%(shortsignature)s)", + dict(_self_=self, _func_=func), 
__wrapped__=func) +init = getfullargspec(_GeneratorContextManager.__init__) +n_args = len(init.args) +if n_args == 2 and not init.varargs: # (self, genobj) Python 2.7 + def __init__(self, g, *a, **k): + return _GeneratorContextManager.__init__(self, g(*a, **k)) + ContextManager.__init__ = __init__ +elif n_args == 2 and init.varargs: # (self, gen, *a, **k) Python 3.4 + pass +elif n_args == 4: # (self, gen, args, kwds) Python 3.5 + def __init__(self, g, *a, **k): + return _GeneratorContextManager.__init__(self, g, a, k) + ContextManager.__init__ = __init__ + _contextmanager = decorator(ContextManager) diff --git a/.venv/Lib/site-packages/distro-1.9.0.dist-info/INSTALLER b/.venv/Lib/site-packages/distro-1.9.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/Lib/site-packages/distro-1.9.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/Lib/site-packages/distro-1.9.0.dist-info/LICENSE b/.venv/Lib/site-packages/distro-1.9.0.dist-info/LICENSE new file mode 100644 index 00000000..e06d2081 --- /dev/null +++ b/.venv/Lib/site-packages/distro-1.9.0.dist-info/LICENSE @@ -0,0 +1,202 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + diff --git a/.venv/Lib/site-packages/distro-1.9.0.dist-info/METADATA b/.venv/Lib/site-packages/distro-1.9.0.dist-info/METADATA new file mode 100644 index 00000000..9312e8e4 --- /dev/null +++ b/.venv/Lib/site-packages/distro-1.9.0.dist-info/METADATA @@ -0,0 +1,184 @@ +Metadata-Version: 2.1 +Name: distro +Version: 1.9.0 +Summary: Distro - an OS platform information API +Home-page: https://github.com/python-distro/distro +Author: Nir Cohen +Author-email: nir36g@gmail.com +License: Apache License, Version 2.0 +Platform: All +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: POSIX :: BSD +Classifier: Operating System :: POSIX :: BSD :: FreeBSD +Classifier: Operating System :: POSIX :: BSD :: NetBSD +Classifier: Operating System :: POSIX :: BSD :: OpenBSD +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language 
:: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: System :: Operating System +Requires-Python: >=3.6 +Description-Content-Type: text/markdown +License-File: LICENSE + +Distro - an OS platform information API +======================================= + +[![CI Status](https://github.com/python-distro/distro/workflows/CI/badge.svg)](https://github.com/python-distro/distro/actions/workflows/ci.yaml) +[![PyPI version](http://img.shields.io/pypi/v/distro.svg)](https://pypi.python.org/pypi/distro) +[![Supported Python Versions](https://img.shields.io/pypi/pyversions/distro.svg)](https://img.shields.io/pypi/pyversions/distro.svg) +[![Code Coverage](https://codecov.io/github/python-distro/distro/coverage.svg?branch=master)](https://codecov.io/github/python-distro/distro?branch=master) +[![Is Wheel](https://img.shields.io/pypi/wheel/distro.svg?style=flat)](https://pypi.python.org/pypi/distro) +[![Latest Github Release](https://readthedocs.org/projects/distro/badge/?version=stable)](http://distro.readthedocs.io/en/latest/) + +`distro` provides information about the +OS distribution it runs on, such as a reliable machine-readable ID, or +version information. + +It is the recommended replacement for Python's original +[`platform.linux_distribution`](https://docs.python.org/3.7/library/platform.html#platform.linux_distribution) +function (removed in Python 3.8). It also provides much more functionality +which isn't necessarily Python bound, like a command-line interface. + +Distro currently supports Linux and BSD based systems but [Windows and OS X support](https://github.com/python-distro/distro/issues/177) is also planned. 
+ +For Python 2.6 support, see https://github.com/python-distro/distro/tree/python2.6-support + +## Installation + +Installation of the latest released version from PyPI: + +```shell +pip install distro +``` + +Installation of the latest development version: + +```shell +pip install https://github.com/python-distro/distro/archive/master.tar.gz +``` + +To use as a standalone script, download `distro.py` directly: + +```shell +curl -O https://raw.githubusercontent.com/python-distro/distro/master/src/distro/distro.py +python distro.py +``` + +``distro`` is safe to vendor within projects that do not wish to add +dependencies. + +```shell +cd myproject +curl -O https://raw.githubusercontent.com/python-distro/distro/master/src/distro/distro.py +``` + +## Usage + +```bash +$ distro +Name: Antergos Linux +Version: 2015.10 (ISO-Rolling) +Codename: ISO-Rolling + +$ distro -j +{ + "codename": "ISO-Rolling", + "id": "antergos", + "like": "arch", + "version": "16.9", + "version_parts": { + "build_number": "", + "major": "16", + "minor": "9" + } +} + + +$ python +>>> import distro +>>> distro.name(pretty=True) +'CentOS Linux 8' +>>> distro.id() +'centos' +>>> distro.version(best=True) +'8.4.2105' +``` + + +## Documentation + +On top of the aforementioned API, several more functions are available. For a complete description of the +API, see the [latest API documentation](http://distro.readthedocs.org/en/latest/). + +## Background + +An alternative implementation became necessary because Python 3.5 deprecated +this function, and Python 3.8 removed it altogether. Its predecessor function +[`platform.dist`](https://docs.python.org/3.7/library/platform.html#platform.dist) +was already deprecated since Python 2.6 and removed in Python 3.8. Still, there +are many cases in which access to that information is needed. See [Python issue +1322](https://bugs.python.org/issue1322) for more information. 
+ +The `distro` package implements a robust and inclusive way of retrieving the +information about a distribution based on new standards and old methods, +namely from these data sources (from high to low precedence): + +* The os-release file `/etc/os-release` if present, with a fall-back on `/usr/lib/os-release` if needed. +* The output of the `lsb_release` command, if available. +* The distro release file (`/etc/*(-|_)(release|version)`), if present. +* The `uname` command for BSD based distrubtions. + + +## Python and Distribution Support + +`distro` is supported and tested on Python 3.6+ and PyPy and on any +distribution that provides one or more of the data sources covered. + +This package is tested with test data that mimics the exact behavior of the data sources of [a number of Linux distributions](https://github.com/python-distro/distro/tree/master/tests/resources/distros). + + +## Testing + +```shell +git clone git@github.com:python-distro/distro.git +cd distro +pip install tox +tox +``` + + +## Contributions + +Pull requests are always welcome to deal with specific distributions or just +for general merriment. + +See [CONTRIBUTIONS](https://github.com/python-distro/distro/blob/master/CONTRIBUTING.md) for contribution info. 
+ +Reference implementations for supporting additional distributions and file +formats can be found here: + +* https://github.com/saltstack/salt/blob/develop/salt/grains/core.py#L1172 +* https://github.com/chef/ohai/blob/master/lib/ohai/plugins/linux/platform.rb +* https://github.com/ansible/ansible/blob/devel/lib/ansible/module_utils/facts/system/distribution.py +* https://github.com/puppetlabs/facter/blob/master/lib/src/facts/linux/os_linux.cc + +## Package manager distributions + +* https://src.fedoraproject.org/rpms/python-distro +* https://www.archlinux.org/packages/community/any/python-distro/ +* https://launchpad.net/ubuntu/+source/python-distro +* https://packages.debian.org/stable/python3-distro +* https://packages.gentoo.org/packages/dev-python/distro +* https://pkgs.org/download/python3-distro +* https://slackbuilds.org/repository/14.2/python/python-distro/ diff --git a/.venv/Lib/site-packages/distro-1.9.0.dist-info/RECORD b/.venv/Lib/site-packages/distro-1.9.0.dist-info/RECORD new file mode 100644 index 00000000..00285c6a --- /dev/null +++ b/.venv/Lib/site-packages/distro-1.9.0.dist-info/RECORD @@ -0,0 +1,15 @@ +../../Scripts/distro.exe,sha256=MsnsAX-L0e2FozoWtUUvWdvbJ0zcogSwFHCVgcBj1QE,108398 +distro-1.9.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +distro-1.9.0.dist-info/LICENSE,sha256=y16Ofl9KOYjhBjwULGDcLfdWBfTEZRXnduOspt-XbhQ,11325 +distro-1.9.0.dist-info/METADATA,sha256=MWMqst5VkRMQkbM5e9zfeXcYV52Fp1GG8Gg53QwJ6B0,6791 +distro-1.9.0.dist-info/RECORD,, +distro-1.9.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92 +distro-1.9.0.dist-info/entry_points.txt,sha256=3ObjqQMbh1xeQQwsWtgbfDNDMDD-EbggR1Oj_z8s9hc,46 +distro-1.9.0.dist-info/top_level.txt,sha256=ikde_V_XEdSBqaGd5tEriN_wzYHLgTX_zVtlsGLHvwQ,7 +distro/__init__.py,sha256=2fHjF-SfgPvjyNZ1iHh_wjqWdR_Yo5ODHwZC0jLBPhc,981 +distro/__main__.py,sha256=bu9d3TifoKciZFcqRBuygV3GSuThnVD_m2IK4cz96Vs,64 +distro/__pycache__/__init__.cpython-311.pyc,, 
+distro/__pycache__/__main__.cpython-311.pyc,, +distro/__pycache__/distro.cpython-311.pyc,, +distro/distro.py,sha256=XqbefacAhDT4zr_trnbA15eY8vdK4GTghgmvUGrEM_4,49430 +distro/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/.venv/Lib/site-packages/distro-1.9.0.dist-info/WHEEL b/.venv/Lib/site-packages/distro-1.9.0.dist-info/WHEEL new file mode 100644 index 00000000..98c0d20b --- /dev/null +++ b/.venv/Lib/site-packages/distro-1.9.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/Lib/site-packages/distro-1.9.0.dist-info/entry_points.txt b/.venv/Lib/site-packages/distro-1.9.0.dist-info/entry_points.txt new file mode 100644 index 00000000..08d29c55 --- /dev/null +++ b/.venv/Lib/site-packages/distro-1.9.0.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +distro = distro.distro:main diff --git a/.venv/Lib/site-packages/distro-1.9.0.dist-info/top_level.txt b/.venv/Lib/site-packages/distro-1.9.0.dist-info/top_level.txt new file mode 100644 index 00000000..0e093317 --- /dev/null +++ b/.venv/Lib/site-packages/distro-1.9.0.dist-info/top_level.txt @@ -0,0 +1 @@ +distro diff --git a/.venv/Lib/site-packages/distro/__init__.py b/.venv/Lib/site-packages/distro/__init__.py new file mode 100644 index 00000000..7686fe85 --- /dev/null +++ b/.venv/Lib/site-packages/distro/__init__.py @@ -0,0 +1,54 @@ +from .distro import ( + NORMALIZED_DISTRO_ID, + NORMALIZED_LSB_ID, + NORMALIZED_OS_ID, + LinuxDistribution, + __version__, + build_number, + codename, + distro_release_attr, + distro_release_info, + id, + info, + like, + linux_distribution, + lsb_release_attr, + lsb_release_info, + major_version, + minor_version, + name, + os_release_attr, + os_release_info, + uname_attr, + uname_info, + version, + version_parts, +) + +__all__ = [ + "NORMALIZED_DISTRO_ID", + "NORMALIZED_LSB_ID", + "NORMALIZED_OS_ID", + "LinuxDistribution", + "build_number", + 
"codename", + "distro_release_attr", + "distro_release_info", + "id", + "info", + "like", + "linux_distribution", + "lsb_release_attr", + "lsb_release_info", + "major_version", + "minor_version", + "name", + "os_release_attr", + "os_release_info", + "uname_attr", + "uname_info", + "version", + "version_parts", +] + +__version__ = __version__ diff --git a/.venv/Lib/site-packages/distro/__main__.py b/.venv/Lib/site-packages/distro/__main__.py new file mode 100644 index 00000000..0c01d5b0 --- /dev/null +++ b/.venv/Lib/site-packages/distro/__main__.py @@ -0,0 +1,4 @@ +from .distro import main + +if __name__ == "__main__": + main() diff --git a/.venv/Lib/site-packages/distro/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/distro/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..f1d86d1e Binary files /dev/null and b/.venv/Lib/site-packages/distro/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/distro/__pycache__/__main__.cpython-311.pyc b/.venv/Lib/site-packages/distro/__pycache__/__main__.cpython-311.pyc new file mode 100644 index 00000000..e96e627a Binary files /dev/null and b/.venv/Lib/site-packages/distro/__pycache__/__main__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/distro/__pycache__/distro.cpython-311.pyc b/.venv/Lib/site-packages/distro/__pycache__/distro.cpython-311.pyc new file mode 100644 index 00000000..4565179e Binary files /dev/null and b/.venv/Lib/site-packages/distro/__pycache__/distro.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/distro/distro.py b/.venv/Lib/site-packages/distro/distro.py new file mode 100644 index 00000000..78ccdfa4 --- /dev/null +++ b/.venv/Lib/site-packages/distro/distro.py @@ -0,0 +1,1403 @@ +#!/usr/bin/env python +# Copyright 2015-2021 Nir Cohen +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +The ``distro`` package (``distro`` stands for Linux Distribution) provides +information about the Linux distribution it runs on, such as a reliable +machine-readable distro ID, or version information. + +It is the recommended replacement for Python's original +:py:func:`platform.linux_distribution` function, but it provides much more +functionality. An alternative implementation became necessary because Python +3.5 deprecated this function, and Python 3.8 removed it altogether. Its +predecessor function :py:func:`platform.dist` was already deprecated since +Python 2.6 and removed in Python 3.8. Still, there are many cases in which +access to OS distribution information is needed. See `Python issue 1322 +`_ for more information. 
+""" + +import argparse +import json +import logging +import os +import re +import shlex +import subprocess +import sys +import warnings +from typing import ( + Any, + Callable, + Dict, + Iterable, + Optional, + Sequence, + TextIO, + Tuple, + Type, +) + +try: + from typing import TypedDict +except ImportError: + # Python 3.7 + TypedDict = dict + +__version__ = "1.9.0" + + +class VersionDict(TypedDict): + major: str + minor: str + build_number: str + + +class InfoDict(TypedDict): + id: str + version: str + version_parts: VersionDict + like: str + codename: str + + +_UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc") +_UNIXUSRLIBDIR = os.environ.get("UNIXUSRLIBDIR", "/usr/lib") +_OS_RELEASE_BASENAME = "os-release" + +#: Translation table for normalizing the "ID" attribute defined in os-release +#: files, for use by the :func:`distro.id` method. +#: +#: * Key: Value as defined in the os-release file, translated to lower case, +#: with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_OS_ID = { + "ol": "oracle", # Oracle Linux + "opensuse-leap": "opensuse", # Newer versions of OpenSuSE report as opensuse-leap +} + +#: Translation table for normalizing the "Distributor ID" attribute returned by +#: the lsb_release command, for use by the :func:`distro.id` method. +#: +#: * Key: Value as returned by the lsb_release command, translated to lower +#: case, with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_LSB_ID = { + "enterpriseenterpriseas": "oracle", # Oracle Enterprise Linux 4 + "enterpriseenterpriseserver": "oracle", # Oracle Linux 5 + "redhatenterpriseworkstation": "rhel", # RHEL 6, 7 Workstation + "redhatenterpriseserver": "rhel", # RHEL 6, 7 Server + "redhatenterprisecomputenode": "rhel", # RHEL 6 ComputeNode +} + +#: Translation table for normalizing the distro ID derived from the file name +#: of distro release files, for use by the :func:`distro.id` method. 
+#: +#: * Key: Value as derived from the file name of a distro release file, +#: translated to lower case, with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_DISTRO_ID = { + "redhat": "rhel", # RHEL 6.x, 7.x +} + +# Pattern for content of distro release file (reversed) +_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile( + r"(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)" +) + +# Pattern for base file name of distro release file +_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$") + +# Base file names to be looked up for if _UNIXCONFDIR is not readable. +_DISTRO_RELEASE_BASENAMES = [ + "SuSE-release", + "altlinux-release", + "arch-release", + "base-release", + "centos-release", + "fedora-release", + "gentoo-release", + "mageia-release", + "mandrake-release", + "mandriva-release", + "mandrivalinux-release", + "manjaro-release", + "oracle-release", + "redhat-release", + "rocky-release", + "sl-release", + "slackware-version", +] + +# Base file names to be ignored when searching for distro release file +_DISTRO_RELEASE_IGNORE_BASENAMES = ( + "debian_version", + "lsb-release", + "oem-release", + _OS_RELEASE_BASENAME, + "system-release", + "plesk-release", + "iredmail-release", + "board-release", + "ec2_version", +) + + +def linux_distribution(full_distribution_name: bool = True) -> Tuple[str, str, str]: + """ + .. deprecated:: 1.6.0 + + :func:`distro.linux_distribution()` is deprecated. It should only be + used as a compatibility shim with Python's + :py:func:`platform.linux_distribution()`. Please use :func:`distro.id`, + :func:`distro.version` and :func:`distro.name` instead. + + Return information about the current OS distribution as a tuple + ``(id_name, version, codename)`` with items as follows: + + * ``id_name``: If *full_distribution_name* is false, the result of + :func:`distro.id`. Otherwise, the result of :func:`distro.name`. 
+ + * ``version``: The result of :func:`distro.version`. + + * ``codename``: The extra item (usually in parentheses) after the + os-release version number, or the result of :func:`distro.codename`. + + The interface of this function is compatible with the original + :py:func:`platform.linux_distribution` function, supporting a subset of + its parameters. + + The data it returns may not exactly be the same, because it uses more data + sources than the original function, and that may lead to different data if + the OS distribution is not consistent across multiple data sources it + provides (there are indeed such distributions ...). + + Another reason for differences is the fact that the :func:`distro.id` + method normalizes the distro ID string to a reliable machine-readable value + for a number of popular OS distributions. + """ + warnings.warn( + "distro.linux_distribution() is deprecated. It should only be used as a " + "compatibility shim with Python's platform.linux_distribution(). Please use " + "distro.id(), distro.version() and distro.name() instead.", + DeprecationWarning, + stacklevel=2, + ) + return _distro.linux_distribution(full_distribution_name) + + +def id() -> str: + """ + Return the distro ID of the current distribution, as a + machine-readable string. + + For a number of OS distributions, the returned distro ID value is + *reliable*, in the sense that it is documented and that it does not change + across releases of the distribution. 
+ + This package maintains the following reliable distro ID values: + + ============== ========================================= + Distro ID Distribution + ============== ========================================= + "ubuntu" Ubuntu + "debian" Debian + "rhel" RedHat Enterprise Linux + "centos" CentOS + "fedora" Fedora + "sles" SUSE Linux Enterprise Server + "opensuse" openSUSE + "amzn" Amazon Linux + "arch" Arch Linux + "buildroot" Buildroot + "cloudlinux" CloudLinux OS + "exherbo" Exherbo Linux + "gentoo" GenToo Linux + "ibm_powerkvm" IBM PowerKVM + "kvmibm" KVM for IBM z Systems + "linuxmint" Linux Mint + "mageia" Mageia + "mandriva" Mandriva Linux + "parallels" Parallels + "pidora" Pidora + "raspbian" Raspbian + "oracle" Oracle Linux (and Oracle Enterprise Linux) + "scientific" Scientific Linux + "slackware" Slackware + "xenserver" XenServer + "openbsd" OpenBSD + "netbsd" NetBSD + "freebsd" FreeBSD + "midnightbsd" MidnightBSD + "rocky" Rocky Linux + "aix" AIX + "guix" Guix System + "altlinux" ALT Linux + ============== ========================================= + + If you have a need to get distros for reliable IDs added into this set, + or if you find that the :func:`distro.id` function returns a different + distro ID for one of the listed distros, please create an issue in the + `distro issue tracker`_. + + **Lookup hierarchy and transformations:** + + First, the ID is obtained from the following sources, in the specified + order. 
The first available and non-empty value is used: + + * the value of the "ID" attribute of the os-release file, + + * the value of the "Distributor ID" attribute returned by the lsb_release + command, + + * the first part of the file name of the distro release file, + + The so determined ID value then passes the following transformations, + before it is returned by this method: + + * it is translated to lower case, + + * blanks (which should not be there anyway) are translated to underscores, + + * a normalization of the ID is performed, based upon + `normalization tables`_. The purpose of this normalization is to ensure + that the ID is as reliable as possible, even across incompatible changes + in the OS distributions. A common reason for an incompatible change is + the addition of an os-release file, or the addition of the lsb_release + command, with ID values that differ from what was previously determined + from the distro release file name. + """ + return _distro.id() + + +def name(pretty: bool = False) -> str: + """ + Return the name of the current OS distribution, as a human-readable + string. + + If *pretty* is false, the name is returned without version or codename. + (e.g. "CentOS Linux") + + If *pretty* is true, the version and codename are appended. + (e.g. "CentOS Linux 7.1.1503 (Core)") + + **Lookup hierarchy:** + + The name is obtained from the following sources, in the specified order. + The first available and non-empty value is used: + + * If *pretty* is false: + + - the value of the "NAME" attribute of the os-release file, + + - the value of the "Distributor ID" attribute returned by the lsb_release + command, + + - the value of the "" field of the distro release file. 
+ + * If *pretty* is true: + + - the value of the "PRETTY_NAME" attribute of the os-release file, + + - the value of the "Description" attribute returned by the lsb_release + command, + + - the value of the "" field of the distro release file, appended + with the value of the pretty version ("" and "" + fields) of the distro release file, if available. + """ + return _distro.name(pretty) + + +def version(pretty: bool = False, best: bool = False) -> str: + """ + Return the version of the current OS distribution, as a human-readable + string. + + If *pretty* is false, the version is returned without codename (e.g. + "7.0"). + + If *pretty* is true, the codename in parenthesis is appended, if the + codename is non-empty (e.g. "7.0 (Maipo)"). + + Some distributions provide version numbers with different precisions in + the different sources of distribution information. Examining the different + sources in a fixed priority order does not always yield the most precise + version (e.g. for Debian 8.2, or CentOS 7.1). + + Some other distributions may not provide this kind of information. In these + cases, an empty string would be returned. This behavior can be observed + with rolling releases distributions (e.g. Arch Linux). + + The *best* parameter can be used to control the approach for the returned + version: + + If *best* is false, the first non-empty version number in priority order of + the examined sources is returned. + + If *best* is true, the most precise version number out of all examined + sources is returned. + + **Lookup hierarchy:** + + In all cases, the version number is obtained from the following sources. 
+ If *best* is false, this order represents the priority order: + + * the value of the "VERSION_ID" attribute of the os-release file, + * the value of the "Release" attribute returned by the lsb_release + command, + * the version number parsed from the "" field of the first line + of the distro release file, + * the version number parsed from the "PRETTY_NAME" attribute of the + os-release file, if it follows the format of the distro release files. + * the version number parsed from the "Description" attribute returned by + the lsb_release command, if it follows the format of the distro release + files. + """ + return _distro.version(pretty, best) + + +def version_parts(best: bool = False) -> Tuple[str, str, str]: + """ + Return the version of the current OS distribution as a tuple + ``(major, minor, build_number)`` with items as follows: + + * ``major``: The result of :func:`distro.major_version`. + + * ``minor``: The result of :func:`distro.minor_version`. + + * ``build_number``: The result of :func:`distro.build_number`. + + For a description of the *best* parameter, see the :func:`distro.version` + method. + """ + return _distro.version_parts(best) + + +def major_version(best: bool = False) -> str: + """ + Return the major version of the current OS distribution, as a string, + if provided. + Otherwise, the empty string is returned. The major version is the first + part of the dot-separated version string. + + For a description of the *best* parameter, see the :func:`distro.version` + method. + """ + return _distro.major_version(best) + + +def minor_version(best: bool = False) -> str: + """ + Return the minor version of the current OS distribution, as a string, + if provided. + Otherwise, the empty string is returned. The minor version is the second + part of the dot-separated version string. + + For a description of the *best* parameter, see the :func:`distro.version` + method. 
+ """ + return _distro.minor_version(best) + + +def build_number(best: bool = False) -> str: + """ + Return the build number of the current OS distribution, as a string, + if provided. + Otherwise, the empty string is returned. The build number is the third part + of the dot-separated version string. + + For a description of the *best* parameter, see the :func:`distro.version` + method. + """ + return _distro.build_number(best) + + +def like() -> str: + """ + Return a space-separated list of distro IDs of distributions that are + closely related to the current OS distribution in regards to packaging + and programming interfaces, for example distributions the current + distribution is a derivative from. + + **Lookup hierarchy:** + + This information item is only provided by the os-release file. + For details, see the description of the "ID_LIKE" attribute in the + `os-release man page + `_. + """ + return _distro.like() + + +def codename() -> str: + """ + Return the codename for the release of the current OS distribution, + as a string. + + If the distribution does not have a codename, an empty string is returned. + + Note that the returned codename is not always really a codename. For + example, openSUSE returns "x86_64". This function does not handle such + cases in any special way and just returns the string it finds, if any. + + **Lookup hierarchy:** + + * the codename within the "VERSION" attribute of the os-release file, if + provided, + + * the value of the "Codename" attribute returned by the lsb_release + command, + + * the value of the "" field of the distro release file. + """ + return _distro.codename() + + +def info(pretty: bool = False, best: bool = False) -> InfoDict: + """ + Return certain machine-readable information items about the current OS + distribution in a dictionary, as shown in the following example: + + .. 
sourcecode:: python + + { + 'id': 'rhel', + 'version': '7.0', + 'version_parts': { + 'major': '7', + 'minor': '0', + 'build_number': '' + }, + 'like': 'fedora', + 'codename': 'Maipo' + } + + The dictionary structure and keys are always the same, regardless of which + information items are available in the underlying data sources. The values + for the various keys are as follows: + + * ``id``: The result of :func:`distro.id`. + + * ``version``: The result of :func:`distro.version`. + + * ``version_parts -> major``: The result of :func:`distro.major_version`. + + * ``version_parts -> minor``: The result of :func:`distro.minor_version`. + + * ``version_parts -> build_number``: The result of + :func:`distro.build_number`. + + * ``like``: The result of :func:`distro.like`. + + * ``codename``: The result of :func:`distro.codename`. + + For a description of the *pretty* and *best* parameters, see the + :func:`distro.version` method. + """ + return _distro.info(pretty, best) + + +def os_release_info() -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information items + from the os-release file data source of the current OS distribution. + + See `os-release file`_ for details about these information items. + """ + return _distro.os_release_info() + + +def lsb_release_info() -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information items + from the lsb_release command data source of the current OS distribution. + + See `lsb_release command output`_ for details about these information + items. + """ + return _distro.lsb_release_info() + + +def distro_release_info() -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information items + from the distro release file data source of the current OS distribution. + + See `distro release file`_ for details about these information items. 
+ """ + return _distro.distro_release_info() + + +def uname_info() -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information items + from the distro release file data source of the current OS distribution. + """ + return _distro.uname_info() + + +def os_release_attr(attribute: str) -> str: + """ + Return a single named information item from the os-release file data source + of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `os-release file`_ for details about these information items. + """ + return _distro.os_release_attr(attribute) + + +def lsb_release_attr(attribute: str) -> str: + """ + Return a single named information item from the lsb_release command output + data source of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `lsb_release command output`_ for details about these information + items. + """ + return _distro.lsb_release_attr(attribute) + + +def distro_release_attr(attribute: str) -> str: + """ + Return a single named information item from the distro release file + data source of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `distro release file`_ for details about these information items. + """ + return _distro.distro_release_attr(attribute) + + +def uname_attr(attribute: str) -> str: + """ + Return a single named information item from the distro release file + data source of the current OS distribution. 
+ + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + """ + return _distro.uname_attr(attribute) + + +try: + from functools import cached_property +except ImportError: + # Python < 3.8 + class cached_property: # type: ignore + """A version of @property which caches the value. On access, it calls the + underlying function and sets the value in `__dict__` so future accesses + will not re-call the property. + """ + + def __init__(self, f: Callable[[Any], Any]) -> None: + self._fname = f.__name__ + self._f = f + + def __get__(self, obj: Any, owner: Type[Any]) -> Any: + assert obj is not None, f"call {self._fname} on an instance" + ret = obj.__dict__[self._fname] = self._f(obj) + return ret + + +class LinuxDistribution: + """ + Provides information about a OS distribution. + + This package creates a private module-global instance of this class with + default initialization arguments, that is used by the + `consolidated accessor functions`_ and `single source accessor functions`_. + By using default initialization arguments, that module-global instance + returns data about the current OS distribution (i.e. the distro this + package runs on). + + Normally, it is not necessary to create additional instances of this class. + However, in situations where control is needed over the exact data sources + that are used, instances of this class can be created with a specific + distro release file, or a specific os-release file, or without invoking the + lsb_release command. 
+ """ + + def __init__( + self, + include_lsb: Optional[bool] = None, + os_release_file: str = "", + distro_release_file: str = "", + include_uname: Optional[bool] = None, + root_dir: Optional[str] = None, + include_oslevel: Optional[bool] = None, + ) -> None: + """ + The initialization method of this class gathers information from the + available data sources, and stores that in private instance attributes. + Subsequent access to the information items uses these private instance + attributes, so that the data sources are read only once. + + Parameters: + + * ``include_lsb`` (bool): Controls whether the + `lsb_release command output`_ is included as a data source. + + If the lsb_release command is not available in the program execution + path, the data source for the lsb_release command will be empty. + + * ``os_release_file`` (string): The path name of the + `os-release file`_ that is to be used as a data source. + + An empty string (the default) will cause the default path name to + be used (see `os-release file`_ for details). + + If the specified or defaulted os-release file does not exist, the + data source for the os-release file will be empty. + + * ``distro_release_file`` (string): The path name of the + `distro release file`_ that is to be used as a data source. + + An empty string (the default) will cause a default search algorithm + to be used (see `distro release file`_ for details). + + If the specified distro release file does not exist, or if no default + distro release file can be found, the data source for the distro + release file will be empty. + + * ``include_uname`` (bool): Controls whether uname command output is + included as a data source. If the uname command is not available in + the program execution path the data source for the uname command will + be empty. + + * ``root_dir`` (string): The absolute path to the root directory to use + to find distro-related information files. 
Note that ``include_*`` + parameters must not be enabled in combination with ``root_dir``. + + * ``include_oslevel`` (bool): Controls whether (AIX) oslevel command + output is included as a data source. If the oslevel command is not + available in the program execution path the data source will be + empty. + + Public instance attributes: + + * ``os_release_file`` (string): The path name of the + `os-release file`_ that is actually used as a data source. The + empty string if no distro release file is used as a data source. + + * ``distro_release_file`` (string): The path name of the + `distro release file`_ that is actually used as a data source. The + empty string if no distro release file is used as a data source. + + * ``include_lsb`` (bool): The result of the ``include_lsb`` parameter. + This controls whether the lsb information will be loaded. + + * ``include_uname`` (bool): The result of the ``include_uname`` + parameter. This controls whether the uname information will + be loaded. + + * ``include_oslevel`` (bool): The result of the ``include_oslevel`` + parameter. This controls whether (AIX) oslevel information will be + loaded. + + * ``root_dir`` (string): The result of the ``root_dir`` parameter. + The absolute path to the root directory to use to find distro-related + information files. + + Raises: + + * :py:exc:`ValueError`: Initialization parameters combination is not + supported. + + * :py:exc:`OSError`: Some I/O issue with an os-release file or distro + release file. + + * :py:exc:`UnicodeError`: A data source has unexpected characters or + uses an unexpected encoding. 
+ """ + self.root_dir = root_dir + self.etc_dir = os.path.join(root_dir, "etc") if root_dir else _UNIXCONFDIR + self.usr_lib_dir = ( + os.path.join(root_dir, "usr/lib") if root_dir else _UNIXUSRLIBDIR + ) + + if os_release_file: + self.os_release_file = os_release_file + else: + etc_dir_os_release_file = os.path.join(self.etc_dir, _OS_RELEASE_BASENAME) + usr_lib_os_release_file = os.path.join( + self.usr_lib_dir, _OS_RELEASE_BASENAME + ) + + # NOTE: The idea is to respect order **and** have it set + # at all times for API backwards compatibility. + if os.path.isfile(etc_dir_os_release_file) or not os.path.isfile( + usr_lib_os_release_file + ): + self.os_release_file = etc_dir_os_release_file + else: + self.os_release_file = usr_lib_os_release_file + + self.distro_release_file = distro_release_file or "" # updated later + + is_root_dir_defined = root_dir is not None + if is_root_dir_defined and (include_lsb or include_uname or include_oslevel): + raise ValueError( + "Including subprocess data sources from specific root_dir is disallowed" + " to prevent false information" + ) + self.include_lsb = ( + include_lsb if include_lsb is not None else not is_root_dir_defined + ) + self.include_uname = ( + include_uname if include_uname is not None else not is_root_dir_defined + ) + self.include_oslevel = ( + include_oslevel if include_oslevel is not None else not is_root_dir_defined + ) + + def __repr__(self) -> str: + """Return repr of all info""" + return ( + "LinuxDistribution(" + "os_release_file={self.os_release_file!r}, " + "distro_release_file={self.distro_release_file!r}, " + "include_lsb={self.include_lsb!r}, " + "include_uname={self.include_uname!r}, " + "include_oslevel={self.include_oslevel!r}, " + "root_dir={self.root_dir!r}, " + "_os_release_info={self._os_release_info!r}, " + "_lsb_release_info={self._lsb_release_info!r}, " + "_distro_release_info={self._distro_release_info!r}, " + "_uname_info={self._uname_info!r}, " + 
"_oslevel_info={self._oslevel_info!r})".format(self=self) + ) + + def linux_distribution( + self, full_distribution_name: bool = True + ) -> Tuple[str, str, str]: + """ + Return information about the OS distribution that is compatible + with Python's :func:`platform.linux_distribution`, supporting a subset + of its parameters. + + For details, see :func:`distro.linux_distribution`. + """ + return ( + self.name() if full_distribution_name else self.id(), + self.version(), + self._os_release_info.get("release_codename") or self.codename(), + ) + + def id(self) -> str: + """Return the distro ID of the OS distribution, as a string. + + For details, see :func:`distro.id`. + """ + + def normalize(distro_id: str, table: Dict[str, str]) -> str: + distro_id = distro_id.lower().replace(" ", "_") + return table.get(distro_id, distro_id) + + distro_id = self.os_release_attr("id") + if distro_id: + return normalize(distro_id, NORMALIZED_OS_ID) + + distro_id = self.lsb_release_attr("distributor_id") + if distro_id: + return normalize(distro_id, NORMALIZED_LSB_ID) + + distro_id = self.distro_release_attr("id") + if distro_id: + return normalize(distro_id, NORMALIZED_DISTRO_ID) + + distro_id = self.uname_attr("id") + if distro_id: + return normalize(distro_id, NORMALIZED_DISTRO_ID) + + return "" + + def name(self, pretty: bool = False) -> str: + """ + Return the name of the OS distribution, as a string. + + For details, see :func:`distro.name`. 
+ """ + name = ( + self.os_release_attr("name") + or self.lsb_release_attr("distributor_id") + or self.distro_release_attr("name") + or self.uname_attr("name") + ) + if pretty: + name = self.os_release_attr("pretty_name") or self.lsb_release_attr( + "description" + ) + if not name: + name = self.distro_release_attr("name") or self.uname_attr("name") + version = self.version(pretty=True) + if version: + name = f"{name} {version}" + return name or "" + + def version(self, pretty: bool = False, best: bool = False) -> str: + """ + Return the version of the OS distribution, as a string. + + For details, see :func:`distro.version`. + """ + versions = [ + self.os_release_attr("version_id"), + self.lsb_release_attr("release"), + self.distro_release_attr("version_id"), + self._parse_distro_release_content(self.os_release_attr("pretty_name")).get( + "version_id", "" + ), + self._parse_distro_release_content( + self.lsb_release_attr("description") + ).get("version_id", ""), + self.uname_attr("release"), + ] + if self.uname_attr("id").startswith("aix"): + # On AIX platforms, prefer oslevel command output. + versions.insert(0, self.oslevel_info()) + elif self.id() == "debian" or "debian" in self.like().split(): + # On Debian-like, add debian_version file content to candidates list. + versions.append(self._debian_version) + version = "" + if best: + # This algorithm uses the last version in priority order that has + # the best precision. If the versions are not in conflict, that + # does not matter; otherwise, using the last one instead of the + # first one might be considered a surprise. 
+ for v in versions: + if v.count(".") > version.count(".") or version == "": + version = v + else: + for v in versions: + if v != "": + version = v + break + if pretty and version and self.codename(): + version = f"{version} ({self.codename()})" + return version + + def version_parts(self, best: bool = False) -> Tuple[str, str, str]: + """ + Return the version of the OS distribution, as a tuple of version + numbers. + + For details, see :func:`distro.version_parts`. + """ + version_str = self.version(best=best) + if version_str: + version_regex = re.compile(r"(\d+)\.?(\d+)?\.?(\d+)?") + matches = version_regex.match(version_str) + if matches: + major, minor, build_number = matches.groups() + return major, minor or "", build_number or "" + return "", "", "" + + def major_version(self, best: bool = False) -> str: + """ + Return the major version number of the current distribution. + + For details, see :func:`distro.major_version`. + """ + return self.version_parts(best)[0] + + def minor_version(self, best: bool = False) -> str: + """ + Return the minor version number of the current distribution. + + For details, see :func:`distro.minor_version`. + """ + return self.version_parts(best)[1] + + def build_number(self, best: bool = False) -> str: + """ + Return the build number of the current distribution. + + For details, see :func:`distro.build_number`. + """ + return self.version_parts(best)[2] + + def like(self) -> str: + """ + Return the IDs of distributions that are like the OS distribution. + + For details, see :func:`distro.like`. + """ + return self.os_release_attr("id_like") or "" + + def codename(self) -> str: + """ + Return the codename of the OS distribution. + + For details, see :func:`distro.codename`. 
+ """ + try: + # Handle os_release specially since distros might purposefully set + # this to empty string to have no codename + return self._os_release_info["codename"] + except KeyError: + return ( + self.lsb_release_attr("codename") + or self.distro_release_attr("codename") + or "" + ) + + def info(self, pretty: bool = False, best: bool = False) -> InfoDict: + """ + Return certain machine-readable information about the OS + distribution. + + For details, see :func:`distro.info`. + """ + return InfoDict( + id=self.id(), + version=self.version(pretty, best), + version_parts=VersionDict( + major=self.major_version(best), + minor=self.minor_version(best), + build_number=self.build_number(best), + ), + like=self.like(), + codename=self.codename(), + ) + + def os_release_info(self) -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information + items from the os-release file data source of the OS distribution. + + For details, see :func:`distro.os_release_info`. + """ + return self._os_release_info + + def lsb_release_info(self) -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information + items from the lsb_release command data source of the OS + distribution. + + For details, see :func:`distro.lsb_release_info`. + """ + return self._lsb_release_info + + def distro_release_info(self) -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information + items from the distro release file data source of the OS + distribution. + + For details, see :func:`distro.distro_release_info`. + """ + return self._distro_release_info + + def uname_info(self) -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information + items from the uname command data source of the OS distribution. + + For details, see :func:`distro.uname_info`. + """ + return self._uname_info + + def oslevel_info(self) -> str: + """ + Return AIX' oslevel command output. 
+ """ + return self._oslevel_info + + def os_release_attr(self, attribute: str) -> str: + """ + Return a single named information item from the os-release file data + source of the OS distribution. + + For details, see :func:`distro.os_release_attr`. + """ + return self._os_release_info.get(attribute, "") + + def lsb_release_attr(self, attribute: str) -> str: + """ + Return a single named information item from the lsb_release command + output data source of the OS distribution. + + For details, see :func:`distro.lsb_release_attr`. + """ + return self._lsb_release_info.get(attribute, "") + + def distro_release_attr(self, attribute: str) -> str: + """ + Return a single named information item from the distro release file + data source of the OS distribution. + + For details, see :func:`distro.distro_release_attr`. + """ + return self._distro_release_info.get(attribute, "") + + def uname_attr(self, attribute: str) -> str: + """ + Return a single named information item from the uname command + output data source of the OS distribution. + + For details, see :func:`distro.uname_attr`. + """ + return self._uname_info.get(attribute, "") + + @cached_property + def _os_release_info(self) -> Dict[str, str]: + """ + Get the information items from the specified os-release file. + + Returns: + A dictionary containing all information items. + """ + if os.path.isfile(self.os_release_file): + with open(self.os_release_file, encoding="utf-8") as release_file: + return self._parse_os_release_content(release_file) + return {} + + @staticmethod + def _parse_os_release_content(lines: TextIO) -> Dict[str, str]: + """ + Parse the lines of an os-release file. + + Parameters: + + * lines: Iterable through the lines in the os-release file. + Each line must be a unicode string or a UTF-8 encoded byte + string. + + Returns: + A dictionary containing all information items. 
+ """ + props = {} + lexer = shlex.shlex(lines, posix=True) + lexer.whitespace_split = True + + tokens = list(lexer) + for token in tokens: + # At this point, all shell-like parsing has been done (i.e. + # comments processed, quotes and backslash escape sequences + # processed, multi-line values assembled, trailing newlines + # stripped, etc.), so the tokens are now either: + # * variable assignments: var=value + # * commands or their arguments (not allowed in os-release) + # Ignore any tokens that are not variable assignments + if "=" in token: + k, v = token.split("=", 1) + props[k.lower()] = v + + if "version" in props: + # extract release codename (if any) from version attribute + match = re.search(r"\((\D+)\)|,\s*(\D+)", props["version"]) + if match: + release_codename = match.group(1) or match.group(2) + props["codename"] = props["release_codename"] = release_codename + + if "version_codename" in props: + # os-release added a version_codename field. Use that in + # preference to anything else Note that some distros purposefully + # do not have code names. They should be setting + # version_codename="" + props["codename"] = props["version_codename"] + elif "ubuntu_codename" in props: + # Same as above but a non-standard field name used on older Ubuntus + props["codename"] = props["ubuntu_codename"] + + return props + + @cached_property + def _lsb_release_info(self) -> Dict[str, str]: + """ + Get the information items from the lsb_release command output. + + Returns: + A dictionary containing all information items. 
+ """ + if not self.include_lsb: + return {} + try: + cmd = ("lsb_release", "-a") + stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL) + # Command not found or lsb_release returned error + except (OSError, subprocess.CalledProcessError): + return {} + content = self._to_str(stdout).splitlines() + return self._parse_lsb_release_content(content) + + @staticmethod + def _parse_lsb_release_content(lines: Iterable[str]) -> Dict[str, str]: + """ + Parse the output of the lsb_release command. + + Parameters: + + * lines: Iterable through the lines of the lsb_release output. + Each line must be a unicode string or a UTF-8 encoded byte + string. + + Returns: + A dictionary containing all information items. + """ + props = {} + for line in lines: + kv = line.strip("\n").split(":", 1) + if len(kv) != 2: + # Ignore lines without colon. + continue + k, v = kv + props.update({k.replace(" ", "_").lower(): v.strip()}) + return props + + @cached_property + def _uname_info(self) -> Dict[str, str]: + if not self.include_uname: + return {} + try: + cmd = ("uname", "-rs") + stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL) + except OSError: + return {} + content = self._to_str(stdout).splitlines() + return self._parse_uname_content(content) + + @cached_property + def _oslevel_info(self) -> str: + if not self.include_oslevel: + return "" + try: + stdout = subprocess.check_output("oslevel", stderr=subprocess.DEVNULL) + except (OSError, subprocess.CalledProcessError): + return "" + return self._to_str(stdout).strip() + + @cached_property + def _debian_version(self) -> str: + try: + with open( + os.path.join(self.etc_dir, "debian_version"), encoding="ascii" + ) as fp: + return fp.readline().rstrip() + except FileNotFoundError: + return "" + + @staticmethod + def _parse_uname_content(lines: Sequence[str]) -> Dict[str, str]: + if not lines: + return {} + props = {} + match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip()) + if match: + name, version = 
match.groups() + + # This is to prevent the Linux kernel version from + # appearing as the 'best' version on otherwise + # identifiable distributions. + if name == "Linux": + return {} + props["id"] = name.lower() + props["name"] = name + props["release"] = version + return props + + @staticmethod + def _to_str(bytestring: bytes) -> str: + encoding = sys.getfilesystemencoding() + return bytestring.decode(encoding) + + @cached_property + def _distro_release_info(self) -> Dict[str, str]: + """ + Get the information items from the specified distro release file. + + Returns: + A dictionary containing all information items. + """ + if self.distro_release_file: + # If it was specified, we use it and parse what we can, even if + # its file name or content does not match the expected pattern. + distro_info = self._parse_distro_release_file(self.distro_release_file) + basename = os.path.basename(self.distro_release_file) + # The file name pattern for user-specified distro release files + # is somewhat more tolerant (compared to when searching for the + # file), because we want to use what was specified as best as + # possible. + match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) + else: + try: + basenames = [ + basename + for basename in os.listdir(self.etc_dir) + if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES + and os.path.isfile(os.path.join(self.etc_dir, basename)) + ] + # We sort for repeatability in cases where there are multiple + # distro specific files; e.g. CentOS, Oracle, Enterprise all + # containing `redhat-release` on top of their own. + basenames.sort() + except OSError: + # This may occur when /etc is not readable but we can't be + # sure about the *-release files. Check common entries of + # /etc for information. If they turn out to not be there the + # error is handled in `_parse_distro_release_file()`. 
+ basenames = _DISTRO_RELEASE_BASENAMES + for basename in basenames: + match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) + if match is None: + continue + filepath = os.path.join(self.etc_dir, basename) + distro_info = self._parse_distro_release_file(filepath) + # The name is always present if the pattern matches. + if "name" not in distro_info: + continue + self.distro_release_file = filepath + break + else: # the loop didn't "break": no candidate. + return {} + + if match is not None: + distro_info["id"] = match.group(1) + + # CloudLinux < 7: manually enrich info with proper id. + if "cloudlinux" in distro_info.get("name", "").lower(): + distro_info["id"] = "cloudlinux" + + return distro_info + + def _parse_distro_release_file(self, filepath: str) -> Dict[str, str]: + """ + Parse a distro release file. + + Parameters: + + * filepath: Path name of the distro release file. + + Returns: + A dictionary containing all information items. + """ + try: + with open(filepath, encoding="utf-8") as fp: + # Only parse the first line. For instance, on SLES there + # are multiple lines. We don't want them... + return self._parse_distro_release_content(fp.readline()) + except OSError: + # Ignore not being able to read a specific, seemingly version + # related file. + # See https://github.com/python-distro/distro/issues/162 + return {} + + @staticmethod + def _parse_distro_release_content(line: str) -> Dict[str, str]: + """ + Parse a line from a distro release file. + + Parameters: + * line: Line from the distro release file. Must be a unicode string + or a UTF-8 encoded byte string. + + Returns: + A dictionary containing all information items. 
+ """ + matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(line.strip()[::-1]) + distro_info = {} + if matches: + # regexp ensures non-None + distro_info["name"] = matches.group(3)[::-1] + if matches.group(2): + distro_info["version_id"] = matches.group(2)[::-1] + if matches.group(1): + distro_info["codename"] = matches.group(1)[::-1] + elif line: + distro_info["name"] = line.strip() + return distro_info + + +_distro = LinuxDistribution() + + +def main() -> None: + logger = logging.getLogger(__name__) + logger.setLevel(logging.DEBUG) + logger.addHandler(logging.StreamHandler(sys.stdout)) + + parser = argparse.ArgumentParser(description="OS distro info tool") + parser.add_argument( + "--json", "-j", help="Output in machine readable format", action="store_true" + ) + + parser.add_argument( + "--root-dir", + "-r", + type=str, + dest="root_dir", + help="Path to the root filesystem directory (defaults to /)", + ) + + args = parser.parse_args() + + if args.root_dir: + dist = LinuxDistribution( + include_lsb=False, + include_uname=False, + include_oslevel=False, + root_dir=args.root_dir, + ) + else: + dist = _distro + + if args.json: + logger.info(json.dumps(dist.info(), indent=4, sort_keys=True)) + else: + logger.info("Name: %s", dist.name(pretty=True)) + distribution_version = dist.version(pretty=True) + logger.info("Version: %s", distribution_version) + distribution_codename = dist.codename() + logger.info("Codename: %s", distribution_codename) + + +if __name__ == "__main__": + main() diff --git a/.venv/Lib/site-packages/distro/py.typed b/.venv/Lib/site-packages/distro/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/einops/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/einops/__pycache__/__init__.cpython-311.pyc index 5003c37e..6c8727a8 100644 Binary files a/.venv/Lib/site-packages/einops/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/einops/__pycache__/__init__.cpython-311.pyc differ 
diff --git a/.venv/Lib/site-packages/einops/__pycache__/_backends.cpython-311.pyc b/.venv/Lib/site-packages/einops/__pycache__/_backends.cpython-311.pyc index f0246352..d194d57a 100644 Binary files a/.venv/Lib/site-packages/einops/__pycache__/_backends.cpython-311.pyc and b/.venv/Lib/site-packages/einops/__pycache__/_backends.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/einops/__pycache__/_torch_specific.cpython-311.pyc b/.venv/Lib/site-packages/einops/__pycache__/_torch_specific.cpython-311.pyc index 207993d5..c66f5538 100644 Binary files a/.venv/Lib/site-packages/einops/__pycache__/_torch_specific.cpython-311.pyc and b/.venv/Lib/site-packages/einops/__pycache__/_torch_specific.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/einops/__pycache__/einops.cpython-311.pyc b/.venv/Lib/site-packages/einops/__pycache__/einops.cpython-311.pyc index 9043be9e..7baf01a2 100644 Binary files a/.venv/Lib/site-packages/einops/__pycache__/einops.cpython-311.pyc and b/.venv/Lib/site-packages/einops/__pycache__/einops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/einops/__pycache__/packing.cpython-311.pyc b/.venv/Lib/site-packages/einops/__pycache__/packing.cpython-311.pyc index 26009735..dfa35c89 100644 Binary files a/.venv/Lib/site-packages/einops/__pycache__/packing.cpython-311.pyc and b/.venv/Lib/site-packages/einops/__pycache__/packing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/einops/__pycache__/parsing.cpython-311.pyc b/.venv/Lib/site-packages/einops/__pycache__/parsing.cpython-311.pyc index b9473edf..9fafe2e2 100644 Binary files a/.venv/Lib/site-packages/einops/__pycache__/parsing.cpython-311.pyc and b/.venv/Lib/site-packages/einops/__pycache__/parsing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/einops/layers/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/einops/layers/__pycache__/__init__.cpython-311.pyc index 73905756..cf79522b 100644 Binary files 
a/.venv/Lib/site-packages/einops/layers/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/einops/layers/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/einops/layers/__pycache__/_einmix.cpython-311.pyc b/.venv/Lib/site-packages/einops/layers/__pycache__/_einmix.cpython-311.pyc index abf09e35..e50b7e25 100644 Binary files a/.venv/Lib/site-packages/einops/layers/__pycache__/_einmix.cpython-311.pyc and b/.venv/Lib/site-packages/einops/layers/__pycache__/_einmix.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/einops/layers/__pycache__/torch.cpython-311.pyc b/.venv/Lib/site-packages/einops/layers/__pycache__/torch.cpython-311.pyc index 973d4811..20e7815e 100644 Binary files a/.venv/Lib/site-packages/einops/layers/__pycache__/torch.cpython-311.pyc and b/.venv/Lib/site-packages/einops/layers/__pycache__/torch.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/filelock/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/filelock/__pycache__/__init__.cpython-311.pyc index 0556d9ab..2c5e02b1 100644 Binary files a/.venv/Lib/site-packages/filelock/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/filelock/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/filelock/__pycache__/_api.cpython-311.pyc b/.venv/Lib/site-packages/filelock/__pycache__/_api.cpython-311.pyc index 2f511bb7..9d5023f8 100644 Binary files a/.venv/Lib/site-packages/filelock/__pycache__/_api.cpython-311.pyc and b/.venv/Lib/site-packages/filelock/__pycache__/_api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/filelock/__pycache__/_error.cpython-311.pyc b/.venv/Lib/site-packages/filelock/__pycache__/_error.cpython-311.pyc index 82bc5c88..eb397b92 100644 Binary files a/.venv/Lib/site-packages/filelock/__pycache__/_error.cpython-311.pyc and b/.venv/Lib/site-packages/filelock/__pycache__/_error.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/filelock/__pycache__/_soft.cpython-311.pyc b/.venv/Lib/site-packages/filelock/__pycache__/_soft.cpython-311.pyc index add5ae47..44f7c736 100644 Binary files a/.venv/Lib/site-packages/filelock/__pycache__/_soft.cpython-311.pyc and b/.venv/Lib/site-packages/filelock/__pycache__/_soft.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/filelock/__pycache__/_unix.cpython-311.pyc b/.venv/Lib/site-packages/filelock/__pycache__/_unix.cpython-311.pyc index 69e66269..84ea71b1 100644 Binary files a/.venv/Lib/site-packages/filelock/__pycache__/_unix.cpython-311.pyc and b/.venv/Lib/site-packages/filelock/__pycache__/_unix.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/filelock/__pycache__/_util.cpython-311.pyc b/.venv/Lib/site-packages/filelock/__pycache__/_util.cpython-311.pyc index 4576a138..ec0f5d61 100644 Binary files a/.venv/Lib/site-packages/filelock/__pycache__/_util.cpython-311.pyc and b/.venv/Lib/site-packages/filelock/__pycache__/_util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/filelock/__pycache__/_windows.cpython-311.pyc b/.venv/Lib/site-packages/filelock/__pycache__/_windows.cpython-311.pyc index e1198684..7fcc058d 100644 Binary files a/.venv/Lib/site-packages/filelock/__pycache__/_windows.cpython-311.pyc and b/.venv/Lib/site-packages/filelock/__pycache__/_windows.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/filelock/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/filelock/__pycache__/version.cpython-311.pyc index 785eaf70..8054959d 100644 Binary files a/.venv/Lib/site-packages/filelock/__pycache__/version.cpython-311.pyc and b/.venv/Lib/site-packages/filelock/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/frozenlist/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/frozenlist/__pycache__/__init__.cpython-311.pyc index 8d0a7eff..66aa64ad 100644 Binary files 
a/.venv/Lib/site-packages/frozenlist/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/frozenlist/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/fsspec/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/fsspec/__pycache__/__init__.cpython-311.pyc index 83481fb8..f03e6dbf 100644 Binary files a/.venv/Lib/site-packages/fsspec/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/fsspec/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/fsspec/__pycache__/_version.cpython-311.pyc b/.venv/Lib/site-packages/fsspec/__pycache__/_version.cpython-311.pyc index fec3056c..01daf464 100644 Binary files a/.venv/Lib/site-packages/fsspec/__pycache__/_version.cpython-311.pyc and b/.venv/Lib/site-packages/fsspec/__pycache__/_version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/fsspec/__pycache__/asyn.cpython-311.pyc b/.venv/Lib/site-packages/fsspec/__pycache__/asyn.cpython-311.pyc index 006f8851..1ff73cb4 100644 Binary files a/.venv/Lib/site-packages/fsspec/__pycache__/asyn.cpython-311.pyc and b/.venv/Lib/site-packages/fsspec/__pycache__/asyn.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/fsspec/__pycache__/caching.cpython-311.pyc b/.venv/Lib/site-packages/fsspec/__pycache__/caching.cpython-311.pyc index 65d515f5..506575ae 100644 Binary files a/.venv/Lib/site-packages/fsspec/__pycache__/caching.cpython-311.pyc and b/.venv/Lib/site-packages/fsspec/__pycache__/caching.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/fsspec/__pycache__/callbacks.cpython-311.pyc b/.venv/Lib/site-packages/fsspec/__pycache__/callbacks.cpython-311.pyc index 9cba784f..f40f4842 100644 Binary files a/.venv/Lib/site-packages/fsspec/__pycache__/callbacks.cpython-311.pyc and b/.venv/Lib/site-packages/fsspec/__pycache__/callbacks.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/fsspec/__pycache__/compression.cpython-311.pyc 
b/.venv/Lib/site-packages/fsspec/__pycache__/compression.cpython-311.pyc index 292fdfce..ff1e535c 100644 Binary files a/.venv/Lib/site-packages/fsspec/__pycache__/compression.cpython-311.pyc and b/.venv/Lib/site-packages/fsspec/__pycache__/compression.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/fsspec/__pycache__/config.cpython-311.pyc b/.venv/Lib/site-packages/fsspec/__pycache__/config.cpython-311.pyc index 11dac5d1..ca620d2d 100644 Binary files a/.venv/Lib/site-packages/fsspec/__pycache__/config.cpython-311.pyc and b/.venv/Lib/site-packages/fsspec/__pycache__/config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/fsspec/__pycache__/core.cpython-311.pyc b/.venv/Lib/site-packages/fsspec/__pycache__/core.cpython-311.pyc index 4fb57cea..a562c188 100644 Binary files a/.venv/Lib/site-packages/fsspec/__pycache__/core.cpython-311.pyc and b/.venv/Lib/site-packages/fsspec/__pycache__/core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/fsspec/__pycache__/dircache.cpython-311.pyc b/.venv/Lib/site-packages/fsspec/__pycache__/dircache.cpython-311.pyc index c5269a70..f0ad8c4d 100644 Binary files a/.venv/Lib/site-packages/fsspec/__pycache__/dircache.cpython-311.pyc and b/.venv/Lib/site-packages/fsspec/__pycache__/dircache.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/fsspec/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/fsspec/__pycache__/exceptions.cpython-311.pyc index 07786473..d69d3e10 100644 Binary files a/.venv/Lib/site-packages/fsspec/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/fsspec/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/fsspec/__pycache__/mapping.cpython-311.pyc b/.venv/Lib/site-packages/fsspec/__pycache__/mapping.cpython-311.pyc index 15790ab8..ac34da8a 100644 Binary files a/.venv/Lib/site-packages/fsspec/__pycache__/mapping.cpython-311.pyc and b/.venv/Lib/site-packages/fsspec/__pycache__/mapping.cpython-311.pyc differ diff 
--git a/.venv/Lib/site-packages/fsspec/__pycache__/registry.cpython-311.pyc b/.venv/Lib/site-packages/fsspec/__pycache__/registry.cpython-311.pyc index dd19b350..18a0f684 100644 Binary files a/.venv/Lib/site-packages/fsspec/__pycache__/registry.cpython-311.pyc and b/.venv/Lib/site-packages/fsspec/__pycache__/registry.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/fsspec/__pycache__/spec.cpython-311.pyc b/.venv/Lib/site-packages/fsspec/__pycache__/spec.cpython-311.pyc index 7c3ead15..226a8207 100644 Binary files a/.venv/Lib/site-packages/fsspec/__pycache__/spec.cpython-311.pyc and b/.venv/Lib/site-packages/fsspec/__pycache__/spec.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/fsspec/__pycache__/transaction.cpython-311.pyc b/.venv/Lib/site-packages/fsspec/__pycache__/transaction.cpython-311.pyc index 826de643..f02c405e 100644 Binary files a/.venv/Lib/site-packages/fsspec/__pycache__/transaction.cpython-311.pyc and b/.venv/Lib/site-packages/fsspec/__pycache__/transaction.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/fsspec/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/fsspec/__pycache__/utils.cpython-311.pyc index 9b9ebb5f..4f887d2b 100644 Binary files a/.venv/Lib/site-packages/fsspec/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/fsspec/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/fsspec/implementations/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/fsspec/implementations/__pycache__/__init__.cpython-311.pyc index 9b1d00d6..3017c3f1 100644 Binary files a/.venv/Lib/site-packages/fsspec/implementations/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/fsspec/implementations/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/fsspec/implementations/__pycache__/http.cpython-311.pyc b/.venv/Lib/site-packages/fsspec/implementations/__pycache__/http.cpython-311.pyc index 51405196..671b2bcb 100644 Binary files 
a/.venv/Lib/site-packages/fsspec/implementations/__pycache__/http.cpython-311.pyc and b/.venv/Lib/site-packages/fsspec/implementations/__pycache__/http.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/fsspec/implementations/__pycache__/local.cpython-311.pyc b/.venv/Lib/site-packages/fsspec/implementations/__pycache__/local.cpython-311.pyc index 5751709f..b0aaaaaa 100644 Binary files a/.venv/Lib/site-packages/fsspec/implementations/__pycache__/local.cpython-311.pyc and b/.venv/Lib/site-packages/fsspec/implementations/__pycache__/local.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/gruut/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/gruut/__pycache__/__init__.cpython-311.pyc index 5556a770..1e132ecb 100644 Binary files a/.venv/Lib/site-packages/gruut/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/gruut/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/gruut/__pycache__/const.cpython-311.pyc b/.venv/Lib/site-packages/gruut/__pycache__/const.cpython-311.pyc index bec92949..c2086906 100644 Binary files a/.venv/Lib/site-packages/gruut/__pycache__/const.cpython-311.pyc and b/.venv/Lib/site-packages/gruut/__pycache__/const.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/gruut/__pycache__/g2p.cpython-311.pyc b/.venv/Lib/site-packages/gruut/__pycache__/g2p.cpython-311.pyc index d9bf3e47..8313208e 100644 Binary files a/.venv/Lib/site-packages/gruut/__pycache__/g2p.cpython-311.pyc and b/.venv/Lib/site-packages/gruut/__pycache__/g2p.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/gruut/__pycache__/lang.cpython-311.pyc b/.venv/Lib/site-packages/gruut/__pycache__/lang.cpython-311.pyc index 55993b74..c0eda1b3 100644 Binary files a/.venv/Lib/site-packages/gruut/__pycache__/lang.cpython-311.pyc and b/.venv/Lib/site-packages/gruut/__pycache__/lang.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/gruut/__pycache__/phonemize.cpython-311.pyc 
b/.venv/Lib/site-packages/gruut/__pycache__/phonemize.cpython-311.pyc index f6b9cf74..b926e5ed 100644 Binary files a/.venv/Lib/site-packages/gruut/__pycache__/phonemize.cpython-311.pyc and b/.venv/Lib/site-packages/gruut/__pycache__/phonemize.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/gruut/__pycache__/pos.cpython-311.pyc b/.venv/Lib/site-packages/gruut/__pycache__/pos.cpython-311.pyc index 2c29cc8d..7741347d 100644 Binary files a/.venv/Lib/site-packages/gruut/__pycache__/pos.cpython-311.pyc and b/.venv/Lib/site-packages/gruut/__pycache__/pos.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/gruut/__pycache__/resources.cpython-311.pyc b/.venv/Lib/site-packages/gruut/__pycache__/resources.cpython-311.pyc index badad8ab..465888d0 100644 Binary files a/.venv/Lib/site-packages/gruut/__pycache__/resources.cpython-311.pyc and b/.venv/Lib/site-packages/gruut/__pycache__/resources.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/gruut/__pycache__/text_processor.cpython-311.pyc b/.venv/Lib/site-packages/gruut/__pycache__/text_processor.cpython-311.pyc index 50d03f99..bf8fb129 100644 Binary files a/.venv/Lib/site-packages/gruut/__pycache__/text_processor.cpython-311.pyc and b/.venv/Lib/site-packages/gruut/__pycache__/text_processor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/gruut/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/gruut/__pycache__/utils.cpython-311.pyc index 1d82948d..274eec65 100644 Binary files a/.venv/Lib/site-packages/gruut/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/gruut/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/gruut_ipa/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/gruut_ipa/__pycache__/__init__.cpython-311.pyc index 88591cb2..011d941b 100644 Binary files a/.venv/Lib/site-packages/gruut_ipa/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/gruut_ipa/__pycache__/__init__.cpython-311.pyc differ diff 
--git a/.venv/Lib/site-packages/gruut_ipa/__pycache__/accent.cpython-311.pyc b/.venv/Lib/site-packages/gruut_ipa/__pycache__/accent.cpython-311.pyc index 7c8ef543..03091e84 100644 Binary files a/.venv/Lib/site-packages/gruut_ipa/__pycache__/accent.cpython-311.pyc and b/.venv/Lib/site-packages/gruut_ipa/__pycache__/accent.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/gruut_ipa/__pycache__/constants.cpython-311.pyc b/.venv/Lib/site-packages/gruut_ipa/__pycache__/constants.cpython-311.pyc index 709ba86d..5446d056 100644 Binary files a/.venv/Lib/site-packages/gruut_ipa/__pycache__/constants.cpython-311.pyc and b/.venv/Lib/site-packages/gruut_ipa/__pycache__/constants.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/gruut_ipa/__pycache__/distances.cpython-311.pyc b/.venv/Lib/site-packages/gruut_ipa/__pycache__/distances.cpython-311.pyc index e922d2bf..e2931c57 100644 Binary files a/.venv/Lib/site-packages/gruut_ipa/__pycache__/distances.cpython-311.pyc and b/.venv/Lib/site-packages/gruut_ipa/__pycache__/distances.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/gruut_ipa/__pycache__/espeak.cpython-311.pyc b/.venv/Lib/site-packages/gruut_ipa/__pycache__/espeak.cpython-311.pyc index 04a54117..96d4c927 100644 Binary files a/.venv/Lib/site-packages/gruut_ipa/__pycache__/espeak.cpython-311.pyc and b/.venv/Lib/site-packages/gruut_ipa/__pycache__/espeak.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/gruut_ipa/__pycache__/features.cpython-311.pyc b/.venv/Lib/site-packages/gruut_ipa/__pycache__/features.cpython-311.pyc index 70deec0f..bae5e0b6 100644 Binary files a/.venv/Lib/site-packages/gruut_ipa/__pycache__/features.cpython-311.pyc and b/.venv/Lib/site-packages/gruut_ipa/__pycache__/features.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/gruut_ipa/__pycache__/phonemes.cpython-311.pyc b/.venv/Lib/site-packages/gruut_ipa/__pycache__/phonemes.cpython-311.pyc index 808badc8..c3c3cc35 100644 Binary files 
a/.venv/Lib/site-packages/gruut_ipa/__pycache__/phonemes.cpython-311.pyc and b/.venv/Lib/site-packages/gruut_ipa/__pycache__/phonemes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/gruut_ipa/__pycache__/sampa.cpython-311.pyc b/.venv/Lib/site-packages/gruut_ipa/__pycache__/sampa.cpython-311.pyc index 66511774..001726df 100644 Binary files a/.venv/Lib/site-packages/gruut_ipa/__pycache__/sampa.cpython-311.pyc and b/.venv/Lib/site-packages/gruut_ipa/__pycache__/sampa.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/gruut_ipa/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/gruut_ipa/__pycache__/utils.cpython-311.pyc index 938a2b4b..b0517f81 100644 Binary files a/.venv/Lib/site-packages/gruut_ipa/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/gruut_ipa/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11-0.14.0.dist-info/INSTALLER b/.venv/Lib/site-packages/h11-0.14.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/Lib/site-packages/h11-0.14.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/Lib/site-packages/h11-0.14.0.dist-info/LICENSE.txt b/.venv/Lib/site-packages/h11-0.14.0.dist-info/LICENSE.txt new file mode 100644 index 00000000..8f080eae --- /dev/null +++ b/.venv/Lib/site-packages/h11-0.14.0.dist-info/LICENSE.txt @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2016 Nathaniel J. 
Smith and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/.venv/Lib/site-packages/h11-0.14.0.dist-info/METADATA b/.venv/Lib/site-packages/h11-0.14.0.dist-info/METADATA new file mode 100644 index 00000000..cf12a82f --- /dev/null +++ b/.venv/Lib/site-packages/h11-0.14.0.dist-info/METADATA @@ -0,0 +1,193 @@ +Metadata-Version: 2.1 +Name: h11 +Version: 0.14.0 +Summary: A pure-Python, bring-your-own-I/O implementation of HTTP/1.1 +Home-page: https://github.com/python-hyper/h11 +Author: Nathaniel J. 
Smith +Author-email: njs@pobox.com +License: MIT +Classifier: Development Status :: 3 - Alpha +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: System :: Networking +Requires-Python: >=3.7 +License-File: LICENSE.txt +Requires-Dist: typing-extensions ; python_version < "3.8" + +h11 +=== + +.. image:: https://travis-ci.org/python-hyper/h11.svg?branch=master + :target: https://travis-ci.org/python-hyper/h11 + :alt: Automated test status + +.. image:: https://codecov.io/gh/python-hyper/h11/branch/master/graph/badge.svg + :target: https://codecov.io/gh/python-hyper/h11 + :alt: Test coverage + +.. image:: https://readthedocs.org/projects/h11/badge/?version=latest + :target: http://h11.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + +This is a little HTTP/1.1 library written from scratch in Python, +heavily inspired by `hyper-h2 `_. + +It's a "bring-your-own-I/O" library; h11 contains no IO code +whatsoever. This means you can hook h11 up to your favorite network +API, and that could be anything you want: synchronous, threaded, +asynchronous, or your own implementation of `RFC 6214 +`_ -- h11 won't judge you. +(Compare this to the current state of the art, where every time a `new +network API `_ comes along then someone +gets to start over reimplementing the entire HTTP protocol from +scratch.) 
Cory Benfield made an `excellent blog post describing the +benefits of this approach +`_, or if you like video +then here's his `PyCon 2016 talk on the same theme +`_. + +This also means that h11 is not immediately useful out of the box: +it's a toolkit for building programs that speak HTTP, not something +that could directly replace ``requests`` or ``twisted.web`` or +whatever. But h11 makes it much easier to implement something like +``requests`` or ``twisted.web``. + +At a high level, working with h11 goes like this: + +1) First, create an ``h11.Connection`` object to track the state of a + single HTTP/1.1 connection. + +2) When you read data off the network, pass it to + ``conn.receive_data(...)``; you'll get back a list of objects + representing high-level HTTP "events". + +3) When you want to send a high-level HTTP event, create the + corresponding "event" object and pass it to ``conn.send(...)``; + this will give you back some bytes that you can then push out + through the network. + +For example, a client might instantiate and then send a +``h11.Request`` object, then zero or more ``h11.Data`` objects for the +request body (e.g., if this is a POST), and then a +``h11.EndOfMessage`` to indicate the end of the message. Then the +server would then send back a ``h11.Response``, some ``h11.Data``, and +its own ``h11.EndOfMessage``. If either side violates the protocol, +you'll get a ``h11.ProtocolError`` exception. + +h11 is suitable for implementing both servers and clients, and has a +pleasantly symmetric API: the events you send as a client are exactly +the ones that you receive as a server and vice-versa. + +`Here's an example of a tiny HTTP client +`_ + +It also has `a fine manual `_. + +FAQ +--- + +*Whyyyyy?* + +I wanted to play with HTTP in `Curio +`__ and `Trio +`__, which at the time didn't have any +HTTP libraries. So I thought, no big deal, Python has, like, a dozen +different implementations of HTTP, surely I can find one that's +reusable. 
I didn't find one, but I did find Cory's call-to-arms +blog-post. So I figured, well, fine, if I have to implement HTTP from +scratch, at least I can make sure no-one *else* has to ever again. + +*Should I use it?* + +Maybe. You should be aware that it's a very young project. But, it's +feature complete and has an exhaustive test-suite and complete docs, +so the next step is for people to try using it and see how it goes +:-). If you do then please let us know -- if nothing else we'll want +to talk to you before making any incompatible changes! + +*What are the features/limitations?* + +Roughly speaking, it's trying to be a robust, complete, and non-hacky +implementation of the first "chapter" of the HTTP/1.1 spec: `RFC 7230: +HTTP/1.1 Message Syntax and Routing +`_. That is, it mostly focuses on +implementing HTTP at the level of taking bytes on and off the wire, +and the headers related to that, and tries to be anal about spec +conformance. It doesn't know about higher-level concerns like URL +routing, conditional GETs, cross-origin cookie policies, or content +negotiation. But it does know how to take care of framing, +cross-version differences in keep-alive handling, and the "obsolete +line folding" rule, so you can focus your energies on the hard / +interesting parts for your application, and it tries to support the +full specification in the sense that any useful HTTP/1.1 conformant +application should be able to use h11. + +It's pure Python, and has no dependencies outside of the standard +library. + +It has a test suite with 100.0% coverage for both statements and +branches. + +Currently it supports Python 3 (testing on 3.7-3.10) and PyPy 3. +The last Python 2-compatible version was h11 0.11.x. +(Originally it had a Cython wrapper for `http-parser +`_ and a beautiful nested state +machine implemented with ``yield from`` to postprocess the output. 
But +I had to take these out -- the new *parser* needs fewer lines-of-code +than the old *parser wrapper*, is written in pure Python, uses no +exotic language syntax, and has more features. It's sad, really; that +old state machine was really slick. I just need a few sentences here +to mourn that.) + +I don't know how fast it is. I haven't benchmarked or profiled it yet, +so it's probably got a few pointless hot spots, and I've been trying +to err on the side of simplicity and robustness instead of +micro-optimization. But at the architectural level I tried hard to +avoid fundamentally bad decisions, e.g., I believe that all the +parsing algorithms remain linear-time even in the face of pathological +input like slowloris, and there are no byte-by-byte loops. (I also +believe that it maintains bounded memory usage in the face of +arbitrary/pathological input.) + +The whole library is ~800 lines-of-code. You can read and understand +the whole thing in less than an hour. Most of the energy invested in +this so far has been spent on trying to keep things simple by +minimizing special-cases and ad hoc state manipulation; even though it +is now quite small and simple, I'm still annoyed that I haven't +figured out how to make it even smaller and simpler. (Unfortunately, +HTTP does not lend itself to simplicity.) + +The API is ~feature complete and I don't expect the general outlines +to change much, but you can't judge an API's ergonomics until you +actually document and use it, so I'd expect some changes in the +details. + +*How do I try it?* + +.. code-block:: sh + + $ pip install h11 + $ git clone git@github.com:python-hyper/h11 + $ cd h11/examples + $ python basic-client.py + +and go from there. + +*License?* + +MIT + +*Code of conduct?* + +Contributors are requested to follow our `code of conduct +`_ in +all project spaces. 
diff --git a/.venv/Lib/site-packages/h11-0.14.0.dist-info/RECORD b/.venv/Lib/site-packages/h11-0.14.0.dist-info/RECORD new file mode 100644 index 00000000..57c1a6ef --- /dev/null +++ b/.venv/Lib/site-packages/h11-0.14.0.dist-info/RECORD @@ -0,0 +1,52 @@ +h11-0.14.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +h11-0.14.0.dist-info/LICENSE.txt,sha256=N9tbuFkm2yikJ6JYZ_ELEjIAOuob5pzLhRE4rbjm82E,1124 +h11-0.14.0.dist-info/METADATA,sha256=B7pZ0m7WBXNs17vl6hUH9bJTL9s37DaGvY31w7jNxSg,8175 +h11-0.14.0.dist-info/RECORD,, +h11-0.14.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +h11-0.14.0.dist-info/top_level.txt,sha256=F7dC4jl3zeh8TGHEPaWJrMbeuoWbS379Gwdi-Yvdcis,4 +h11/__init__.py,sha256=iO1KzkSO42yZ6ffg-VMgbx_ZVTWGUY00nRYEWn-s3kY,1507 +h11/__pycache__/__init__.cpython-311.pyc,, +h11/__pycache__/_abnf.cpython-311.pyc,, +h11/__pycache__/_connection.cpython-311.pyc,, +h11/__pycache__/_events.cpython-311.pyc,, +h11/__pycache__/_headers.cpython-311.pyc,, +h11/__pycache__/_readers.cpython-311.pyc,, +h11/__pycache__/_receivebuffer.cpython-311.pyc,, +h11/__pycache__/_state.cpython-311.pyc,, +h11/__pycache__/_util.cpython-311.pyc,, +h11/__pycache__/_version.cpython-311.pyc,, +h11/__pycache__/_writers.cpython-311.pyc,, +h11/_abnf.py,sha256=ybixr0xsupnkA6GFAyMubuXF6Tc1lb_hF890NgCsfNc,4815 +h11/_connection.py,sha256=eS2sorMD0zKLCFiB9lW9W9F_Nzny2tjHa4e6s1ujr1c,26539 +h11/_events.py,sha256=LEfuvg1AbhHaVRwxCd0I-pFn9-ezUOaoL8o2Kvy1PBA,11816 +h11/_headers.py,sha256=RqB8cd8CN0blYPzcLe5qeCh-phv6D1U_CHj4hs67lgQ,10230 +h11/_readers.py,sha256=EbSed0jzwVUiD1nOPAeUcVE4Flf3wXkxfb8c06-OTBM,8383 +h11/_receivebuffer.py,sha256=xrspsdsNgWFxRfQcTXxR8RrdjRXXTK0Io5cQYWpJ1Ws,5252 +h11/_state.py,sha256=k1VL6SDbaPkSrZ-49ewCXDpuiUS69_46YhbWjuV1qEY,13300 +h11/_util.py,sha256=LWkkjXyJaFlAy6Lt39w73UStklFT5ovcvo0TkY7RYuk,4888 +h11/_version.py,sha256=LVyTdiZRzIIEv79UyOgbM5iUrJUllEzlCWaJEYBY1zc,686 
+h11/_writers.py,sha256=oFKm6PtjeHfbj4RLX7VB7KDc1gIY53gXG3_HR9ltmTA,5081 +h11/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7 +h11/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +h11/tests/__pycache__/__init__.cpython-311.pyc,, +h11/tests/__pycache__/helpers.cpython-311.pyc,, +h11/tests/__pycache__/test_against_stdlib_http.cpython-311.pyc,, +h11/tests/__pycache__/test_connection.cpython-311.pyc,, +h11/tests/__pycache__/test_events.cpython-311.pyc,, +h11/tests/__pycache__/test_headers.cpython-311.pyc,, +h11/tests/__pycache__/test_helpers.cpython-311.pyc,, +h11/tests/__pycache__/test_io.cpython-311.pyc,, +h11/tests/__pycache__/test_receivebuffer.cpython-311.pyc,, +h11/tests/__pycache__/test_state.cpython-311.pyc,, +h11/tests/__pycache__/test_util.cpython-311.pyc,, +h11/tests/data/test-file,sha256=ZJ03Rqs98oJw29OHzJg7LlMzyGQaRAY0r3AqBeM2wVU,65 +h11/tests/helpers.py,sha256=a1EVG_p7xU4wRsa3tMPTRxuaKCmretok9sxXWvqfmQA,3355 +h11/tests/test_against_stdlib_http.py,sha256=cojCHgHXFQ8gWhNlEEwl3trmOpN-5uDukRoHnElqo3A,3995 +h11/tests/test_connection.py,sha256=ZbPLDPclKvjgjAhgk-WlCPBaf17c4XUIV2tpaW08jOI,38720 +h11/tests/test_events.py,sha256=LPVLbcV-NvPNK9fW3rraR6Bdpz1hAlsWubMtNaJ5gHg,4657 +h11/tests/test_headers.py,sha256=qd8T1Zenuz5GbD6wklSJ5G8VS7trrYgMV0jT-SMvqg8,5612 +h11/tests/test_helpers.py,sha256=kAo0CEM4LGqmyyP2ZFmhsyq3UFJqoFfAbzu3hbWreRM,794 +h11/tests/test_io.py,sha256=uCZVnjarkRBkudfC1ij-KSCQ71XWJhnkgkgWWkKgYPQ,16386 +h11/tests/test_receivebuffer.py,sha256=3jGbeJM36Akqg_pAhPb7XzIn2NS6RhPg-Ryg8Eu6ytk,3454 +h11/tests/test_state.py,sha256=rqll9WqFsJPE0zSrtCn9LH659mPKsDeXZ-DwXwleuBQ,8928 +h11/tests/test_util.py,sha256=VO5L4nSFe4pgtSwKuv6u_6l0H7UeizF5WKuHTWreg70,2970 diff --git a/.venv/Lib/site-packages/decorator-5.1.1.dist-info/WHEEL b/.venv/Lib/site-packages/h11-0.14.0.dist-info/WHEEL similarity index 100% rename from .venv/Lib/site-packages/decorator-5.1.1.dist-info/WHEEL rename to .venv/Lib/site-packages/h11-0.14.0.dist-info/WHEEL 
diff --git a/.venv/Lib/site-packages/h11-0.14.0.dist-info/top_level.txt b/.venv/Lib/site-packages/h11-0.14.0.dist-info/top_level.txt new file mode 100644 index 00000000..0d24def7 --- /dev/null +++ b/.venv/Lib/site-packages/h11-0.14.0.dist-info/top_level.txt @@ -0,0 +1 @@ +h11 diff --git a/.venv/Lib/site-packages/h11/__init__.py b/.venv/Lib/site-packages/h11/__init__.py new file mode 100644 index 00000000..989e92c3 --- /dev/null +++ b/.venv/Lib/site-packages/h11/__init__.py @@ -0,0 +1,62 @@ +# A highish-level implementation of the HTTP/1.1 wire protocol (RFC 7230), +# containing no networking code at all, loosely modelled on hyper-h2's generic +# implementation of HTTP/2 (and in particular the h2.connection.H2Connection +# class). There's still a bunch of subtle details you need to get right if you +# want to make this actually useful, because it doesn't implement all the +# semantics to check that what you're asking to write to the wire is sensible, +# but at least it gets you out of dealing with the wire itself. 
+ +from h11._connection import Connection, NEED_DATA, PAUSED +from h11._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from h11._state import ( + CLIENT, + CLOSED, + DONE, + ERROR, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from h11._util import LocalProtocolError, ProtocolError, RemoteProtocolError +from h11._version import __version__ + +PRODUCT_ID = "python-h11/" + __version__ + + +__all__ = ( + "Connection", + "NEED_DATA", + "PAUSED", + "ConnectionClosed", + "Data", + "EndOfMessage", + "Event", + "InformationalResponse", + "Request", + "Response", + "CLIENT", + "CLOSED", + "DONE", + "ERROR", + "IDLE", + "MUST_CLOSE", + "SEND_BODY", + "SEND_RESPONSE", + "SERVER", + "SWITCHED_PROTOCOL", + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", +) diff --git a/.venv/Lib/site-packages/h11/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/h11/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..9388f5b9 Binary files /dev/null and b/.venv/Lib/site-packages/h11/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11/__pycache__/_abnf.cpython-311.pyc b/.venv/Lib/site-packages/h11/__pycache__/_abnf.cpython-311.pyc new file mode 100644 index 00000000..01379bf8 Binary files /dev/null and b/.venv/Lib/site-packages/h11/__pycache__/_abnf.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11/__pycache__/_connection.cpython-311.pyc b/.venv/Lib/site-packages/h11/__pycache__/_connection.cpython-311.pyc new file mode 100644 index 00000000..b55c2618 Binary files /dev/null and b/.venv/Lib/site-packages/h11/__pycache__/_connection.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11/__pycache__/_events.cpython-311.pyc b/.venv/Lib/site-packages/h11/__pycache__/_events.cpython-311.pyc new file mode 100644 index 00000000..9de58f5d Binary files /dev/null 
and b/.venv/Lib/site-packages/h11/__pycache__/_events.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11/__pycache__/_headers.cpython-311.pyc b/.venv/Lib/site-packages/h11/__pycache__/_headers.cpython-311.pyc new file mode 100644 index 00000000..561385d4 Binary files /dev/null and b/.venv/Lib/site-packages/h11/__pycache__/_headers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11/__pycache__/_readers.cpython-311.pyc b/.venv/Lib/site-packages/h11/__pycache__/_readers.cpython-311.pyc new file mode 100644 index 00000000..fc6e5931 Binary files /dev/null and b/.venv/Lib/site-packages/h11/__pycache__/_readers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11/__pycache__/_receivebuffer.cpython-311.pyc b/.venv/Lib/site-packages/h11/__pycache__/_receivebuffer.cpython-311.pyc new file mode 100644 index 00000000..d6733e4c Binary files /dev/null and b/.venv/Lib/site-packages/h11/__pycache__/_receivebuffer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11/__pycache__/_state.cpython-311.pyc b/.venv/Lib/site-packages/h11/__pycache__/_state.cpython-311.pyc new file mode 100644 index 00000000..f70c4d50 Binary files /dev/null and b/.venv/Lib/site-packages/h11/__pycache__/_state.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11/__pycache__/_util.cpython-311.pyc b/.venv/Lib/site-packages/h11/__pycache__/_util.cpython-311.pyc new file mode 100644 index 00000000..a995b866 Binary files /dev/null and b/.venv/Lib/site-packages/h11/__pycache__/_util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11/__pycache__/_version.cpython-311.pyc b/.venv/Lib/site-packages/h11/__pycache__/_version.cpython-311.pyc new file mode 100644 index 00000000..0eed353e Binary files /dev/null and b/.venv/Lib/site-packages/h11/__pycache__/_version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11/__pycache__/_writers.cpython-311.pyc b/.venv/Lib/site-packages/h11/__pycache__/_writers.cpython-311.pyc new file mode 100644 
index 00000000..e228e44e Binary files /dev/null and b/.venv/Lib/site-packages/h11/__pycache__/_writers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11/_abnf.py b/.venv/Lib/site-packages/h11/_abnf.py new file mode 100644 index 00000000..933587fb --- /dev/null +++ b/.venv/Lib/site-packages/h11/_abnf.py @@ -0,0 +1,132 @@ +# We use native strings for all the re patterns, to take advantage of string +# formatting, and then convert to bytestrings when compiling the final re +# objects. + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#whitespace +# OWS = *( SP / HTAB ) +# ; optional whitespace +OWS = r"[ \t]*" + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.token.separators +# token = 1*tchar +# +# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" +# / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" +# / DIGIT / ALPHA +# ; any VCHAR, except delimiters +token = r"[-!#$%&'*+.^_`|~0-9a-zA-Z]+" + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#header.fields +# field-name = token +field_name = token + +# The standard says: +# +# field-value = *( field-content / obs-fold ) +# field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] +# field-vchar = VCHAR / obs-text +# obs-fold = CRLF 1*( SP / HTAB ) +# ; obsolete line folding +# ; see Section 3.2.4 +# +# https://tools.ietf.org/html/rfc5234#appendix-B.1 +# +# VCHAR = %x21-7E +# ; visible (printing) characters +# +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.quoted-string +# obs-text = %x80-FF +# +# However, the standard definition of field-content is WRONG! It disallows +# fields containing a single visible character surrounded by whitespace, +# e.g. "foo a bar". +# +# See: https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189 +# +# So our definition of field_content attempts to fix it up... 
+# +# Also, we allow lots of control characters, because apparently people assume +# that they're legal in practice (e.g., google analytics makes cookies with +# \x01 in them!): +# https://github.com/python-hyper/h11/issues/57 +# We still don't allow NUL or whitespace, because those are often treated as +# meta-characters and letting them through can lead to nasty issues like SSRF. +vchar = r"[\x21-\x7e]" +vchar_or_obs_text = r"[^\x00\s]" +field_vchar = vchar_or_obs_text +field_content = r"{field_vchar}+(?:[ \t]+{field_vchar}+)*".format(**globals()) + +# We handle obs-fold at a different level, and our fixed-up field_content +# already grows to swallow the whole value, so ? instead of * +field_value = r"({field_content})?".format(**globals()) + +# header-field = field-name ":" OWS field-value OWS +header_field = ( + r"(?P{field_name})" + r":" + r"{OWS}" + r"(?P{field_value})" + r"{OWS}".format(**globals()) +) + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#request.line +# +# request-line = method SP request-target SP HTTP-version CRLF +# method = token +# HTTP-version = HTTP-name "/" DIGIT "." DIGIT +# HTTP-name = %x48.54.54.50 ; "HTTP", case-sensitive +# +# request-target is complicated (see RFC 7230 sec 5.3) -- could be path, full +# URL, host+port (for connect), or even "*", but in any case we are guaranteed +# that it contists of the visible printing characters. 
+method = token +request_target = r"{vchar}+".format(**globals()) +http_version = r"HTTP/(?P[0-9]\.[0-9])" +request_line = ( + r"(?P{method})" + r" " + r"(?P{request_target})" + r" " + r"{http_version}".format(**globals()) +) + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#status.line +# +# status-line = HTTP-version SP status-code SP reason-phrase CRLF +# status-code = 3DIGIT +# reason-phrase = *( HTAB / SP / VCHAR / obs-text ) +status_code = r"[0-9]{3}" +reason_phrase = r"([ \t]|{vchar_or_obs_text})*".format(**globals()) +status_line = ( + r"{http_version}" + r" " + r"(?P{status_code})" + # However, there are apparently a few too many servers out there that just + # leave out the reason phrase: + # https://github.com/scrapy/scrapy/issues/345#issuecomment-281756036 + # https://github.com/seanmonstar/httparse/issues/29 + # so make it optional. ?: is a non-capturing group. + r"(?: (?P{reason_phrase}))?".format(**globals()) +) + +HEXDIG = r"[0-9A-Fa-f]" +# Actually +# +# chunk-size = 1*HEXDIG +# +# but we impose an upper-limit to avoid ridiculosity. len(str(2**64)) == 20 +chunk_size = r"({HEXDIG}){{1,20}}".format(**globals()) +# Actually +# +# chunk-ext = *( ";" chunk-ext-name [ "=" chunk-ext-val ] ) +# +# but we aren't parsing the things so we don't really care. +chunk_ext = r";.*" +chunk_header = ( + r"(?P{chunk_size})" + r"(?P{chunk_ext})?" + r"{OWS}\r\n".format( + **globals() + ) # Even though the specification does not allow for extra whitespaces, + # we are lenient with trailing whitespaces because some servers on the wild use it. +) diff --git a/.venv/Lib/site-packages/h11/_connection.py b/.venv/Lib/site-packages/h11/_connection.py new file mode 100644 index 00000000..d1752707 --- /dev/null +++ b/.venv/Lib/site-packages/h11/_connection.py @@ -0,0 +1,633 @@ +# This contains the main Connection class. Everything in h11 revolves around +# this. 
+from typing import Any, Callable, cast, Dict, List, Optional, Tuple, Type, Union + +from ._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from ._headers import get_comma_header, has_expect_100_continue, set_comma_header +from ._readers import READERS, ReadersType +from ._receivebuffer import ReceiveBuffer +from ._state import ( + _SWITCH_CONNECT, + _SWITCH_UPGRADE, + CLIENT, + ConnectionState, + DONE, + ERROR, + MIGHT_SWITCH_PROTOCOL, + SEND_BODY, + SERVER, + SWITCHED_PROTOCOL, +) +from ._util import ( # Import the internal things we need + LocalProtocolError, + RemoteProtocolError, + Sentinel, +) +from ._writers import WRITERS, WritersType + +# Everything in __all__ gets re-exported as part of the h11 public API. +__all__ = ["Connection", "NEED_DATA", "PAUSED"] + + +class NEED_DATA(Sentinel, metaclass=Sentinel): + pass + + +class PAUSED(Sentinel, metaclass=Sentinel): + pass + + +# If we ever have this much buffered without it making a complete parseable +# event, we error out. The only time we really buffer is when reading the +# request/response line + headers together, so this is effectively the limit on +# the size of that. +# +# Some precedents for defaults: +# - node.js: 80 * 1024 +# - tomcat: 8 * 1024 +# - IIS: 16 * 1024 +# - Apache: <8 KiB per line> +DEFAULT_MAX_INCOMPLETE_EVENT_SIZE = 16 * 1024 + +# RFC 7230's rules for connection lifecycles: +# - If either side says they want to close the connection, then the connection +# must close. +# - HTTP/1.1 defaults to keep-alive unless someone says Connection: close +# - HTTP/1.0 defaults to close unless both sides say Connection: keep-alive +# (and even this is a mess -- e.g. if you're implementing a proxy then +# sending Connection: keep-alive is forbidden). +# +# We simplify life by simply not supporting keep-alive with HTTP/1.0 peers. 
So +# our rule is: +# - If someone says Connection: close, we will close +# - If someone uses HTTP/1.0, we will close. +def _keep_alive(event: Union[Request, Response]) -> bool: + connection = get_comma_header(event.headers, b"connection") + if b"close" in connection: + return False + if getattr(event, "http_version", b"1.1") < b"1.1": + return False + return True + + +def _body_framing( + request_method: bytes, event: Union[Request, Response] +) -> Tuple[str, Union[Tuple[()], Tuple[int]]]: + # Called when we enter SEND_BODY to figure out framing information for + # this body. + # + # These are the only two events that can trigger a SEND_BODY state: + assert type(event) in (Request, Response) + # Returns one of: + # + # ("content-length", count) + # ("chunked", ()) + # ("http/1.0", ()) + # + # which are (lookup key, *args) for constructing body reader/writer + # objects. + # + # Reference: https://tools.ietf.org/html/rfc7230#section-3.3.3 + # + # Step 1: some responses always have an empty body, regardless of what the + # headers say. + if type(event) is Response: + if ( + event.status_code in (204, 304) + or request_method == b"HEAD" + or (request_method == b"CONNECT" and 200 <= event.status_code < 300) + ): + return ("content-length", (0,)) + # Section 3.3.3 also lists another case -- responses with status_code + # < 200. For us these are InformationalResponses, not Responses, so + # they can't get into this function in the first place. 
+ assert event.status_code >= 200 + + # Step 2: check for Transfer-Encoding (T-E beats C-L): + transfer_encodings = get_comma_header(event.headers, b"transfer-encoding") + if transfer_encodings: + assert transfer_encodings == [b"chunked"] + return ("chunked", ()) + + # Step 3: check for Content-Length + content_lengths = get_comma_header(event.headers, b"content-length") + if content_lengths: + return ("content-length", (int(content_lengths[0]),)) + + # Step 4: no applicable headers; fallback/default depends on type + if type(event) is Request: + return ("content-length", (0,)) + else: + return ("http/1.0", ()) + + +################################################################ +# +# The main Connection class +# +################################################################ + + +class Connection: + """An object encapsulating the state of an HTTP connection. + + Args: + our_role: If you're implementing a client, pass :data:`h11.CLIENT`. If + you're implementing a server, pass :data:`h11.SERVER`. + + max_incomplete_event_size (int): + The maximum number of bytes we're willing to buffer of an + incomplete event. In practice this mostly sets a limit on the + maximum size of the request/response line + headers. If this is + exceeded, then :meth:`next_event` will raise + :exc:`RemoteProtocolError`. 
+ + """ + + def __init__( + self, + our_role: Type[Sentinel], + max_incomplete_event_size: int = DEFAULT_MAX_INCOMPLETE_EVENT_SIZE, + ) -> None: + self._max_incomplete_event_size = max_incomplete_event_size + # State and role tracking + if our_role not in (CLIENT, SERVER): + raise ValueError("expected CLIENT or SERVER, not {!r}".format(our_role)) + self.our_role = our_role + self.their_role: Type[Sentinel] + if our_role is CLIENT: + self.their_role = SERVER + else: + self.their_role = CLIENT + self._cstate = ConnectionState() + + # Callables for converting data->events or vice-versa given the + # current state + self._writer = self._get_io_object(self.our_role, None, WRITERS) + self._reader = self._get_io_object(self.their_role, None, READERS) + + # Holds any unprocessed received data + self._receive_buffer = ReceiveBuffer() + # If this is true, then it indicates that the incoming connection was + # closed *after* the end of whatever's in self._receive_buffer: + self._receive_buffer_closed = False + + # Extra bits of state that don't fit into the state machine. + # + # These two are only used to interpret framing headers for figuring + # out how to read/write response bodies. their_http_version is also + # made available as a convenient public API. + self.their_http_version: Optional[bytes] = None + self._request_method: Optional[bytes] = None + # This is pure flow-control and doesn't at all affect the set of legal + # transitions, so no need to bother ConnectionState with it: + self.client_is_waiting_for_100_continue = False + + @property + def states(self) -> Dict[Type[Sentinel], Type[Sentinel]]: + """A dictionary like:: + + {CLIENT: , SERVER: } + + See :ref:`state-machine` for details. + + """ + return dict(self._cstate.states) + + @property + def our_state(self) -> Type[Sentinel]: + """The current state of whichever role we are playing. See + :ref:`state-machine` for details. 
+ """ + return self._cstate.states[self.our_role] + + @property + def their_state(self) -> Type[Sentinel]: + """The current state of whichever role we are NOT playing. See + :ref:`state-machine` for details. + """ + return self._cstate.states[self.their_role] + + @property + def they_are_waiting_for_100_continue(self) -> bool: + return self.their_role is CLIENT and self.client_is_waiting_for_100_continue + + def start_next_cycle(self) -> None: + """Attempt to reset our connection state for a new request/response + cycle. + + If both client and server are in :data:`DONE` state, then resets them + both to :data:`IDLE` state in preparation for a new request/response + cycle on this same connection. Otherwise, raises a + :exc:`LocalProtocolError`. + + See :ref:`keepalive-and-pipelining`. + + """ + old_states = dict(self._cstate.states) + self._cstate.start_next_cycle() + self._request_method = None + # self.their_http_version gets left alone, since it presumably lasts + # beyond a single request/response cycle + assert not self.client_is_waiting_for_100_continue + self._respond_to_state_changes(old_states) + + def _process_error(self, role: Type[Sentinel]) -> None: + old_states = dict(self._cstate.states) + self._cstate.process_error(role) + self._respond_to_state_changes(old_states) + + def _server_switch_event(self, event: Event) -> Optional[Type[Sentinel]]: + if type(event) is InformationalResponse and event.status_code == 101: + return _SWITCH_UPGRADE + if type(event) is Response: + if ( + _SWITCH_CONNECT in self._cstate.pending_switch_proposals + and 200 <= event.status_code < 300 + ): + return _SWITCH_CONNECT + return None + + # All events go through here + def _process_event(self, role: Type[Sentinel], event: Event) -> None: + # First, pass the event through the state machine to make sure it + # succeeds. 
+ old_states = dict(self._cstate.states) + if role is CLIENT and type(event) is Request: + if event.method == b"CONNECT": + self._cstate.process_client_switch_proposal(_SWITCH_CONNECT) + if get_comma_header(event.headers, b"upgrade"): + self._cstate.process_client_switch_proposal(_SWITCH_UPGRADE) + server_switch_event = None + if role is SERVER: + server_switch_event = self._server_switch_event(event) + self._cstate.process_event(role, type(event), server_switch_event) + + # Then perform the updates triggered by it. + + if type(event) is Request: + self._request_method = event.method + + if role is self.their_role and type(event) in ( + Request, + Response, + InformationalResponse, + ): + event = cast(Union[Request, Response, InformationalResponse], event) + self.their_http_version = event.http_version + + # Keep alive handling + # + # RFC 7230 doesn't really say what one should do if Connection: close + # shows up on a 1xx InformationalResponse. I think the idea is that + # this is not supposed to happen. In any case, if it does happen, we + # ignore it. 
+ if type(event) in (Request, Response) and not _keep_alive( + cast(Union[Request, Response], event) + ): + self._cstate.process_keep_alive_disabled() + + # 100-continue + if type(event) is Request and has_expect_100_continue(event): + self.client_is_waiting_for_100_continue = True + if type(event) in (InformationalResponse, Response): + self.client_is_waiting_for_100_continue = False + if role is CLIENT and type(event) in (Data, EndOfMessage): + self.client_is_waiting_for_100_continue = False + + self._respond_to_state_changes(old_states, event) + + def _get_io_object( + self, + role: Type[Sentinel], + event: Optional[Event], + io_dict: Union[ReadersType, WritersType], + ) -> Optional[Callable[..., Any]]: + # event may be None; it's only used when entering SEND_BODY + state = self._cstate.states[role] + if state is SEND_BODY: + # Special case: the io_dict has a dict of reader/writer factories + # that depend on the request/response framing. + framing_type, args = _body_framing( + cast(bytes, self._request_method), cast(Union[Request, Response], event) + ) + return io_dict[SEND_BODY][framing_type](*args) # type: ignore[index] + else: + # General case: the io_dict just has the appropriate reader/writer + # for this state + return io_dict.get((role, state)) # type: ignore[return-value] + + # This must be called after any action that might have caused + # self._cstate.states to change. 
+ def _respond_to_state_changes( + self, + old_states: Dict[Type[Sentinel], Type[Sentinel]], + event: Optional[Event] = None, + ) -> None: + # Update reader/writer + if self.our_state != old_states[self.our_role]: + self._writer = self._get_io_object(self.our_role, event, WRITERS) + if self.their_state != old_states[self.their_role]: + self._reader = self._get_io_object(self.their_role, event, READERS) + + @property + def trailing_data(self) -> Tuple[bytes, bool]: + """Data that has been received, but not yet processed, represented as + a tuple with two elements, where the first is a byte-string containing + the unprocessed data itself, and the second is a bool that is True if + the receive connection was closed. + + See :ref:`switching-protocols` for discussion of why you'd want this. + """ + return (bytes(self._receive_buffer), self._receive_buffer_closed) + + def receive_data(self, data: bytes) -> None: + """Add data to our internal receive buffer. + + This does not actually do any processing on the data, just stores + it. To trigger processing, you have to call :meth:`next_event`. + + Args: + data (:term:`bytes-like object`): + The new data that was just received. + + Special case: If *data* is an empty byte-string like ``b""``, + then this indicates that the remote side has closed the + connection (end of file). Normally this is convenient, because + standard Python APIs like :meth:`file.read` or + :meth:`socket.recv` use ``b""`` to indicate end-of-file, while + other failures to read are indicated using other mechanisms + like raising :exc:`TimeoutError`. When using such an API you + can just blindly pass through whatever you get from ``read`` + to :meth:`receive_data`, and everything will work. + + But, if you have an API where reading an empty string is a + valid non-EOF condition, then you need to be aware of this and + make sure to check for such strings and avoid passing them to + :meth:`receive_data`. 
+ + Returns: + Nothing, but after calling this you should call :meth:`next_event` + to parse the newly received data. + + Raises: + RuntimeError: + Raised if you pass an empty *data*, indicating EOF, and then + pass a non-empty *data*, indicating more data that somehow + arrived after the EOF. + + (Calling ``receive_data(b"")`` multiple times is fine, + and equivalent to calling it once.) + + """ + if data: + if self._receive_buffer_closed: + raise RuntimeError("received close, then received more data?") + self._receive_buffer += data + else: + self._receive_buffer_closed = True + + def _extract_next_receive_event( + self, + ) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]: + state = self.their_state + # We don't pause immediately when they enter DONE, because even in + # DONE state we can still process a ConnectionClosed() event. But + # if we have data in our buffer, then we definitely aren't getting + # a ConnectionClosed() immediately and we need to pause. + if state is DONE and self._receive_buffer: + return PAUSED + if state is MIGHT_SWITCH_PROTOCOL or state is SWITCHED_PROTOCOL: + return PAUSED + assert self._reader is not None + event = self._reader(self._receive_buffer) + if event is None: + if not self._receive_buffer and self._receive_buffer_closed: + # In some unusual cases (basically just HTTP/1.0 bodies), EOF + # triggers an actual protocol event; in that case, we want to + # return that event, and then the state will change and we'll + # get called again to generate the actual ConnectionClosed(). + if hasattr(self._reader, "read_eof"): + event = self._reader.read_eof() # type: ignore[attr-defined] + else: + event = ConnectionClosed() + if event is None: + event = NEED_DATA + return event # type: ignore[no-any-return] + + def next_event(self) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]: + """Parse the next event out of our receive buffer, update our internal + state, and return it. 
+ + This is a mutating operation -- think of it like calling :func:`next` + on an iterator. + + Returns: + : One of three things: + + 1) An event object -- see :ref:`events`. + + 2) The special constant :data:`NEED_DATA`, which indicates that + you need to read more data from your socket and pass it to + :meth:`receive_data` before this method will be able to return + any more events. + + 3) The special constant :data:`PAUSED`, which indicates that we + are not in a state where we can process incoming data (usually + because the peer has finished their part of the current + request/response cycle, and you have not yet called + :meth:`start_next_cycle`). See :ref:`flow-control` for details. + + Raises: + RemoteProtocolError: + The peer has misbehaved. You should close the connection + (possibly after sending some kind of 4xx response). + + Once this method returns :class:`ConnectionClosed` once, then all + subsequent calls will also return :class:`ConnectionClosed`. + + If this method raises any exception besides :exc:`RemoteProtocolError` + then that's a bug -- if it happens please file a bug report! + + If this method raises any exception then it also sets + :attr:`Connection.their_state` to :data:`ERROR` -- see + :ref:`error-handling` for discussion. 
+ + """ + + if self.their_state is ERROR: + raise RemoteProtocolError("Can't receive data when peer state is ERROR") + try: + event = self._extract_next_receive_event() + if event not in [NEED_DATA, PAUSED]: + self._process_event(self.their_role, cast(Event, event)) + if event is NEED_DATA: + if len(self._receive_buffer) > self._max_incomplete_event_size: + # 431 is "Request header fields too large" which is pretty + # much the only situation where we can get here + raise RemoteProtocolError( + "Receive buffer too long", error_status_hint=431 + ) + if self._receive_buffer_closed: + # We're still trying to complete some event, but that's + # never going to happen because no more data is coming + raise RemoteProtocolError("peer unexpectedly closed connection") + return event + except BaseException as exc: + self._process_error(self.their_role) + if isinstance(exc, LocalProtocolError): + exc._reraise_as_remote_protocol_error() + else: + raise + + def send(self, event: Event) -> Optional[bytes]: + """Convert a high-level event into bytes that can be sent to the peer, + while updating our internal state machine. + + Args: + event: The :ref:`event ` to send. + + Returns: + If ``type(event) is ConnectionClosed``, then returns + ``None``. Otherwise, returns a :term:`bytes-like object`. + + Raises: + LocalProtocolError: + Sending this event at this time would violate our + understanding of the HTTP/1.1 protocol. + + If this method raises any exception then it also sets + :attr:`Connection.our_state` to :data:`ERROR` -- see + :ref:`error-handling` for discussion. 
+ + """ + data_list = self.send_with_data_passthrough(event) + if data_list is None: + return None + else: + return b"".join(data_list) + + def send_with_data_passthrough(self, event: Event) -> Optional[List[bytes]]: + """Identical to :meth:`send`, except that in situations where + :meth:`send` returns a single :term:`bytes-like object`, this instead + returns a list of them -- and when sending a :class:`Data` event, this + list is guaranteed to contain the exact object you passed in as + :attr:`Data.data`. See :ref:`sendfile` for discussion. + + """ + if self.our_state is ERROR: + raise LocalProtocolError("Can't send data when our state is ERROR") + try: + if type(event) is Response: + event = self._clean_up_response_headers_for_sending(event) + # We want to call _process_event before calling the writer, + # because if someone tries to do something invalid then this will + # give a sensible error message, while our writers all just assume + # they will only receive valid events. But, _process_event might + # change self._writer. So we have to do a little dance: + writer = self._writer + self._process_event(self.our_role, event) + if type(event) is ConnectionClosed: + return None + else: + # In any situation where writer is None, process_event should + # have raised ProtocolError + assert writer is not None + data_list: List[bytes] = [] + writer(event, data_list.append) + return data_list + except: + self._process_error(self.our_role) + raise + + def send_failed(self) -> None: + """Notify the state machine that we failed to send the data it gave + us. + + This causes :attr:`Connection.our_state` to immediately become + :data:`ERROR` -- see :ref:`error-handling` for discussion. + + """ + self._process_error(self.our_role) + + # When sending a Response, we take responsibility for a few things: + # + # - Sometimes you MUST set Connection: close. We take care of those + # times. 
(You can also set it yourself if you want, and if you do then + # we'll respect that and close the connection at the right time. But you + # don't have to worry about that unless you want to.) + # + # - The user has to set Content-Length if they want it. Otherwise, for + # responses that have bodies (e.g. not HEAD), then we will automatically + # select the right mechanism for streaming a body of unknown length, + # which depends on depending on the peer's HTTP version. + # + # This function's *only* responsibility is making sure headers are set up + # right -- everything downstream just looks at the headers. There are no + # side channels. + def _clean_up_response_headers_for_sending(self, response: Response) -> Response: + assert type(response) is Response + + headers = response.headers + need_close = False + + # HEAD requests need some special handling: they always act like they + # have Content-Length: 0, and that's how _body_framing treats + # them. But their headers are supposed to match what we would send if + # the request was a GET. (Technically there is one deviation allowed: + # we're allowed to leave out the framing headers -- see + # https://tools.ietf.org/html/rfc7231#section-4.3.2 . But it's just as + # easy to get them right.) + method_for_choosing_headers = cast(bytes, self._request_method) + if method_for_choosing_headers == b"HEAD": + method_for_choosing_headers = b"GET" + framing_type, _ = _body_framing(method_for_choosing_headers, response) + if framing_type in ("chunked", "http/1.0"): + # This response has a body of unknown length. + # If our peer is HTTP/1.1, we use Transfer-Encoding: chunked + # If our peer is HTTP/1.0, we use no framing headers, and close the + # connection afterwards. 
+ # + # Make sure to clear Content-Length (in principle user could have + # set both and then we ignored Content-Length b/c + # Transfer-Encoding overwrote it -- this would be naughty of them, + # but the HTTP spec says that if our peer does this then we have + # to fix it instead of erroring out, so we'll accord the user the + # same respect). + headers = set_comma_header(headers, b"content-length", []) + if self.their_http_version is None or self.their_http_version < b"1.1": + # Either we never got a valid request and are sending back an + # error (their_http_version is None), so we assume the worst; + # or else we did get a valid HTTP/1.0 request, so we know that + # they don't understand chunked encoding. + headers = set_comma_header(headers, b"transfer-encoding", []) + # This is actually redundant ATM, since currently we + # unconditionally disable keep-alive when talking to HTTP/1.0 + # peers. But let's be defensive just in case we add + # Connection: keep-alive support later: + if self._request_method != b"HEAD": + need_close = True + else: + headers = set_comma_header(headers, b"transfer-encoding", [b"chunked"]) + + if not self._cstate.keep_alive or need_close: + # Make sure Connection: close is set + connection = set(get_comma_header(headers, b"connection")) + connection.discard(b"keep-alive") + connection.add(b"close") + headers = set_comma_header(headers, b"connection", sorted(connection)) + + return Response( + headers=headers, + status_code=response.status_code, + http_version=response.http_version, + reason=response.reason, + ) diff --git a/.venv/Lib/site-packages/h11/_events.py b/.venv/Lib/site-packages/h11/_events.py new file mode 100644 index 00000000..075bf8a4 --- /dev/null +++ b/.venv/Lib/site-packages/h11/_events.py @@ -0,0 +1,369 @@ +# High level events that make up HTTP/1.1 conversations. Loosely inspired by +# the corresponding events in hyper-h2: +# +# http://python-hyper.org/h2/en/stable/api.html#events +# +# Don't subclass these. 
Stuff will break. + +import re +from abc import ABC +from dataclasses import dataclass, field +from typing import Any, cast, Dict, List, Tuple, Union + +from ._abnf import method, request_target +from ._headers import Headers, normalize_and_validate +from ._util import bytesify, LocalProtocolError, validate + +# Everything in __all__ gets re-exported as part of the h11 public API. +__all__ = [ + "Event", + "Request", + "InformationalResponse", + "Response", + "Data", + "EndOfMessage", + "ConnectionClosed", +] + +method_re = re.compile(method.encode("ascii")) +request_target_re = re.compile(request_target.encode("ascii")) + + +class Event(ABC): + """ + Base class for h11 events. + """ + + __slots__ = () + + +@dataclass(init=False, frozen=True) +class Request(Event): + """The beginning of an HTTP request. + + Fields: + + .. attribute:: method + + An HTTP method, e.g. ``b"GET"`` or ``b"POST"``. Always a byte + string. :term:`Bytes-like objects ` and native + strings containing only ascii characters will be automatically + converted to byte strings. + + .. attribute:: target + + The target of an HTTP request, e.g. ``b"/index.html"``, or one of the + more exotic formats described in `RFC 7320, section 5.3 + `_. Always a byte + string. :term:`Bytes-like objects ` and native + strings containing only ascii characters will be automatically + converted to byte strings. + + .. attribute:: headers + + Request headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules ` for details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules + ` for details. 
+ + """ + + __slots__ = ("method", "headers", "target", "http_version") + + method: bytes + headers: Headers + target: bytes + http_version: bytes + + def __init__( + self, + *, + method: Union[bytes, str], + headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]], + target: Union[bytes, str], + http_version: Union[bytes, str] = b"1.1", + _parsed: bool = False, + ) -> None: + super().__init__() + if isinstance(headers, Headers): + object.__setattr__(self, "headers", headers) + else: + object.__setattr__( + self, "headers", normalize_and_validate(headers, _parsed=_parsed) + ) + if not _parsed: + object.__setattr__(self, "method", bytesify(method)) + object.__setattr__(self, "target", bytesify(target)) + object.__setattr__(self, "http_version", bytesify(http_version)) + else: + object.__setattr__(self, "method", method) + object.__setattr__(self, "target", target) + object.__setattr__(self, "http_version", http_version) + + # "A server MUST respond with a 400 (Bad Request) status code to any + # HTTP/1.1 request message that lacks a Host header field and to any + # request message that contains more than one Host header field or a + # Host header field with an invalid field-value." + # -- https://tools.ietf.org/html/rfc7230#section-5.4 + host_count = 0 + for name, value in self.headers: + if name == b"host": + host_count += 1 + if self.http_version == b"1.1" and host_count == 0: + raise LocalProtocolError("Missing mandatory Host: header") + if host_count > 1: + raise LocalProtocolError("Found multiple Host: headers") + + validate(method_re, self.method, "Illegal method characters") + validate(request_target_re, self.target, "Illegal target characters") + + # This is an unhashable type. 
+ __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class _ResponseBase(Event): + __slots__ = ("headers", "http_version", "reason", "status_code") + + headers: Headers + http_version: bytes + reason: bytes + status_code: int + + def __init__( + self, + *, + headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]], + status_code: int, + http_version: Union[bytes, str] = b"1.1", + reason: Union[bytes, str] = b"", + _parsed: bool = False, + ) -> None: + super().__init__() + if isinstance(headers, Headers): + object.__setattr__(self, "headers", headers) + else: + object.__setattr__( + self, "headers", normalize_and_validate(headers, _parsed=_parsed) + ) + if not _parsed: + object.__setattr__(self, "reason", bytesify(reason)) + object.__setattr__(self, "http_version", bytesify(http_version)) + if not isinstance(status_code, int): + raise LocalProtocolError("status code must be integer") + # Because IntEnum objects are instances of int, but aren't + # duck-compatible (sigh), see gh-72. + object.__setattr__(self, "status_code", int(status_code)) + else: + object.__setattr__(self, "reason", reason) + object.__setattr__(self, "http_version", http_version) + object.__setattr__(self, "status_code", status_code) + + self.__post_init__() + + def __post_init__(self) -> None: + pass + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class InformationalResponse(_ResponseBase): + """An HTTP informational response. + + Fields: + + .. attribute:: status_code + + The status code of this response, as an integer. For an + :class:`InformationalResponse`, this is always in the range [100, + 200). + + .. attribute:: headers + + Request headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules ` for + details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. 
See :ref:`the HTTP version normalization rules + ` for details. + + .. attribute:: reason + + The reason phrase of this response, as a byte string. For example: + ``b"OK"``, or ``b"Not Found"``. + + """ + + def __post_init__(self) -> None: + if not (100 <= self.status_code < 200): + raise LocalProtocolError( + "InformationalResponse status_code should be in range " + "[100, 200), not {}".format(self.status_code) + ) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class Response(_ResponseBase): + """The beginning of an HTTP response. + + Fields: + + .. attribute:: status_code + + The status code of this response, as an integer. For an + :class:`Response`, this is always in the range [200, + 1000). + + .. attribute:: headers + + Request headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules ` for details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules + ` for details. + + .. attribute:: reason + + The reason phrase of this response, as a byte string. For example: + ``b"OK"``, or ``b"Not Found"``. + + """ + + def __post_init__(self) -> None: + if not (200 <= self.status_code < 1000): + raise LocalProtocolError( + "Response status_code should be in range [200, 1000), not {}".format( + self.status_code + ) + ) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class Data(Event): + """Part of an HTTP message body. + + Fields: + + .. attribute:: data + + A :term:`bytes-like object` containing part of a message body. Or, if + using the ``combine=False`` argument to :meth:`Connection.send`, then + any object that your socket writing code knows what to do with, and for + which calling :func:`len` returns the number of bytes that will be + written -- see :ref:`sendfile` for details. + + .. 
attribute:: chunk_start + + A marker that indicates whether this data object is from the start of a + chunked transfer encoding chunk. This field is ignored when when a Data + event is provided to :meth:`Connection.send`: it is only valid on + events emitted from :meth:`Connection.next_event`. You probably + shouldn't use this attribute at all; see + :ref:`chunk-delimiters-are-bad` for details. + + .. attribute:: chunk_end + + A marker that indicates whether this data object is the last for a + given chunked transfer encoding chunk. This field is ignored when when + a Data event is provided to :meth:`Connection.send`: it is only valid + on events emitted from :meth:`Connection.next_event`. You probably + shouldn't use this attribute at all; see + :ref:`chunk-delimiters-are-bad` for details. + + """ + + __slots__ = ("data", "chunk_start", "chunk_end") + + data: bytes + chunk_start: bool + chunk_end: bool + + def __init__( + self, data: bytes, chunk_start: bool = False, chunk_end: bool = False + ) -> None: + object.__setattr__(self, "data", data) + object.__setattr__(self, "chunk_start", chunk_start) + object.__setattr__(self, "chunk_end", chunk_end) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +# XX FIXME: "A recipient MUST ignore (or consider as an error) any fields that +# are forbidden to be sent in a trailer, since processing them as if they were +# present in the header section might bypass external security filters." +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#chunked.trailer.part +# Unfortunately, the list of forbidden fields is long and vague :-/ +@dataclass(init=False, frozen=True) +class EndOfMessage(Event): + """The end of an HTTP message. + + Fields: + + .. attribute:: headers + + Default value: ``[]`` + + Any trailing headers attached to this message, represented as a list of + (name, value) pairs. See :ref:`the header normalization rules + ` for details. 
+ + Must be empty unless ``Transfer-Encoding: chunked`` is in use. + + """ + + __slots__ = ("headers",) + + headers: Headers + + def __init__( + self, + *, + headers: Union[ + Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]], None + ] = None, + _parsed: bool = False, + ) -> None: + super().__init__() + if headers is None: + headers = Headers([]) + elif not isinstance(headers, Headers): + headers = normalize_and_validate(headers, _parsed=_parsed) + + object.__setattr__(self, "headers", headers) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(frozen=True) +class ConnectionClosed(Event): + """This event indicates that the sender has closed their outgoing + connection. + + Note that this does not necessarily mean that they can't *receive* further + data, because TCP connections are composed to two one-way channels which + can be closed independently. See :ref:`closing` for details. + + No fields. + """ + + pass diff --git a/.venv/Lib/site-packages/h11/_headers.py b/.venv/Lib/site-packages/h11/_headers.py new file mode 100644 index 00000000..b97d020b --- /dev/null +++ b/.venv/Lib/site-packages/h11/_headers.py @@ -0,0 +1,278 @@ +import re +from typing import AnyStr, cast, List, overload, Sequence, Tuple, TYPE_CHECKING, Union + +from ._abnf import field_name, field_value +from ._util import bytesify, LocalProtocolError, validate + +if TYPE_CHECKING: + from ._events import Request + +try: + from typing import Literal +except ImportError: + from typing_extensions import Literal # type: ignore + + +# Facts +# ----- +# +# Headers are: +# keys: case-insensitive ascii +# values: mixture of ascii and raw bytes +# +# "Historically, HTTP has allowed field content with text in the ISO-8859-1 +# charset [ISO-8859-1], supporting other charsets only through use of +# [RFC2047] encoding. In practice, most HTTP header field values use only a +# subset of the US-ASCII charset [USASCII]. 
Newly defined header fields SHOULD +# limit their field values to US-ASCII octets. A recipient SHOULD treat other +# octets in field content (obs-text) as opaque data." +# And it deprecates all non-ascii values +# +# Leading/trailing whitespace in header names is forbidden +# +# Values get leading/trailing whitespace stripped +# +# Content-Disposition actually needs to contain unicode semantically; to +# accomplish this it has a terrifically weird way of encoding the filename +# itself as ascii (and even this still has lots of cross-browser +# incompatibilities) +# +# Order is important: +# "a proxy MUST NOT change the order of these field values when forwarding a +# message" +# (and there are several headers where the order indicates a preference) +# +# Multiple occurences of the same header: +# "A sender MUST NOT generate multiple header fields with the same field name +# in a message unless either the entire field value for that header field is +# defined as a comma-separated list [or the header is Set-Cookie which gets a +# special exception]" - RFC 7230. (cookies are in RFC 6265) +# +# So every header aside from Set-Cookie can be merged by b", ".join if it +# occurs repeatedly. But, of course, they can't necessarily be split by +# .split(b","), because quoting. +# +# Given all this mess (case insensitive, duplicates allowed, order is +# important, ...), there doesn't appear to be any standard way to handle +# headers in Python -- they're almost like dicts, but... actually just +# aren't. For now we punt and just use a super simple representation: headers +# are a list of pairs +# +# [(name1, value1), (name2, value2), ...] +# +# where all entries are bytestrings, names are lowercase and have no +# leading/trailing whitespace, and values are bytestrings with no +# leading/trailing whitespace. Searching and updating are done via naive O(n) +# methods. +# +# Maybe a dict-of-lists would be better? 
+ +_content_length_re = re.compile(rb"[0-9]+") +_field_name_re = re.compile(field_name.encode("ascii")) +_field_value_re = re.compile(field_value.encode("ascii")) + + +class Headers(Sequence[Tuple[bytes, bytes]]): + """ + A list-like interface that allows iterating over headers as byte-pairs + of (lowercased-name, value). + + Internally we actually store the representation as three-tuples, + including both the raw original casing, in order to preserve casing + over-the-wire, and the lowercased name, for case-insensitive comparisions. + + r = Request( + method="GET", + target="/", + headers=[("Host", "example.org"), ("Connection", "keep-alive")], + http_version="1.1", + ) + assert r.headers == [ + (b"host", b"example.org"), + (b"connection", b"keep-alive") + ] + assert r.headers.raw_items() == [ + (b"Host", b"example.org"), + (b"Connection", b"keep-alive") + ] + """ + + __slots__ = "_full_items" + + def __init__(self, full_items: List[Tuple[bytes, bytes, bytes]]) -> None: + self._full_items = full_items + + def __bool__(self) -> bool: + return bool(self._full_items) + + def __eq__(self, other: object) -> bool: + return list(self) == list(other) # type: ignore + + def __len__(self) -> int: + return len(self._full_items) + + def __repr__(self) -> str: + return "" % repr(list(self)) + + def __getitem__(self, idx: int) -> Tuple[bytes, bytes]: # type: ignore[override] + _, name, value = self._full_items[idx] + return (name, value) + + def raw_items(self) -> List[Tuple[bytes, bytes]]: + return [(raw_name, value) for raw_name, _, value in self._full_items] + + +HeaderTypes = Union[ + List[Tuple[bytes, bytes]], + List[Tuple[bytes, str]], + List[Tuple[str, bytes]], + List[Tuple[str, str]], +] + + +@overload +def normalize_and_validate(headers: Headers, _parsed: Literal[True]) -> Headers: + ... + + +@overload +def normalize_and_validate(headers: HeaderTypes, _parsed: Literal[False]) -> Headers: + ... 
+ + +@overload +def normalize_and_validate( + headers: Union[Headers, HeaderTypes], _parsed: bool = False +) -> Headers: + ... + + +def normalize_and_validate( + headers: Union[Headers, HeaderTypes], _parsed: bool = False +) -> Headers: + new_headers = [] + seen_content_length = None + saw_transfer_encoding = False + for name, value in headers: + # For headers coming out of the parser, we can safely skip some steps, + # because it always returns bytes and has already run these regexes + # over the data: + if not _parsed: + name = bytesify(name) + value = bytesify(value) + validate(_field_name_re, name, "Illegal header name {!r}", name) + validate(_field_value_re, value, "Illegal header value {!r}", value) + assert isinstance(name, bytes) + assert isinstance(value, bytes) + + raw_name = name + name = name.lower() + if name == b"content-length": + lengths = {length.strip() for length in value.split(b",")} + if len(lengths) != 1: + raise LocalProtocolError("conflicting Content-Length headers") + value = lengths.pop() + validate(_content_length_re, value, "bad Content-Length") + if seen_content_length is None: + seen_content_length = value + new_headers.append((raw_name, name, value)) + elif seen_content_length != value: + raise LocalProtocolError("conflicting Content-Length headers") + elif name == b"transfer-encoding": + # "A server that receives a request message with a transfer coding + # it does not understand SHOULD respond with 501 (Not + # Implemented)." 
+ # https://tools.ietf.org/html/rfc7230#section-3.3.1 + if saw_transfer_encoding: + raise LocalProtocolError( + "multiple Transfer-Encoding headers", error_status_hint=501 + ) + # "All transfer-coding names are case-insensitive" + # -- https://tools.ietf.org/html/rfc7230#section-4 + value = value.lower() + if value != b"chunked": + raise LocalProtocolError( + "Only Transfer-Encoding: chunked is supported", + error_status_hint=501, + ) + saw_transfer_encoding = True + new_headers.append((raw_name, name, value)) + else: + new_headers.append((raw_name, name, value)) + return Headers(new_headers) + + +def get_comma_header(headers: Headers, name: bytes) -> List[bytes]: + # Should only be used for headers whose value is a list of + # comma-separated, case-insensitive values. + # + # The header name `name` is expected to be lower-case bytes. + # + # Connection: meets these criteria (including cast insensitivity). + # + # Content-Length: technically is just a single value (1*DIGIT), but the + # standard makes reference to implementations that do multiple values, and + # using this doesn't hurt. Ditto, case insensitivity doesn't things either + # way. + # + # Transfer-Encoding: is more complex (allows for quoted strings), so + # splitting on , is actually wrong. For example, this is legal: + # + # Transfer-Encoding: foo; options="1,2", chunked + # + # and should be parsed as + # + # foo; options="1,2" + # chunked + # + # but this naive function will parse it as + # + # foo; options="1 + # 2" + # chunked + # + # However, this is okay because the only thing we are going to do with + # any Transfer-Encoding is reject ones that aren't just "chunked", so + # both of these will be treated the same anyway. + # + # Expect: the only legal value is the literal string + # "100-continue". Splitting on commas is harmless. Case insensitive. 
+ # + out: List[bytes] = [] + for _, found_name, found_raw_value in headers._full_items: + if found_name == name: + found_raw_value = found_raw_value.lower() + for found_split_value in found_raw_value.split(b","): + found_split_value = found_split_value.strip() + if found_split_value: + out.append(found_split_value) + return out + + +def set_comma_header(headers: Headers, name: bytes, new_values: List[bytes]) -> Headers: + # The header name `name` is expected to be lower-case bytes. + # + # Note that when we store the header we use title casing for the header + # names, in order to match the conventional HTTP header style. + # + # Simply calling `.title()` is a blunt approach, but it's correct + # here given the cases where we're using `set_comma_header`... + # + # Connection, Content-Length, Transfer-Encoding. + new_headers: List[Tuple[bytes, bytes]] = [] + for found_raw_name, found_name, found_raw_value in headers._full_items: + if found_name != name: + new_headers.append((found_raw_name, found_raw_value)) + for new_value in new_values: + new_headers.append((name.title(), new_value)) + return normalize_and_validate(new_headers) + + +def has_expect_100_continue(request: "Request") -> bool: + # https://tools.ietf.org/html/rfc7231#section-5.1.1 + # "A server that receives a 100-continue expectation in an HTTP/1.0 request + # MUST ignore that expectation." + if request.http_version < b"1.1": + return False + expect = get_comma_header(request.headers, b"expect") + return b"100-continue" in expect diff --git a/.venv/Lib/site-packages/h11/_readers.py b/.venv/Lib/site-packages/h11/_readers.py new file mode 100644 index 00000000..08a9574d --- /dev/null +++ b/.venv/Lib/site-packages/h11/_readers.py @@ -0,0 +1,247 @@ +# Code to read HTTP data +# +# Strategy: each reader is a callable which takes a ReceiveBuffer object, and +# either: +# 1) consumes some of it and returns an Event +# 2) raises a LocalProtocolError (for consistency -- e.g. 
we call validate() +# and it might raise a LocalProtocolError, so simpler just to always use +# this) +# 3) returns None, meaning "I need more data" +# +# If they have a .read_eof attribute, then this will be called if an EOF is +# received -- but this is optional. Either way, the actual ConnectionClosed +# event will be generated afterwards. +# +# READERS is a dict describing how to pick a reader. It maps states to either: +# - a reader +# - or, for body readers, a dict of per-framing reader factories + +import re +from typing import Any, Callable, Dict, Iterable, NoReturn, Optional, Tuple, Type, Union + +from ._abnf import chunk_header, header_field, request_line, status_line +from ._events import Data, EndOfMessage, InformationalResponse, Request, Response +from ._receivebuffer import ReceiveBuffer +from ._state import ( + CLIENT, + CLOSED, + DONE, + IDLE, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, +) +from ._util import LocalProtocolError, RemoteProtocolError, Sentinel, validate + +__all__ = ["READERS"] + +header_field_re = re.compile(header_field.encode("ascii")) +obs_fold_re = re.compile(rb"[ \t]+") + + +def _obsolete_line_fold(lines: Iterable[bytes]) -> Iterable[bytes]: + it = iter(lines) + last: Optional[bytes] = None + for line in it: + match = obs_fold_re.match(line) + if match: + if last is None: + raise LocalProtocolError("continuation line at start of headers") + if not isinstance(last, bytearray): + # Cast to a mutable type, avoiding copy on append to ensure O(n) time + last = bytearray(last) + last += b" " + last += line[match.end() :] + else: + if last is not None: + yield last + last = line + if last is not None: + yield last + + +def _decode_header_lines( + lines: Iterable[bytes], +) -> Iterable[Tuple[bytes, bytes]]: + for line in _obsolete_line_fold(lines): + matches = validate(header_field_re, line, "illegal header line: {!r}", line) + yield (matches["field_name"], matches["field_value"]) + + +request_line_re = 
re.compile(request_line.encode("ascii")) + + +def maybe_read_from_IDLE_client(buf: ReceiveBuffer) -> Optional[Request]: + lines = buf.maybe_extract_lines() + if lines is None: + if buf.is_next_line_obviously_invalid_request_line(): + raise LocalProtocolError("illegal request line") + return None + if not lines: + raise LocalProtocolError("no request line received") + matches = validate( + request_line_re, lines[0], "illegal request line: {!r}", lines[0] + ) + return Request( + headers=list(_decode_header_lines(lines[1:])), _parsed=True, **matches + ) + + +status_line_re = re.compile(status_line.encode("ascii")) + + +def maybe_read_from_SEND_RESPONSE_server( + buf: ReceiveBuffer, +) -> Union[InformationalResponse, Response, None]: + lines = buf.maybe_extract_lines() + if lines is None: + if buf.is_next_line_obviously_invalid_request_line(): + raise LocalProtocolError("illegal request line") + return None + if not lines: + raise LocalProtocolError("no response line received") + matches = validate(status_line_re, lines[0], "illegal status line: {!r}", lines[0]) + http_version = ( + b"1.1" if matches["http_version"] is None else matches["http_version"] + ) + reason = b"" if matches["reason"] is None else matches["reason"] + status_code = int(matches["status_code"]) + class_: Union[Type[InformationalResponse], Type[Response]] = ( + InformationalResponse if status_code < 200 else Response + ) + return class_( + headers=list(_decode_header_lines(lines[1:])), + _parsed=True, + status_code=status_code, + reason=reason, + http_version=http_version, + ) + + +class ContentLengthReader: + def __init__(self, length: int) -> None: + self._length = length + self._remaining = length + + def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: + if self._remaining == 0: + return EndOfMessage() + data = buf.maybe_extract_at_most(self._remaining) + if data is None: + return None + self._remaining -= len(data) + return Data(data=data) + + def read_eof(self) -> 
NoReturn: + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(received {} bytes, expected {})".format( + self._length - self._remaining, self._length + ) + ) + + +chunk_header_re = re.compile(chunk_header.encode("ascii")) + + +class ChunkedReader: + def __init__(self) -> None: + self._bytes_in_chunk = 0 + # After reading a chunk, we have to throw away the trailing \r\n; if + # this is >0 then we discard that many bytes before resuming regular + # de-chunkification. + self._bytes_to_discard = 0 + self._reading_trailer = False + + def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: + if self._reading_trailer: + lines = buf.maybe_extract_lines() + if lines is None: + return None + return EndOfMessage(headers=list(_decode_header_lines(lines))) + if self._bytes_to_discard > 0: + data = buf.maybe_extract_at_most(self._bytes_to_discard) + if data is None: + return None + self._bytes_to_discard -= len(data) + if self._bytes_to_discard > 0: + return None + # else, fall through and read some more + assert self._bytes_to_discard == 0 + if self._bytes_in_chunk == 0: + # We need to refill our chunk count + chunk_header = buf.maybe_extract_next_line() + if chunk_header is None: + return None + matches = validate( + chunk_header_re, + chunk_header, + "illegal chunk header: {!r}", + chunk_header, + ) + # XX FIXME: we discard chunk extensions. Does anyone care? 
+ self._bytes_in_chunk = int(matches["chunk_size"], base=16) + if self._bytes_in_chunk == 0: + self._reading_trailer = True + return self(buf) + chunk_start = True + else: + chunk_start = False + assert self._bytes_in_chunk > 0 + data = buf.maybe_extract_at_most(self._bytes_in_chunk) + if data is None: + return None + self._bytes_in_chunk -= len(data) + if self._bytes_in_chunk == 0: + self._bytes_to_discard = 2 + chunk_end = True + else: + chunk_end = False + return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end) + + def read_eof(self) -> NoReturn: + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(incomplete chunked read)" + ) + + +class Http10Reader: + def __call__(self, buf: ReceiveBuffer) -> Optional[Data]: + data = buf.maybe_extract_at_most(999999999) + if data is None: + return None + return Data(data=data) + + def read_eof(self) -> EndOfMessage: + return EndOfMessage() + + +def expect_nothing(buf: ReceiveBuffer) -> None: + if buf: + raise LocalProtocolError("Got data when expecting EOF") + return None + + +ReadersType = Dict[ + Union[Type[Sentinel], Tuple[Type[Sentinel], Type[Sentinel]]], + Union[Callable[..., Any], Dict[str, Callable[..., Any]]], +] + +READERS: ReadersType = { + (CLIENT, IDLE): maybe_read_from_IDLE_client, + (SERVER, IDLE): maybe_read_from_SEND_RESPONSE_server, + (SERVER, SEND_RESPONSE): maybe_read_from_SEND_RESPONSE_server, + (CLIENT, DONE): expect_nothing, + (CLIENT, MUST_CLOSE): expect_nothing, + (CLIENT, CLOSED): expect_nothing, + (SERVER, DONE): expect_nothing, + (SERVER, MUST_CLOSE): expect_nothing, + (SERVER, CLOSED): expect_nothing, + SEND_BODY: { + "chunked": ChunkedReader, + "content-length": ContentLengthReader, + "http/1.0": Http10Reader, + }, +} diff --git a/.venv/Lib/site-packages/h11/_receivebuffer.py b/.venv/Lib/site-packages/h11/_receivebuffer.py new file mode 100644 index 00000000..e5c4e08a --- /dev/null +++ b/.venv/Lib/site-packages/h11/_receivebuffer.py @@ 
-0,0 +1,153 @@ +import re +import sys +from typing import List, Optional, Union + +__all__ = ["ReceiveBuffer"] + + +# Operations we want to support: +# - find next \r\n or \r\n\r\n (\n or \n\n are also acceptable), +# or wait until there is one +# - read at-most-N bytes +# Goals: +# - on average, do this fast +# - worst case, do this in O(n) where n is the number of bytes processed +# Plan: +# - store bytearray, offset, how far we've searched for a separator token +# - use the how-far-we've-searched data to avoid rescanning +# - while doing a stream of uninterrupted processing, advance offset instead +# of constantly copying +# WARNING: +# - I haven't benchmarked or profiled any of this yet. +# +# Note that starting in Python 3.4, deleting the initial n bytes from a +# bytearray is amortized O(n), thanks to some excellent work by Antoine +# Martin: +# +# https://bugs.python.org/issue19087 +# +# This means that if we only supported 3.4+, we could get rid of the code here +# involving self._start and self.compress, because it's doing exactly the same +# thing that bytearray now does internally. +# +# BUT unfortunately, we still support 2.7, and reading short segments out of a +# long buffer MUST be O(bytes read) to avoid DoS issues, so we can't actually +# delete this code. Yet: +# +# https://pythonclock.org/ +# +# (Two things to double-check first though: make sure PyPy also has the +# optimization, and benchmark to make sure it's a win, since we do have a +# slightly clever thing where we delay calling compress() until we've +# processed a whole event, which could in theory be slightly more efficient +# than the internal bytearray support.) 
+blank_line_regex = re.compile(b"\n\r?\n", re.MULTILINE) + + +class ReceiveBuffer: + def __init__(self) -> None: + self._data = bytearray() + self._next_line_search = 0 + self._multiple_lines_search = 0 + + def __iadd__(self, byteslike: Union[bytes, bytearray]) -> "ReceiveBuffer": + self._data += byteslike + return self + + def __bool__(self) -> bool: + return bool(len(self)) + + def __len__(self) -> int: + return len(self._data) + + # for @property unprocessed_data + def __bytes__(self) -> bytes: + return bytes(self._data) + + def _extract(self, count: int) -> bytearray: + # extracting an initial slice of the data buffer and return it + out = self._data[:count] + del self._data[:count] + + self._next_line_search = 0 + self._multiple_lines_search = 0 + + return out + + def maybe_extract_at_most(self, count: int) -> Optional[bytearray]: + """ + Extract a fixed number of bytes from the buffer. + """ + out = self._data[:count] + if not out: + return None + + return self._extract(count) + + def maybe_extract_next_line(self) -> Optional[bytearray]: + """ + Extract the first line, if it is completed in the buffer. + """ + # Only search in buffer space that we've not already looked at. + search_start_index = max(0, self._next_line_search - 1) + partial_idx = self._data.find(b"\r\n", search_start_index) + + if partial_idx == -1: + self._next_line_search = len(self._data) + return None + + # + 2 is to compensate len(b"\r\n") + idx = partial_idx + 2 + + return self._extract(idx) + + def maybe_extract_lines(self) -> Optional[List[bytearray]]: + """ + Extract everything up to the first blank line, and return a list of lines. + """ + # Handle the case where we have an immediate empty line. + if self._data[:1] == b"\n": + self._extract(1) + return [] + + if self._data[:2] == b"\r\n": + self._extract(2) + return [] + + # Only search in buffer space that we've not already looked at. 
+ match = blank_line_regex.search(self._data, self._multiple_lines_search) + if match is None: + self._multiple_lines_search = max(0, len(self._data) - 2) + return None + + # Truncate the buffer and return it. + idx = match.span(0)[-1] + out = self._extract(idx) + lines = out.split(b"\n") + + for line in lines: + if line.endswith(b"\r"): + del line[-1] + + assert lines[-2] == lines[-1] == b"" + + del lines[-2:] + + return lines + + # In theory we should wait until `\r\n` before starting to validate + # incoming data. However it's interesting to detect (very) invalid data + # early given they might not even contain `\r\n` at all (hence only + # timeout will get rid of them). + # This is not a 100% effective detection but more of a cheap sanity check + # allowing for early abort in some useful cases. + # This is especially interesting when peer is messing up with HTTPS and + # sent us a TLS stream where we were expecting plain HTTP given all + # versions of TLS so far start handshake with a 0x16 message type code. + def is_next_line_obviously_invalid_request_line(self) -> bool: + try: + # HTTP header line must not contain non-printable characters + # and should not start with a space + return self._data[0] < 0x21 + except IndexError: + return False diff --git a/.venv/Lib/site-packages/h11/_state.py b/.venv/Lib/site-packages/h11/_state.py new file mode 100644 index 00000000..3593430a --- /dev/null +++ b/.venv/Lib/site-packages/h11/_state.py @@ -0,0 +1,367 @@ +################################################################ +# The core state machine +################################################################ +# +# Rule 1: everything that affects the state machine and state transitions must +# live here in this file. As much as possible goes into the table-based +# representation, but for the bits that don't quite fit, the actual code and +# state must nonetheless live here. 
+# +# Rule 2: this file does not know about what role we're playing; it only knows +# about HTTP request/response cycles in the abstract. This ensures that we +# don't cheat and apply different rules to local and remote parties. +# +# +# Theory of operation +# =================== +# +# Possibly the simplest way to think about this is that we actually have 5 +# different state machines here. Yes, 5. These are: +# +# 1) The client state, with its complicated automaton (see the docs) +# 2) The server state, with its complicated automaton (see the docs) +# 3) The keep-alive state, with possible states {True, False} +# 4) The SWITCH_CONNECT state, with possible states {False, True} +# 5) The SWITCH_UPGRADE state, with possible states {False, True} +# +# For (3)-(5), the first state listed is the initial state. +# +# (1)-(3) are stored explicitly in member variables. The last +# two are stored implicitly in the pending_switch_proposals set as: +# (state of 4) == (_SWITCH_CONNECT in pending_switch_proposals) +# (state of 5) == (_SWITCH_UPGRADE in pending_switch_proposals) +# +# And each of these machines has two different kinds of transitions: +# +# a) Event-triggered +# b) State-triggered +# +# Event triggered is the obvious thing that you'd think it is: some event +# happens, and if it's the right event at the right time then a transition +# happens. But there are somewhat complicated rules for which machines can +# "see" which events. (As a rule of thumb, if a machine "sees" an event, this +# means two things: the event can affect the machine, and if the machine is +# not in a state where it expects that event then it's an error.) These rules +# are: +# +# 1) The client machine sees all h11.events objects emitted by the client. +# +# 2) The server machine sees all h11.events objects emitted by the server. +# +# It also sees the client's Request event. +# +# And sometimes, server events are annotated with a _SWITCH_* event. 
For +# example, we can have a (Response, _SWITCH_CONNECT) event, which is +# different from a regular Response event. +# +# 3) The keep-alive machine sees the process_keep_alive_disabled() event +# (which is derived from Request/Response events), and this event +# transitions it from True -> False, or from False -> False. There's no way +# to transition back. +# +# 4&5) The _SWITCH_* machines transition from False->True when we get a +# Request that proposes the relevant type of switch (via +# process_client_switch_proposals), and they go from True->False when we +# get a Response that has no _SWITCH_* annotation. +# +# So that's event-triggered transitions. +# +# State-triggered transitions are less standard. What they do here is couple +# the machines together. The way this works is, when certain *joint* +# configurations of states are achieved, then we automatically transition to a +# new *joint* state. So, for example, if we're ever in a joint state with +# +# client: DONE +# keep-alive: False +# +# then the client state immediately transitions to: +# +# client: MUST_CLOSE +# +# This is fundamentally different from an event-based transition, because it +# doesn't matter how we arrived at the {client: DONE, keep-alive: False} state +# -- maybe the client transitioned SEND_BODY -> DONE, or keep-alive +# transitioned True -> False. Either way, once this precondition is satisfied, +# this transition is immediately triggered. +# +# What if two conflicting state-based transitions get enabled at the same +# time? In practice there's only one case where this arises (client DONE -> +# MIGHT_SWITCH_PROTOCOL versus DONE -> MUST_CLOSE), and we resolve it by +# explicitly prioritizing the DONE -> MIGHT_SWITCH_PROTOCOL transition. +# +# Implementation +# -------------- +# +# The event-triggered transitions for the server and client machines are all +# stored explicitly in a table. Ditto for the state-triggered transitions that +# involve just the server and client state. 
+# +# The transitions for the other machines, and the state-triggered transitions +# that involve the other machines, are written out as explicit Python code. +# +# It'd be nice if there were some cleaner way to do all this. This isn't +# *too* terrible, but I feel like it could probably be better. +# +# WARNING +# ------- +# +# The script that generates the state machine diagrams for the docs knows how +# to read out the EVENT_TRIGGERED_TRANSITIONS and STATE_TRIGGERED_TRANSITIONS +# tables. But it can't automatically read the transitions that are written +# directly in Python code. So if you touch those, you need to also update the +# script to keep it in sync! +from typing import cast, Dict, Optional, Set, Tuple, Type, Union + +from ._events import * +from ._util import LocalProtocolError, Sentinel + +# Everything in __all__ gets re-exported as part of the h11 public API. +__all__ = [ + "CLIENT", + "SERVER", + "IDLE", + "SEND_RESPONSE", + "SEND_BODY", + "DONE", + "MUST_CLOSE", + "CLOSED", + "MIGHT_SWITCH_PROTOCOL", + "SWITCHED_PROTOCOL", + "ERROR", +] + + +class CLIENT(Sentinel, metaclass=Sentinel): + pass + + +class SERVER(Sentinel, metaclass=Sentinel): + pass + + +# States +class IDLE(Sentinel, metaclass=Sentinel): + pass + + +class SEND_RESPONSE(Sentinel, metaclass=Sentinel): + pass + + +class SEND_BODY(Sentinel, metaclass=Sentinel): + pass + + +class DONE(Sentinel, metaclass=Sentinel): + pass + + +class MUST_CLOSE(Sentinel, metaclass=Sentinel): + pass + + +class CLOSED(Sentinel, metaclass=Sentinel): + pass + + +class ERROR(Sentinel, metaclass=Sentinel): + pass + + +# Switch types +class MIGHT_SWITCH_PROTOCOL(Sentinel, metaclass=Sentinel): + pass + + +class SWITCHED_PROTOCOL(Sentinel, metaclass=Sentinel): + pass + + +class _SWITCH_UPGRADE(Sentinel, metaclass=Sentinel): + pass + + +class _SWITCH_CONNECT(Sentinel, metaclass=Sentinel): + pass + + +EventTransitionType = Dict[ + Type[Sentinel], + Dict[ + Type[Sentinel], + Dict[Union[Type[Event], Tuple[Type[Event], 
Type[Sentinel]]], Type[Sentinel]], + ], +] + +EVENT_TRIGGERED_TRANSITIONS: EventTransitionType = { + CLIENT: { + IDLE: {Request: SEND_BODY, ConnectionClosed: CLOSED}, + SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, + DONE: {ConnectionClosed: CLOSED}, + MUST_CLOSE: {ConnectionClosed: CLOSED}, + CLOSED: {ConnectionClosed: CLOSED}, + MIGHT_SWITCH_PROTOCOL: {}, + SWITCHED_PROTOCOL: {}, + ERROR: {}, + }, + SERVER: { + IDLE: { + ConnectionClosed: CLOSED, + Response: SEND_BODY, + # Special case: server sees client Request events, in this form + (Request, CLIENT): SEND_RESPONSE, + }, + SEND_RESPONSE: { + InformationalResponse: SEND_RESPONSE, + Response: SEND_BODY, + (InformationalResponse, _SWITCH_UPGRADE): SWITCHED_PROTOCOL, + (Response, _SWITCH_CONNECT): SWITCHED_PROTOCOL, + }, + SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, + DONE: {ConnectionClosed: CLOSED}, + MUST_CLOSE: {ConnectionClosed: CLOSED}, + CLOSED: {ConnectionClosed: CLOSED}, + SWITCHED_PROTOCOL: {}, + ERROR: {}, + }, +} + +StateTransitionType = Dict[ + Tuple[Type[Sentinel], Type[Sentinel]], Dict[Type[Sentinel], Type[Sentinel]] +] + +# NB: there are also some special-case state-triggered transitions hard-coded +# into _fire_state_triggered_transitions below. +STATE_TRIGGERED_TRANSITIONS: StateTransitionType = { + # (Client state, Server state) -> new states + # Protocol negotiation + (MIGHT_SWITCH_PROTOCOL, SWITCHED_PROTOCOL): {CLIENT: SWITCHED_PROTOCOL}, + # Socket shutdown + (CLOSED, DONE): {SERVER: MUST_CLOSE}, + (CLOSED, IDLE): {SERVER: MUST_CLOSE}, + (ERROR, DONE): {SERVER: MUST_CLOSE}, + (DONE, CLOSED): {CLIENT: MUST_CLOSE}, + (IDLE, CLOSED): {CLIENT: MUST_CLOSE}, + (DONE, ERROR): {CLIENT: MUST_CLOSE}, +} + + +class ConnectionState: + def __init__(self) -> None: + # Extra bits of state that don't quite fit into the state model. + + # If this is False then it enables the automatic DONE -> MUST_CLOSE + # transition. 
Don't set this directly; call .keep_alive_disabled() + self.keep_alive = True + + # This is a subset of {UPGRADE, CONNECT}, containing the proposals + # made by the client for switching protocols. + self.pending_switch_proposals: Set[Type[Sentinel]] = set() + + self.states: Dict[Type[Sentinel], Type[Sentinel]] = {CLIENT: IDLE, SERVER: IDLE} + + def process_error(self, role: Type[Sentinel]) -> None: + self.states[role] = ERROR + self._fire_state_triggered_transitions() + + def process_keep_alive_disabled(self) -> None: + self.keep_alive = False + self._fire_state_triggered_transitions() + + def process_client_switch_proposal(self, switch_event: Type[Sentinel]) -> None: + self.pending_switch_proposals.add(switch_event) + self._fire_state_triggered_transitions() + + def process_event( + self, + role: Type[Sentinel], + event_type: Type[Event], + server_switch_event: Optional[Type[Sentinel]] = None, + ) -> None: + _event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]] = event_type + if server_switch_event is not None: + assert role is SERVER + if server_switch_event not in self.pending_switch_proposals: + raise LocalProtocolError( + "Received server {} event without a pending proposal".format( + server_switch_event + ) + ) + _event_type = (event_type, server_switch_event) + if server_switch_event is None and _event_type is Response: + self.pending_switch_proposals = set() + self._fire_event_triggered_transitions(role, _event_type) + # Special case: the server state does get to see Request + # events. 
+ if _event_type is Request: + assert role is CLIENT + self._fire_event_triggered_transitions(SERVER, (Request, CLIENT)) + self._fire_state_triggered_transitions() + + def _fire_event_triggered_transitions( + self, + role: Type[Sentinel], + event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], + ) -> None: + state = self.states[role] + try: + new_state = EVENT_TRIGGERED_TRANSITIONS[role][state][event_type] + except KeyError: + event_type = cast(Type[Event], event_type) + raise LocalProtocolError( + "can't handle event type {} when role={} and state={}".format( + event_type.__name__, role, self.states[role] + ) + ) from None + self.states[role] = new_state + + def _fire_state_triggered_transitions(self) -> None: + # We apply these rules repeatedly until converging on a fixed point + while True: + start_states = dict(self.states) + + # It could happen that both these special-case transitions are + # enabled at the same time: + # + # DONE -> MIGHT_SWITCH_PROTOCOL + # DONE -> MUST_CLOSE + # + # For example, this will always be true of a HTTP/1.0 client + # requesting CONNECT. If this happens, the protocol switch takes + # priority. From there the client will either go to + # SWITCHED_PROTOCOL, in which case it's none of our business when + # they close the connection, or else the server will deny the + # request, in which case the client will go back to DONE and then + # from there to MUST_CLOSE. 
+ if self.pending_switch_proposals: + if self.states[CLIENT] is DONE: + self.states[CLIENT] = MIGHT_SWITCH_PROTOCOL + + if not self.pending_switch_proposals: + if self.states[CLIENT] is MIGHT_SWITCH_PROTOCOL: + self.states[CLIENT] = DONE + + if not self.keep_alive: + for role in (CLIENT, SERVER): + if self.states[role] is DONE: + self.states[role] = MUST_CLOSE + + # Tabular state-triggered transitions + joint_state = (self.states[CLIENT], self.states[SERVER]) + changes = STATE_TRIGGERED_TRANSITIONS.get(joint_state, {}) + self.states.update(changes) + + if self.states == start_states: + # Fixed point reached + return + + def start_next_cycle(self) -> None: + if self.states != {CLIENT: DONE, SERVER: DONE}: + raise LocalProtocolError( + "not in a reusable state. self.states={}".format(self.states) + ) + # Can't reach DONE/DONE with any of these active, but still, let's be + # sure. + assert self.keep_alive + assert not self.pending_switch_proposals + self.states = {CLIENT: IDLE, SERVER: IDLE} diff --git a/.venv/Lib/site-packages/h11/_util.py b/.venv/Lib/site-packages/h11/_util.py new file mode 100644 index 00000000..67184452 --- /dev/null +++ b/.venv/Lib/site-packages/h11/_util.py @@ -0,0 +1,135 @@ +from typing import Any, Dict, NoReturn, Pattern, Tuple, Type, TypeVar, Union + +__all__ = [ + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", + "validate", + "bytesify", +] + + +class ProtocolError(Exception): + """Exception indicating a violation of the HTTP/1.1 protocol. + + This as an abstract base class, with two concrete base classes: + :exc:`LocalProtocolError`, which indicates that you tried to do something + that HTTP/1.1 says is illegal, and :exc:`RemoteProtocolError`, which + indicates that the remote peer tried to do something that HTTP/1.1 says is + illegal. See :ref:`error-handling` for details. + + In addition to the normal :exc:`Exception` features, it has one attribute: + + .. 
attribute:: error_status_hint + + This gives a suggestion as to what status code a server might use if + this error occurred as part of a request. + + For a :exc:`RemoteProtocolError`, this is useful as a suggestion for + how you might want to respond to a misbehaving peer, if you're + implementing a server. + + For a :exc:`LocalProtocolError`, this can be taken as a suggestion for + how your peer might have responded to *you* if h11 had allowed you to + continue. + + The default is 400 Bad Request, a generic catch-all for protocol + violations. + + """ + + def __init__(self, msg: str, error_status_hint: int = 400) -> None: + if type(self) is ProtocolError: + raise TypeError("tried to directly instantiate ProtocolError") + Exception.__init__(self, msg) + self.error_status_hint = error_status_hint + + +# Strategy: there are a number of public APIs where a LocalProtocolError can +# be raised (send(), all the different event constructors, ...), and only one +# public API where RemoteProtocolError can be raised +# (receive_data()). Therefore we always raise LocalProtocolError internally, +# and then receive_data will translate this into a RemoteProtocolError. +# +# Internally: +# LocalProtocolError is the generic "ProtocolError". +# Externally: +# LocalProtocolError is for local errors and RemoteProtocolError is for +# remote errors. +class LocalProtocolError(ProtocolError): + def _reraise_as_remote_protocol_error(self) -> NoReturn: + # After catching a LocalProtocolError, use this method to re-raise it + # as a RemoteProtocolError. This method must be called from inside an + # except: block. + # + # An easy way to get an equivalent RemoteProtocolError is just to + # modify 'self' in place. + self.__class__ = RemoteProtocolError # type: ignore + # But the re-raising is somewhat non-trivial -- you might think that + # now that we've modified the in-flight exception object, that just + # doing 'raise' to re-raise it would be enough. 
But it turns out that + # this doesn't work, because Python tracks the exception type + # (exc_info[0]) separately from the exception object (exc_info[1]), + # and we only modified the latter. So we really do need to re-raise + # the new type explicitly. + # On py3, the traceback is part of the exception object, so our + # in-place modification preserved it and we can just re-raise: + raise self + + +class RemoteProtocolError(ProtocolError): + pass + + +def validate( + regex: Pattern[bytes], data: bytes, msg: str = "malformed data", *format_args: Any +) -> Dict[str, bytes]: + match = regex.fullmatch(data) + if not match: + if format_args: + msg = msg.format(*format_args) + raise LocalProtocolError(msg) + return match.groupdict() + + +# Sentinel values +# +# - Inherit identity-based comparison and hashing from object +# - Have a nice repr +# - Have a *bonus property*: type(sentinel) is sentinel +# +# The bonus property is useful if you want to take the return value from +# next_event() and do some sort of dispatch based on type(event). + +_T_Sentinel = TypeVar("_T_Sentinel", bound="Sentinel") + + +class Sentinel(type): + def __new__( + cls: Type[_T_Sentinel], + name: str, + bases: Tuple[type, ...], + namespace: Dict[str, Any], + **kwds: Any + ) -> _T_Sentinel: + assert bases == (Sentinel,) + v = super().__new__(cls, name, bases, namespace, **kwds) + v.__class__ = v # type: ignore + return v + + def __repr__(self) -> str: + return self.__name__ + + +# Used for methods, request targets, HTTP versions, header names, and header +# values. Accepts ascii-strings, or bytes/bytearray/memoryview/..., and always +# returns bytes. 
+def bytesify(s: Union[bytes, bytearray, memoryview, int, str]) -> bytes: + # Fast-path: + if type(s) is bytes: + return s + if isinstance(s, str): + s = s.encode("ascii") + if isinstance(s, int): + raise TypeError("expected bytes-like object, not int") + return bytes(s) diff --git a/.venv/Lib/site-packages/h11/_version.py b/.venv/Lib/site-packages/h11/_version.py new file mode 100644 index 00000000..4c891130 --- /dev/null +++ b/.venv/Lib/site-packages/h11/_version.py @@ -0,0 +1,16 @@ +# This file must be kept very simple, because it is consumed from several +# places -- it is imported by h11/__init__.py, execfile'd by setup.py, etc. + +# We use a simple scheme: +# 1.0.0 -> 1.0.0+dev -> 1.1.0 -> 1.1.0+dev +# where the +dev versions are never released into the wild, they're just what +# we stick into the VCS in between releases. +# +# This is compatible with PEP 440: +# http://legacy.python.org/dev/peps/pep-0440/ +# via the use of the "local suffix" "+dev", which is disallowed on index +# servers and causes 1.0.0+dev to sort after plain 1.0.0, which is what we +# want. (Contrast with the special suffix 1.0.0.dev, which sorts *before* +# 1.0.0.) + +__version__ = "0.14.0" diff --git a/.venv/Lib/site-packages/h11/_writers.py b/.venv/Lib/site-packages/h11/_writers.py new file mode 100644 index 00000000..939cdb91 --- /dev/null +++ b/.venv/Lib/site-packages/h11/_writers.py @@ -0,0 +1,145 @@ +# Code to read HTTP data +# +# Strategy: each writer takes an event + a write-some-bytes function, which is +# calls. +# +# WRITERS is a dict describing how to pick a reader. 
It maps states to either: +# - a writer +# - or, for body writers, a dict of framin-dependent writer factories + +from typing import Any, Callable, Dict, List, Tuple, Type, Union + +from ._events import Data, EndOfMessage, Event, InformationalResponse, Request, Response +from ._headers import Headers +from ._state import CLIENT, IDLE, SEND_BODY, SEND_RESPONSE, SERVER +from ._util import LocalProtocolError, Sentinel + +__all__ = ["WRITERS"] + +Writer = Callable[[bytes], Any] + + +def write_headers(headers: Headers, write: Writer) -> None: + # "Since the Host field-value is critical information for handling a + # request, a user agent SHOULD generate Host as the first header field + # following the request-line." - RFC 7230 + raw_items = headers._full_items + for raw_name, name, value in raw_items: + if name == b"host": + write(b"%s: %s\r\n" % (raw_name, value)) + for raw_name, name, value in raw_items: + if name != b"host": + write(b"%s: %s\r\n" % (raw_name, value)) + write(b"\r\n") + + +def write_request(request: Request, write: Writer) -> None: + if request.http_version != b"1.1": + raise LocalProtocolError("I only send HTTP/1.1") + write(b"%s %s HTTP/1.1\r\n" % (request.method, request.target)) + write_headers(request.headers, write) + + +# Shared between InformationalResponse and Response +def write_any_response( + response: Union[InformationalResponse, Response], write: Writer +) -> None: + if response.http_version != b"1.1": + raise LocalProtocolError("I only send HTTP/1.1") + status_bytes = str(response.status_code).encode("ascii") + # We don't bother sending ascii status messages like "OK"; they're + # optional and ignored by the protocol. (But the space after the numeric + # status code is mandatory.) + # + # XX FIXME: could at least make an effort to pull out the status message + # from stdlib's http.HTTPStatus table. Or maybe just steal their enums + # (either by import or copy/paste). 
We already accept them as status codes + # since they're of type IntEnum < int. + write(b"HTTP/1.1 %s %s\r\n" % (status_bytes, response.reason)) + write_headers(response.headers, write) + + +class BodyWriter: + def __call__(self, event: Event, write: Writer) -> None: + if type(event) is Data: + self.send_data(event.data, write) + elif type(event) is EndOfMessage: + self.send_eom(event.headers, write) + else: # pragma: no cover + assert False + + def send_data(self, data: bytes, write: Writer) -> None: + pass + + def send_eom(self, headers: Headers, write: Writer) -> None: + pass + + +# +# These are all careful not to do anything to 'data' except call len(data) and +# write(data). This allows us to transparently pass-through funny objects, +# like placeholder objects referring to files on disk that will be sent via +# sendfile(2). +# +class ContentLengthWriter(BodyWriter): + def __init__(self, length: int) -> None: + self._length = length + + def send_data(self, data: bytes, write: Writer) -> None: + self._length -= len(data) + if self._length < 0: + raise LocalProtocolError("Too much data for declared Content-Length") + write(data) + + def send_eom(self, headers: Headers, write: Writer) -> None: + if self._length != 0: + raise LocalProtocolError("Too little data for declared Content-Length") + if headers: + raise LocalProtocolError("Content-Length and trailers don't mix") + + +class ChunkedWriter(BodyWriter): + def send_data(self, data: bytes, write: Writer) -> None: + # if we encoded 0-length data in the naive way, it would look like an + # end-of-message. 
+ if not data: + return + write(b"%x\r\n" % len(data)) + write(data) + write(b"\r\n") + + def send_eom(self, headers: Headers, write: Writer) -> None: + write(b"0\r\n") + write_headers(headers, write) + + +class Http10Writer(BodyWriter): + def send_data(self, data: bytes, write: Writer) -> None: + write(data) + + def send_eom(self, headers: Headers, write: Writer) -> None: + if headers: + raise LocalProtocolError("can't send trailers to HTTP/1.0 client") + # no need to close the socket ourselves, that will be taken care of by + # Connection: close machinery + + +WritersType = Dict[ + Union[Tuple[Type[Sentinel], Type[Sentinel]], Type[Sentinel]], + Union[ + Dict[str, Type[BodyWriter]], + Callable[[Union[InformationalResponse, Response], Writer], None], + Callable[[Request, Writer], None], + ], +] + +WRITERS: WritersType = { + (CLIENT, IDLE): write_request, + (SERVER, IDLE): write_any_response, + (SERVER, SEND_RESPONSE): write_any_response, + SEND_BODY: { + "chunked": ChunkedWriter, + "content-length": ContentLengthWriter, + "http/1.0": Http10Writer, + }, +} diff --git a/.venv/Lib/site-packages/h11/py.typed b/.venv/Lib/site-packages/h11/py.typed new file mode 100644 index 00000000..f5642f79 --- /dev/null +++ b/.venv/Lib/site-packages/h11/py.typed @@ -0,0 +1 @@ +Marker diff --git a/.venv/Lib/site-packages/h11/tests/__init__.py b/.venv/Lib/site-packages/h11/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/h11/tests/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/h11/tests/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..afba1ac7 Binary files /dev/null and b/.venv/Lib/site-packages/h11/tests/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11/tests/__pycache__/helpers.cpython-311.pyc b/.venv/Lib/site-packages/h11/tests/__pycache__/helpers.cpython-311.pyc new file mode 100644 index 00000000..7442fd19 Binary files /dev/null and 
b/.venv/Lib/site-packages/h11/tests/__pycache__/helpers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11/tests/__pycache__/test_against_stdlib_http.cpython-311.pyc b/.venv/Lib/site-packages/h11/tests/__pycache__/test_against_stdlib_http.cpython-311.pyc new file mode 100644 index 00000000..b3a97b16 Binary files /dev/null and b/.venv/Lib/site-packages/h11/tests/__pycache__/test_against_stdlib_http.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11/tests/__pycache__/test_connection.cpython-311.pyc b/.venv/Lib/site-packages/h11/tests/__pycache__/test_connection.cpython-311.pyc new file mode 100644 index 00000000..6952ec31 Binary files /dev/null and b/.venv/Lib/site-packages/h11/tests/__pycache__/test_connection.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11/tests/__pycache__/test_events.cpython-311.pyc b/.venv/Lib/site-packages/h11/tests/__pycache__/test_events.cpython-311.pyc new file mode 100644 index 00000000..d7d483f6 Binary files /dev/null and b/.venv/Lib/site-packages/h11/tests/__pycache__/test_events.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11/tests/__pycache__/test_headers.cpython-311.pyc b/.venv/Lib/site-packages/h11/tests/__pycache__/test_headers.cpython-311.pyc new file mode 100644 index 00000000..e251ffe1 Binary files /dev/null and b/.venv/Lib/site-packages/h11/tests/__pycache__/test_headers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11/tests/__pycache__/test_helpers.cpython-311.pyc b/.venv/Lib/site-packages/h11/tests/__pycache__/test_helpers.cpython-311.pyc new file mode 100644 index 00000000..f0136399 Binary files /dev/null and b/.venv/Lib/site-packages/h11/tests/__pycache__/test_helpers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11/tests/__pycache__/test_io.cpython-311.pyc b/.venv/Lib/site-packages/h11/tests/__pycache__/test_io.cpython-311.pyc new file mode 100644 index 00000000..a72a74da Binary files /dev/null and 
b/.venv/Lib/site-packages/h11/tests/__pycache__/test_io.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11/tests/__pycache__/test_receivebuffer.cpython-311.pyc b/.venv/Lib/site-packages/h11/tests/__pycache__/test_receivebuffer.cpython-311.pyc new file mode 100644 index 00000000..bf24ffd8 Binary files /dev/null and b/.venv/Lib/site-packages/h11/tests/__pycache__/test_receivebuffer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11/tests/__pycache__/test_state.cpython-311.pyc b/.venv/Lib/site-packages/h11/tests/__pycache__/test_state.cpython-311.pyc new file mode 100644 index 00000000..d413fc0f Binary files /dev/null and b/.venv/Lib/site-packages/h11/tests/__pycache__/test_state.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11/tests/__pycache__/test_util.cpython-311.pyc b/.venv/Lib/site-packages/h11/tests/__pycache__/test_util.cpython-311.pyc new file mode 100644 index 00000000..d0e79f6e Binary files /dev/null and b/.venv/Lib/site-packages/h11/tests/__pycache__/test_util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/h11/tests/data/test-file b/.venv/Lib/site-packages/h11/tests/data/test-file new file mode 100644 index 00000000..d0be0a6c --- /dev/null +++ b/.venv/Lib/site-packages/h11/tests/data/test-file @@ -0,0 +1 @@ +92b12bc045050b55b848d37167a1a63947c364579889ce1d39788e45e9fac9e5 diff --git a/.venv/Lib/site-packages/h11/tests/helpers.py b/.venv/Lib/site-packages/h11/tests/helpers.py new file mode 100644 index 00000000..571be444 --- /dev/null +++ b/.venv/Lib/site-packages/h11/tests/helpers.py @@ -0,0 +1,101 @@ +from typing import cast, List, Type, Union, ValuesView + +from .._connection import Connection, NEED_DATA, PAUSED +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._state import CLIENT, CLOSED, DONE, MUST_CLOSE, SERVER +from .._util import Sentinel + +try: + from typing import Literal +except ImportError: + from 
typing_extensions import Literal # type: ignore + + +def get_all_events(conn: Connection) -> List[Event]: + got_events = [] + while True: + event = conn.next_event() + if event in (NEED_DATA, PAUSED): + break + event = cast(Event, event) + got_events.append(event) + if type(event) is ConnectionClosed: + break + return got_events + + +def receive_and_get(conn: Connection, data: bytes) -> List[Event]: + conn.receive_data(data) + return get_all_events(conn) + + +# Merges adjacent Data events, converts payloads to bytestrings, and removes +# chunk boundaries. +def normalize_data_events(in_events: List[Event]) -> List[Event]: + out_events: List[Event] = [] + for event in in_events: + if type(event) is Data: + event = Data(data=bytes(event.data), chunk_start=False, chunk_end=False) + if out_events and type(out_events[-1]) is type(event) is Data: + out_events[-1] = Data( + data=out_events[-1].data + event.data, + chunk_start=out_events[-1].chunk_start, + chunk_end=out_events[-1].chunk_end, + ) + else: + out_events.append(event) + return out_events + + +# Given that we want to write tests that push some events through a Connection +# and check that its state updates appropriately... we might as make a habit +# of pushing them through two Connections with a fake network link in +# between. +class ConnectionPair: + def __init__(self) -> None: + self.conn = {CLIENT: Connection(CLIENT), SERVER: Connection(SERVER)} + self.other = {CLIENT: SERVER, SERVER: CLIENT} + + @property + def conns(self) -> ValuesView[Connection]: + return self.conn.values() + + # expect="match" if expect=send_events; expect=[...] 
to say what expected + def send( + self, + role: Type[Sentinel], + send_events: Union[List[Event], Event], + expect: Union[List[Event], Event, Literal["match"]] = "match", + ) -> bytes: + if not isinstance(send_events, list): + send_events = [send_events] + data = b"" + closed = False + for send_event in send_events: + new_data = self.conn[role].send(send_event) + if new_data is None: + closed = True + else: + data += new_data + # send uses b"" to mean b"", and None to mean closed + # receive uses b"" to mean closed, and None to mean "try again" + # so we have to translate between the two conventions + if data: + self.conn[self.other[role]].receive_data(data) + if closed: + self.conn[self.other[role]].receive_data(b"") + got_events = get_all_events(self.conn[self.other[role]]) + if expect == "match": + expect = send_events + if not isinstance(expect, list): + expect = [expect] + assert got_events == expect + return data diff --git a/.venv/Lib/site-packages/h11/tests/test_against_stdlib_http.py b/.venv/Lib/site-packages/h11/tests/test_against_stdlib_http.py new file mode 100644 index 00000000..d2ee1314 --- /dev/null +++ b/.venv/Lib/site-packages/h11/tests/test_against_stdlib_http.py @@ -0,0 +1,115 @@ +import json +import os.path +import socket +import socketserver +import threading +from contextlib import closing, contextmanager +from http.server import SimpleHTTPRequestHandler +from typing import Callable, Generator +from urllib.request import urlopen + +import h11 + + +@contextmanager +def socket_server( + handler: Callable[..., socketserver.BaseRequestHandler] +) -> Generator[socketserver.TCPServer, None, None]: + httpd = socketserver.TCPServer(("127.0.0.1", 0), handler) + thread = threading.Thread( + target=httpd.serve_forever, kwargs={"poll_interval": 0.01} + ) + thread.daemon = True + try: + thread.start() + yield httpd + finally: + httpd.shutdown() + + +test_file_path = os.path.join(os.path.dirname(__file__), "data/test-file") +with open(test_file_path, "rb") 
as f: + test_file_data = f.read() + + +class SingleMindedRequestHandler(SimpleHTTPRequestHandler): + def translate_path(self, path: str) -> str: + return test_file_path + + +def test_h11_as_client() -> None: + with socket_server(SingleMindedRequestHandler) as httpd: + with closing(socket.create_connection(httpd.server_address)) as s: + c = h11.Connection(h11.CLIENT) + + s.sendall( + c.send( # type: ignore[arg-type] + h11.Request( + method="GET", target="/foo", headers=[("Host", "localhost")] + ) + ) + ) + s.sendall(c.send(h11.EndOfMessage())) # type: ignore[arg-type] + + data = bytearray() + while True: + event = c.next_event() + print(event) + if event is h11.NEED_DATA: + # Use a small read buffer to make things more challenging + # and exercise more paths :-) + c.receive_data(s.recv(10)) + continue + if type(event) is h11.Response: + assert event.status_code == 200 + if type(event) is h11.Data: + data += event.data + if type(event) is h11.EndOfMessage: + break + assert bytes(data) == test_file_data + + +class H11RequestHandler(socketserver.BaseRequestHandler): + def handle(self) -> None: + with closing(self.request) as s: + c = h11.Connection(h11.SERVER) + request = None + while True: + event = c.next_event() + if event is h11.NEED_DATA: + # Use a small read buffer to make things more challenging + # and exercise more paths :-) + c.receive_data(s.recv(10)) + continue + if type(event) is h11.Request: + request = event + if type(event) is h11.EndOfMessage: + break + assert request is not None + info = json.dumps( + { + "method": request.method.decode("ascii"), + "target": request.target.decode("ascii"), + "headers": { + name.decode("ascii"): value.decode("ascii") + for (name, value) in request.headers + }, + } + ) + s.sendall(c.send(h11.Response(status_code=200, headers=[]))) # type: ignore[arg-type] + s.sendall(c.send(h11.Data(data=info.encode("ascii")))) + s.sendall(c.send(h11.EndOfMessage())) + + +def test_h11_as_server() -> None: + with 
socket_server(H11RequestHandler) as httpd: + host, port = httpd.server_address + url = "http://{}:{}/some-path".format(host, port) + with closing(urlopen(url)) as f: + assert f.getcode() == 200 + data = f.read() + info = json.loads(data.decode("ascii")) + print(info) + assert info["method"] == "GET" + assert info["target"] == "/some-path" + assert "urllib" in info["headers"]["user-agent"] diff --git a/.venv/Lib/site-packages/h11/tests/test_connection.py b/.venv/Lib/site-packages/h11/tests/test_connection.py new file mode 100644 index 00000000..73a27b98 --- /dev/null +++ b/.venv/Lib/site-packages/h11/tests/test_connection.py @@ -0,0 +1,1122 @@ +from typing import Any, cast, Dict, List, Optional, Tuple, Type + +import pytest + +from .._connection import _body_framing, _keep_alive, Connection, NEED_DATA, PAUSED +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._state import ( + CLIENT, + CLOSED, + DONE, + ERROR, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from .._util import LocalProtocolError, RemoteProtocolError, Sentinel +from .helpers import ConnectionPair, get_all_events, receive_and_get + + +def test__keep_alive() -> None: + assert _keep_alive( + Request(method="GET", target="/", headers=[("Host", "Example.com")]) + ) + assert not _keep_alive( + Request( + method="GET", + target="/", + headers=[("Host", "Example.com"), ("Connection", "close")], + ) + ) + assert not _keep_alive( + Request( + method="GET", + target="/", + headers=[("Host", "Example.com"), ("Connection", "a, b, cLOse, foo")], + ) + ) + assert not _keep_alive( + Request(method="GET", target="/", headers=[], http_version="1.0") # type: ignore[arg-type] + ) + + assert _keep_alive(Response(status_code=200, headers=[])) # type: ignore[arg-type] + assert not _keep_alive(Response(status_code=200, headers=[("Connection", "close")])) + assert not 
_keep_alive( + Response(status_code=200, headers=[("Connection", "a, b, cLOse, foo")]) + ) + assert not _keep_alive(Response(status_code=200, headers=[], http_version="1.0")) # type: ignore[arg-type] + + +def test__body_framing() -> None: + def headers(cl: Optional[int], te: bool) -> List[Tuple[str, str]]: + headers = [] + if cl is not None: + headers.append(("Content-Length", str(cl))) + if te: + headers.append(("Transfer-Encoding", "chunked")) + return headers + + def resp( + status_code: int = 200, cl: Optional[int] = None, te: bool = False + ) -> Response: + return Response(status_code=status_code, headers=headers(cl, te)) + + def req(cl: Optional[int] = None, te: bool = False) -> Request: + h = headers(cl, te) + h += [("Host", "example.com")] + return Request(method="GET", target="/", headers=h) + + # Special cases where the headers are ignored: + for kwargs in [{}, {"cl": 100}, {"te": True}, {"cl": 100, "te": True}]: + kwargs = cast(Dict[str, Any], kwargs) + for meth, r in [ + (b"HEAD", resp(**kwargs)), + (b"GET", resp(status_code=204, **kwargs)), + (b"GET", resp(status_code=304, **kwargs)), + ]: + assert _body_framing(meth, r) == ("content-length", (0,)) + + # Transfer-encoding + for kwargs in [{"te": True}, {"cl": 100, "te": True}]: + kwargs = cast(Dict[str, Any], kwargs) + for meth, r in [(None, req(**kwargs)), (b"GET", resp(**kwargs))]: # type: ignore + assert _body_framing(meth, r) == ("chunked", ()) + + # Content-Length + for meth, r in [(None, req(cl=100)), (b"GET", resp(cl=100))]: # type: ignore + assert _body_framing(meth, r) == ("content-length", (100,)) + + # No headers + assert _body_framing(None, req()) == ("content-length", (0,)) # type: ignore + assert _body_framing(b"GET", resp()) == ("http/1.0", ()) + + +def test_Connection_basics_and_content_length() -> None: + with pytest.raises(ValueError): + Connection("CLIENT") # type: ignore + + p = ConnectionPair() + assert p.conn[CLIENT].our_role is CLIENT + assert p.conn[CLIENT].their_role is SERVER 
+ assert p.conn[SERVER].our_role is SERVER + assert p.conn[SERVER].their_role is CLIENT + + data = p.send( + CLIENT, + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Content-Length", "10")], + ), + ) + assert data == ( + b"GET / HTTP/1.1\r\n" b"Host: example.com\r\n" b"Content-Length: 10\r\n\r\n" + ) + + for conn in p.conns: + assert conn.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + assert p.conn[CLIENT].our_state is SEND_BODY + assert p.conn[CLIENT].their_state is SEND_RESPONSE + assert p.conn[SERVER].our_state is SEND_RESPONSE + assert p.conn[SERVER].their_state is SEND_BODY + + assert p.conn[CLIENT].their_http_version is None + assert p.conn[SERVER].their_http_version == b"1.1" + + data = p.send(SERVER, InformationalResponse(status_code=100, headers=[])) # type: ignore[arg-type] + assert data == b"HTTP/1.1 100 \r\n\r\n" + + data = p.send(SERVER, Response(status_code=200, headers=[("Content-Length", "11")])) + assert data == b"HTTP/1.1 200 \r\nContent-Length: 11\r\n\r\n" + + for conn in p.conns: + assert conn.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY} + + assert p.conn[CLIENT].their_http_version == b"1.1" + assert p.conn[SERVER].their_http_version == b"1.1" + + data = p.send(CLIENT, Data(data=b"12345")) + assert data == b"12345" + data = p.send( + CLIENT, Data(data=b"67890"), expect=[Data(data=b"67890"), EndOfMessage()] + ) + assert data == b"67890" + data = p.send(CLIENT, EndOfMessage(), expect=[]) + assert data == b"" + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: SEND_BODY} + + data = p.send(SERVER, Data(data=b"1234567890")) + assert data == b"1234567890" + data = p.send(SERVER, Data(data=b"1"), expect=[Data(data=b"1"), EndOfMessage()]) + assert data == b"1" + data = p.send(SERVER, EndOfMessage(), expect=[]) + assert data == b"" + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + + +def test_chunked() -> None: + p = ConnectionPair() + + p.send( + CLIENT, + 
Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Transfer-Encoding", "chunked")], + ), + ) + data = p.send(CLIENT, Data(data=b"1234567890", chunk_start=True, chunk_end=True)) + assert data == b"a\r\n1234567890\r\n" + data = p.send(CLIENT, Data(data=b"abcde", chunk_start=True, chunk_end=True)) + assert data == b"5\r\nabcde\r\n" + data = p.send(CLIENT, Data(data=b""), expect=[]) + assert data == b"" + data = p.send(CLIENT, EndOfMessage(headers=[("hello", "there")])) + assert data == b"0\r\nhello: there\r\n\r\n" + + p.send( + SERVER, Response(status_code=200, headers=[("Transfer-Encoding", "chunked")]) + ) + p.send(SERVER, Data(data=b"54321", chunk_start=True, chunk_end=True)) + p.send(SERVER, Data(data=b"12345", chunk_start=True, chunk_end=True)) + p.send(SERVER, EndOfMessage()) + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + + +def test_chunk_boundaries() -> None: + conn = Connection(our_role=SERVER) + + request = ( + b"POST / HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Transfer-Encoding: chunked\r\n" + b"\r\n" + ) + conn.receive_data(request) + assert conn.next_event() == Request( + method="POST", + target="/", + headers=[("Host", "example.com"), ("Transfer-Encoding", "chunked")], + ) + assert conn.next_event() is NEED_DATA + + conn.receive_data(b"5\r\nhello\r\n") + assert conn.next_event() == Data(data=b"hello", chunk_start=True, chunk_end=True) + + conn.receive_data(b"5\r\nhel") + assert conn.next_event() == Data(data=b"hel", chunk_start=True, chunk_end=False) + + conn.receive_data(b"l") + assert conn.next_event() == Data(data=b"l", chunk_start=False, chunk_end=False) + + conn.receive_data(b"o\r\n") + assert conn.next_event() == Data(data=b"o", chunk_start=False, chunk_end=True) + + conn.receive_data(b"5\r\nhello") + assert conn.next_event() == Data(data=b"hello", chunk_start=True, chunk_end=True) + + conn.receive_data(b"\r\n") + assert conn.next_event() == NEED_DATA + + conn.receive_data(b"0\r\n\r\n") 
+ assert conn.next_event() == EndOfMessage() + + +def test_client_talking_to_http10_server() -> None: + c = Connection(CLIENT) + c.send(Request(method="GET", target="/", headers=[("Host", "example.com")])) + c.send(EndOfMessage()) + assert c.our_state is DONE + # No content-length, so Http10 framing for body + assert receive_and_get(c, b"HTTP/1.0 200 OK\r\n\r\n") == [ + Response(status_code=200, headers=[], http_version="1.0", reason=b"OK") # type: ignore[arg-type] + ] + assert c.our_state is MUST_CLOSE + assert receive_and_get(c, b"12345") == [Data(data=b"12345")] + assert receive_and_get(c, b"67890") == [Data(data=b"67890")] + assert receive_and_get(c, b"") == [EndOfMessage(), ConnectionClosed()] + assert c.their_state is CLOSED + + +def test_server_talking_to_http10_client() -> None: + c = Connection(SERVER) + # No content-length, so no body + # NB: no host header + assert receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") == [ + Request(method="GET", target="/", headers=[], http_version="1.0"), # type: ignore[arg-type] + EndOfMessage(), + ] + assert c.their_state is MUST_CLOSE + + # We automatically Connection: close back at them + assert ( + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + == b"HTTP/1.1 200 \r\nConnection: close\r\n\r\n" + ) + + assert c.send(Data(data=b"12345")) == b"12345" + assert c.send(EndOfMessage()) == b"" + assert c.our_state is MUST_CLOSE + + # Check that it works if they do send Content-Length + c = Connection(SERVER) + # NB: no host header + assert receive_and_get(c, b"POST / HTTP/1.0\r\nContent-Length: 10\r\n\r\n1") == [ + Request( + method="POST", + target="/", + headers=[("Content-Length", "10")], + http_version="1.0", + ), + Data(data=b"1"), + ] + assert receive_and_get(c, b"234567890") == [Data(data=b"234567890"), EndOfMessage()] + assert c.their_state is MUST_CLOSE + assert receive_and_get(c, b"") == [ConnectionClosed()] + + +def test_automatic_transfer_encoding_in_response() -> None: + # Check that in 
responses, the user can specify either Transfer-Encoding: + # chunked or no framing at all, and in both cases we automatically select + # the right option depending on whether the peer speaks HTTP/1.0 or + # HTTP/1.1 + for user_headers in [ + [("Transfer-Encoding", "chunked")], + [], + # In fact, this even works if Content-Length is set, + # because if both are set then Transfer-Encoding wins + [("Transfer-Encoding", "chunked"), ("Content-Length", "100")], + ]: + user_headers = cast(List[Tuple[str, str]], user_headers) + p = ConnectionPair() + p.send( + CLIENT, + [ + Request(method="GET", target="/", headers=[("Host", "example.com")]), + EndOfMessage(), + ], + ) + # When speaking to HTTP/1.1 client, all of the above cases get + # normalized to Transfer-Encoding: chunked + p.send( + SERVER, + Response(status_code=200, headers=user_headers), + expect=Response( + status_code=200, headers=[("Transfer-Encoding", "chunked")] + ), + ) + + # When speaking to HTTP/1.0 client, all of the above cases get + # normalized to no-framing-headers + c = Connection(SERVER) + receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") + assert ( + c.send(Response(status_code=200, headers=user_headers)) + == b"HTTP/1.1 200 \r\nConnection: close\r\n\r\n" + ) + assert c.send(Data(data=b"12345")) == b"12345" + + +def test_automagic_connection_close_handling() -> None: + p = ConnectionPair() + # If the user explicitly sets Connection: close, then we notice and + # respect it + p.send( + CLIENT, + [ + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Connection", "close")], + ), + EndOfMessage(), + ], + ) + for conn in p.conns: + assert conn.states[CLIENT] is MUST_CLOSE + # And if the client sets it, the server automatically echoes it back + p.send( + SERVER, + # no header here... 
+ [Response(status_code=204, headers=[]), EndOfMessage()], # type: ignore[arg-type] + # ...but oh look, it arrived anyway + expect=[ + Response(status_code=204, headers=[("connection", "close")]), + EndOfMessage(), + ], + ) + for conn in p.conns: + assert conn.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE} + + +def test_100_continue() -> None: + def setup() -> ConnectionPair: + p = ConnectionPair() + p.send( + CLIENT, + Request( + method="GET", + target="/", + headers=[ + ("Host", "example.com"), + ("Content-Length", "100"), + ("Expect", "100-continue"), + ], + ), + ) + for conn in p.conns: + assert conn.client_is_waiting_for_100_continue + assert not p.conn[CLIENT].they_are_waiting_for_100_continue + assert p.conn[SERVER].they_are_waiting_for_100_continue + return p + + # Disabled by 100 Continue + p = setup() + p.send(SERVER, InformationalResponse(status_code=100, headers=[])) # type: ignore[arg-type] + for conn in p.conns: + assert not conn.client_is_waiting_for_100_continue + assert not conn.they_are_waiting_for_100_continue + + # Disabled by a real response + p = setup() + p.send( + SERVER, Response(status_code=200, headers=[("Transfer-Encoding", "chunked")]) + ) + for conn in p.conns: + assert not conn.client_is_waiting_for_100_continue + assert not conn.they_are_waiting_for_100_continue + + # Disabled by the client going ahead and sending stuff anyway + p = setup() + p.send(CLIENT, Data(data=b"12345")) + for conn in p.conns: + assert not conn.client_is_waiting_for_100_continue + assert not conn.they_are_waiting_for_100_continue + + +def test_max_incomplete_event_size_countermeasure() -> None: + # Infinitely long headers are definitely not okay + c = Connection(SERVER) + c.receive_data(b"GET / HTTP/1.0\r\nEndless: ") + assert c.next_event() is NEED_DATA + with pytest.raises(RemoteProtocolError): + while True: + c.receive_data(b"a" * 1024) + c.next_event() + + # Checking that the same header is accepted / rejected depending on the + # 
max_incomplete_event_size setting: + c = Connection(SERVER, max_incomplete_event_size=5000) + c.receive_data(b"GET / HTTP/1.0\r\nBig: ") + c.receive_data(b"a" * 4000) + c.receive_data(b"\r\n\r\n") + assert get_all_events(c) == [ + Request( + method="GET", target="/", http_version="1.0", headers=[("big", "a" * 4000)] + ), + EndOfMessage(), + ] + + c = Connection(SERVER, max_incomplete_event_size=4000) + c.receive_data(b"GET / HTTP/1.0\r\nBig: ") + c.receive_data(b"a" * 4000) + with pytest.raises(RemoteProtocolError): + c.next_event() + + # Temporarily exceeding the size limit is fine, as long as its done with + # complete events: + c = Connection(SERVER, max_incomplete_event_size=5000) + c.receive_data(b"GET / HTTP/1.0\r\nContent-Length: 10000") + c.receive_data(b"\r\n\r\n" + b"a" * 10000) + assert get_all_events(c) == [ + Request( + method="GET", + target="/", + http_version="1.0", + headers=[("Content-Length", "10000")], + ), + Data(data=b"a" * 10000), + EndOfMessage(), + ] + + c = Connection(SERVER, max_incomplete_event_size=100) + # Two pipelined requests to create a way-too-big receive buffer... 
but + # it's fine because we're not checking + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a\r\n\r\n" + b"GET /2 HTTP/1.1\r\nHost: b\r\n\r\n" + b"X" * 1000 + ) + assert get_all_events(c) == [ + Request(method="GET", target="/1", headers=[("host", "a")]), + EndOfMessage(), + ] + # Even more data comes in, still no problem + c.receive_data(b"X" * 1000) + # We can respond and reuse to get the second pipelined request + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + c.start_next_cycle() + assert get_all_events(c) == [ + Request(method="GET", target="/2", headers=[("host", "b")]), + EndOfMessage(), + ] + # But once we unpause and try to read the next message, and find that it's + # incomplete and the buffer is *still* way too large, then *that's* a + # problem: + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + c.start_next_cycle() + with pytest.raises(RemoteProtocolError): + c.next_event() + + +def test_reuse_simple() -> None: + p = ConnectionPair() + p.send( + CLIENT, + [Request(method="GET", target="/", headers=[("Host", "a")]), EndOfMessage()], + ) + p.send( + SERVER, + [ + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + EndOfMessage(), + ], + ) + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + conn.start_next_cycle() + + p.send( + CLIENT, + [ + Request(method="DELETE", target="/foo", headers=[("Host", "a")]), + EndOfMessage(), + ], + ) + p.send( + SERVER, + [ + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), + EndOfMessage(), + ], + ) + + +def test_pipelining() -> None: + # Client doesn't support pipelining, so we have to do this by hand + c = Connection(SERVER) + assert c.next_event() is NEED_DATA + # 3 requests all bunched up + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"12345" + b"GET /2 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + 
b"67890" + b"GET /3 HTTP/1.1\r\nHost: a.com\r\n\r\n" + ) + assert get_all_events(c) == [ + Request( + method="GET", + target="/1", + headers=[("Host", "a.com"), ("Content-Length", "5")], + ), + Data(data=b"12345"), + EndOfMessage(), + ] + assert c.their_state is DONE + assert c.our_state is SEND_RESPONSE + + assert c.next_event() is PAUSED + + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + assert c.their_state is DONE + assert c.our_state is DONE + + c.start_next_cycle() + + assert get_all_events(c) == [ + Request( + method="GET", + target="/2", + headers=[("Host", "a.com"), ("Content-Length", "5")], + ), + Data(data=b"67890"), + EndOfMessage(), + ] + assert c.next_event() is PAUSED + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + c.start_next_cycle() + + assert get_all_events(c) == [ + Request(method="GET", target="/3", headers=[("Host", "a.com")]), + EndOfMessage(), + ] + # Doesn't pause this time, no trailing data + assert c.next_event() is NEED_DATA + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + + # Arrival of more data triggers pause + assert c.next_event() is NEED_DATA + c.receive_data(b"SADF") + assert c.next_event() is PAUSED + assert c.trailing_data == (b"SADF", False) + # If EOF arrives while paused, we don't see that either: + c.receive_data(b"") + assert c.trailing_data == (b"SADF", True) + assert c.next_event() is PAUSED + c.receive_data(b"") + assert c.next_event() is PAUSED + # Can't call receive_data with non-empty buf after closing it + with pytest.raises(RuntimeError): + c.receive_data(b"FDSA") + + +def test_protocol_switch() -> None: + for (req, deny, accept) in [ + ( + Request( + method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1")], + ), + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), + Response(status_code=200, 
headers=[(b"transfer-encoding", b"chunked")]), + ), + ( + Request( + method="GET", + target="/", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + InformationalResponse(status_code=101, headers=[("Upgrade", "a")]), + ), + ( + Request( + method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), + # Accept CONNECT, not upgrade + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + ), + ( + Request( + method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), + # Accept Upgrade, not CONNECT + InformationalResponse(status_code=101, headers=[("Upgrade", "b")]), + ), + ]: + + def setup() -> ConnectionPair: + p = ConnectionPair() + p.send(CLIENT, req) + # No switch-related state change stuff yet; the client has to + # finish the request before that kicks in + for conn in p.conns: + assert conn.states[CLIENT] is SEND_BODY + p.send(CLIENT, [Data(data=b"1"), EndOfMessage()]) + for conn in p.conns: + assert conn.states[CLIENT] is MIGHT_SWITCH_PROTOCOL + assert p.conn[SERVER].next_event() is PAUSED + return p + + # Test deny case + p = setup() + p.send(SERVER, deny) + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: SEND_BODY} + p.send(SERVER, EndOfMessage()) + # Check that re-use is still allowed after a denial + for conn in p.conns: + conn.start_next_cycle() + + # Test accept case + p = setup() + p.send(SERVER, accept) + for conn in p.conns: + assert conn.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} + conn.receive_data(b"123") + assert conn.next_event() is PAUSED + conn.receive_data(b"456") + assert conn.next_event() is PAUSED + 
assert conn.trailing_data == (b"123456", False) + + # Pausing in might-switch, then recovery + # (weird artificial case where the trailing data actually is valid + # HTTP for some reason, because this makes it easier to test the state + # logic) + p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"GET / HTTP/1.0\r\n\r\n") + assert sc.next_event() is PAUSED + assert sc.trailing_data == (b"GET / HTTP/1.0\r\n\r\n", False) + sc.send(deny) + assert sc.next_event() is PAUSED + sc.send(EndOfMessage()) + sc.start_next_cycle() + assert get_all_events(sc) == [ + Request(method="GET", target="/", headers=[], http_version="1.0"), # type: ignore[arg-type] + EndOfMessage(), + ] + + # When we're DONE, have no trailing data, and the connection gets + # closed, we report ConnectionClosed(). When we're in might-switch or + # switched, we don't. + p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"") + assert sc.next_event() is PAUSED + assert sc.trailing_data == (b"", True) + p.send(SERVER, accept) + assert sc.next_event() is PAUSED + + p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"") + assert sc.next_event() is PAUSED + sc.send(deny) + assert sc.next_event() == ConnectionClosed() + + # You can't send after switching protocols, or while waiting for a + # protocol switch + p = setup() + with pytest.raises(LocalProtocolError): + p.conn[CLIENT].send( + Request(method="GET", target="/", headers=[("Host", "a")]) + ) + p = setup() + p.send(SERVER, accept) + with pytest.raises(LocalProtocolError): + p.conn[SERVER].send(Data(data=b"123")) + + +def test_close_simple() -> None: + # Just immediately closing a new connection without anything having + # happened yet. 
+ for (who_shot_first, who_shot_second) in [(CLIENT, SERVER), (SERVER, CLIENT)]: + + def setup() -> ConnectionPair: + p = ConnectionPair() + p.send(who_shot_first, ConnectionClosed()) + for conn in p.conns: + assert conn.states == { + who_shot_first: CLOSED, + who_shot_second: MUST_CLOSE, + } + return p + + # You can keep putting b"" into a closed connection, and you keep + # getting ConnectionClosed() out: + p = setup() + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + p.conn[who_shot_second].receive_data(b"") + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + # Second party can close... + p = setup() + p.send(who_shot_second, ConnectionClosed()) + for conn in p.conns: + assert conn.our_state is CLOSED + assert conn.their_state is CLOSED + # But trying to receive new data on a closed connection is a + # RuntimeError (not ProtocolError, because the problem here isn't + # violation of HTTP, it's violation of physics) + p = setup() + with pytest.raises(RuntimeError): + p.conn[who_shot_second].receive_data(b"123") + # And receiving new data on a MUST_CLOSE connection is a ProtocolError + p = setup() + p.conn[who_shot_first].receive_data(b"GET") + with pytest.raises(RemoteProtocolError): + p.conn[who_shot_first].next_event() + + +def test_close_different_states() -> None: + req = [ + Request(method="GET", target="/foo", headers=[("Host", "a")]), + EndOfMessage(), + ] + resp = [ + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + EndOfMessage(), + ] + + # Client before request + p = ConnectionPair() + p.send(CLIENT, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE} + + # Client after request + p = ConnectionPair() + p.send(CLIENT, req) + p.send(CLIENT, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE} + + # Server after request -> 
not allowed + p = ConnectionPair() + p.send(CLIENT, req) + with pytest.raises(LocalProtocolError): + p.conn[SERVER].send(ConnectionClosed()) + p.conn[CLIENT].receive_data(b"") + with pytest.raises(RemoteProtocolError): + p.conn[CLIENT].next_event() + + # Server after response + p = ConnectionPair() + p.send(CLIENT, req) + p.send(SERVER, resp) + p.send(SERVER, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED} + + # Both after closing (ConnectionClosed() is idempotent) + p = ConnectionPair() + p.send(CLIENT, req) + p.send(SERVER, resp) + p.send(CLIENT, ConnectionClosed()) + p.send(SERVER, ConnectionClosed()) + p.send(CLIENT, ConnectionClosed()) + p.send(SERVER, ConnectionClosed()) + + # In the middle of sending -> not allowed + p = ConnectionPair() + p.send( + CLIENT, + Request( + method="GET", target="/", headers=[("Host", "a"), ("Content-Length", "10")] + ), + ) + with pytest.raises(LocalProtocolError): + p.conn[CLIENT].send(ConnectionClosed()) + p.conn[SERVER].receive_data(b"") + with pytest.raises(RemoteProtocolError): + p.conn[SERVER].next_event() + + +# Receive several requests and then client shuts down their side of the +# connection; we can respond to each +def test_pipelined_close() -> None: + c = Connection(SERVER) + # 2 requests then a close + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"12345" + b"GET /2 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"67890" + ) + c.receive_data(b"") + assert get_all_events(c) == [ + Request( + method="GET", + target="/1", + headers=[("host", "a.com"), ("content-length", "5")], + ), + Data(data=b"12345"), + EndOfMessage(), + ] + assert c.states[CLIENT] is DONE + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + assert c.states[SERVER] is DONE + c.start_next_cycle() + assert get_all_events(c) == [ + Request( + method="GET", + target="/2", + headers=[("host", "a.com"), 
("content-length", "5")], + ), + Data(data=b"67890"), + EndOfMessage(), + ConnectionClosed(), + ] + assert c.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE} + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + assert c.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE} + c.send(ConnectionClosed()) + assert c.states == {CLIENT: CLOSED, SERVER: CLOSED} + + +def test_sendfile() -> None: + class SendfilePlaceholder: + def __len__(self) -> int: + return 10 + + placeholder = SendfilePlaceholder() + + def setup( + header: Tuple[str, str], http_version: str + ) -> Tuple[Connection, Optional[List[bytes]]]: + c = Connection(SERVER) + receive_and_get( + c, "GET / HTTP/{}\r\nHost: a\r\n\r\n".format(http_version).encode("ascii") + ) + headers = [] + if header: + headers.append(header) + c.send(Response(status_code=200, headers=headers)) + return c, c.send_with_data_passthrough(Data(data=placeholder)) # type: ignore + + c, data = setup(("Content-Length", "10"), "1.1") + assert data == [placeholder] # type: ignore + # Raises an error if the connection object doesn't think we've sent + # exactly 10 bytes + c.send(EndOfMessage()) + + _, data = setup(("Transfer-Encoding", "chunked"), "1.1") + assert placeholder in data # type: ignore + data[data.index(placeholder)] = b"x" * 10 # type: ignore + assert b"".join(data) == b"a\r\nxxxxxxxxxx\r\n" # type: ignore + + c, data = setup(None, "1.0") # type: ignore + assert data == [placeholder] # type: ignore + assert c.our_state is SEND_BODY + + +def test_errors() -> None: + # After a receive error, you can't receive + for role in [CLIENT, SERVER]: + c = Connection(our_role=role) + c.receive_data(b"gibberish\r\n\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + # Now any attempt to receive continues to raise + assert c.their_state is ERROR + assert c.our_state is not ERROR + print(c._cstate.states) + with pytest.raises(RemoteProtocolError): + c.next_event() + # But we can still 
yell at the client for sending us gibberish + if role is SERVER: + assert ( + c.send(Response(status_code=400, headers=[])) # type: ignore[arg-type] + == b"HTTP/1.1 400 \r\nConnection: close\r\n\r\n" + ) + + # After an error sending, you can no longer send + # (This is especially important for things like content-length errors, + # where there's complex internal state being modified) + def conn(role: Type[Sentinel]) -> Connection: + c = Connection(our_role=role) + if role is SERVER: + # Put it into the state where it *could* send a response... + receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") + assert c.our_state is SEND_RESPONSE + return c + + for role in [CLIENT, SERVER]: + if role is CLIENT: + # This HTTP/1.0 request won't be detected as bad until after we go + # through the state machine and hit the writing code + good = Request(method="GET", target="/", headers=[("Host", "example.com")]) + bad = Request( + method="GET", + target="/", + headers=[("Host", "example.com")], + http_version="1.0", + ) + elif role is SERVER: + good = Response(status_code=200, headers=[]) # type: ignore[arg-type,assignment] + bad = Response(status_code=200, headers=[], http_version="1.0") # type: ignore[arg-type,assignment] + # Make sure 'good' actually is good + c = conn(role) + c.send(good) + assert c.our_state is not ERROR + # Do that again, but this time sending 'bad' first + c = conn(role) + with pytest.raises(LocalProtocolError): + c.send(bad) + assert c.our_state is ERROR + assert c.their_state is not ERROR + # Now 'good' is not so good + with pytest.raises(LocalProtocolError): + c.send(good) + + # And check send_failed() too + c = conn(role) + c.send_failed() + assert c.our_state is ERROR + assert c.their_state is not ERROR + # This is idempotent + c.send_failed() + assert c.our_state is ERROR + assert c.their_state is not ERROR + + +def test_idle_receive_nothing() -> None: + # At one point this incorrectly raised an error + for role in [CLIENT, SERVER]: + c = Connection(role) + 
assert c.next_event() is NEED_DATA + + +def test_connection_drop() -> None: + c = Connection(SERVER) + c.receive_data(b"GET /") + assert c.next_event() is NEED_DATA + c.receive_data(b"") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +def test_408_request_timeout() -> None: + # Should be able to send this spontaneously as a server without seeing + # anything from client + p = ConnectionPair() + p.send(SERVER, Response(status_code=408, headers=[(b"connection", b"close")])) + + +# This used to raise IndexError +def test_empty_request() -> None: + c = Connection(SERVER) + c.receive_data(b"\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +# This used to raise IndexError +def test_empty_response() -> None: + c = Connection(CLIENT) + c.send(Request(method="GET", target="/", headers=[("Host", "a")])) + c.receive_data(b"\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +@pytest.mark.parametrize( + "data", + [ + b"\x00", + b"\x20", + b"\x16\x03\x01\x00\xa5", # Typical start of a TLS Client Hello + ], +) +def test_early_detection_of_invalid_request(data: bytes) -> None: + c = Connection(SERVER) + # Early detection should occur before even receiving a `\r\n` + c.receive_data(data) + with pytest.raises(RemoteProtocolError): + c.next_event() + + +@pytest.mark.parametrize( + "data", + [ + b"\x00", + b"\x20", + b"\x16\x03\x03\x00\x31", # Typical start of a TLS Server Hello + ], +) +def test_early_detection_of_invalid_response(data: bytes) -> None: + c = Connection(CLIENT) + # Early detection should occur before even receiving a `\r\n` + c.receive_data(data) + with pytest.raises(RemoteProtocolError): + c.next_event() + + +# This used to give different headers for HEAD and GET. +# The correct way to handle HEAD is to put whatever headers we *would* have +# put if it were a GET -- even though we know that for HEAD, those headers +# will be ignored. 
+def test_HEAD_framing_headers() -> None: + def setup(method: bytes, http_version: bytes) -> Connection: + c = Connection(SERVER) + c.receive_data( + method + b" / HTTP/" + http_version + b"\r\n" + b"Host: example.com\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert type(c.next_event()) is EndOfMessage + return c + + for method in [b"GET", b"HEAD"]: + # No Content-Length, HTTP/1.1 peer, should use chunked + c = setup(method, b"1.1") + assert ( + c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" # type: ignore[arg-type] + b"Transfer-Encoding: chunked\r\n\r\n" + ) + + # No Content-Length, HTTP/1.0 peer, frame with connection: close + c = setup(method, b"1.0") + assert ( + c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" # type: ignore[arg-type] + b"Connection: close\r\n\r\n" + ) + + # Content-Length + Transfer-Encoding, TE wins + c = setup(method, b"1.1") + assert ( + c.send( + Response( + status_code=200, + headers=[ + ("Content-Length", "100"), + ("Transfer-Encoding", "chunked"), + ], + ) + ) + == b"HTTP/1.1 200 \r\n" + b"Transfer-Encoding: chunked\r\n\r\n" + ) + + +def test_special_exceptions_for_lost_connection_in_message_body() -> None: + c = Connection(SERVER) + c.receive_data( + b"POST / HTTP/1.1\r\n" b"Host: example.com\r\n" b"Content-Length: 100\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert c.next_event() is NEED_DATA + c.receive_data(b"12345") + assert c.next_event() == Data(data=b"12345") + c.receive_data(b"") + with pytest.raises(RemoteProtocolError) as excinfo: + c.next_event() + assert "received 5 bytes" in str(excinfo.value) + assert "expected 100" in str(excinfo.value) + + c = Connection(SERVER) + c.receive_data( + b"POST / HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Transfer-Encoding: chunked\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert c.next_event() is NEED_DATA + c.receive_data(b"8\r\n012345") + assert c.next_event().data == b"012345" # type: ignore + 
c.receive_data(b"") + with pytest.raises(RemoteProtocolError) as excinfo: + c.next_event() + assert "incomplete chunked read" in str(excinfo.value) diff --git a/.venv/Lib/site-packages/h11/tests/test_events.py b/.venv/Lib/site-packages/h11/tests/test_events.py new file mode 100644 index 00000000..bc6c3137 --- /dev/null +++ b/.venv/Lib/site-packages/h11/tests/test_events.py @@ -0,0 +1,150 @@ +from http import HTTPStatus + +import pytest + +from .. import _events +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._util import LocalProtocolError + + +def test_events() -> None: + with pytest.raises(LocalProtocolError): + # Missing Host: + req = Request( + method="GET", target="/", headers=[("a", "b")], http_version="1.1" + ) + # But this is okay (HTTP/1.0) + req = Request(method="GET", target="/", headers=[("a", "b")], http_version="1.0") + # fields are normalized + assert req.method == b"GET" + assert req.target == b"/" + assert req.headers == [(b"a", b"b")] + assert req.http_version == b"1.0" + + # This is also okay -- has a Host (with weird capitalization, which is ok) + req = Request( + method="GET", + target="/", + headers=[("a", "b"), ("hOSt", "example.com")], + http_version="1.1", + ) + # we normalize header capitalization + assert req.headers == [(b"a", b"b"), (b"host", b"example.com")] + + # Multiple host is bad too + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Host", "a")], + http_version="1.1", + ) + # Even for HTTP/1.0 + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Host", "a")], + http_version="1.0", + ) + + # Header values are validated + for bad_char in "\x00\r\n\f\v": + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Foo", "asd" + bad_char)], + http_version="1.0", + 
) + + # But for compatibility we allow non-whitespace control characters, even + # though they're forbidden by the spec. + Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Foo", "asd\x01\x02\x7f")], + http_version="1.0", + ) + + # Request target is validated + for bad_byte in b"\x00\x20\x7f\xee": + target = bytearray(b"/") + target.append(bad_byte) + with pytest.raises(LocalProtocolError): + Request( + method="GET", target=target, headers=[("Host", "a")], http_version="1.1" + ) + + # Request method is validated + with pytest.raises(LocalProtocolError): + Request( + method="GET / HTTP/1.1", + target=target, + headers=[("Host", "a")], + http_version="1.1", + ) + + ir = InformationalResponse(status_code=100, headers=[("Host", "a")]) + assert ir.status_code == 100 + assert ir.headers == [(b"host", b"a")] + assert ir.http_version == b"1.1" + + with pytest.raises(LocalProtocolError): + InformationalResponse(status_code=200, headers=[("Host", "a")]) + + resp = Response(status_code=204, headers=[], http_version="1.0") # type: ignore[arg-type] + assert resp.status_code == 204 + assert resp.headers == [] + assert resp.http_version == b"1.0" + + with pytest.raises(LocalProtocolError): + resp = Response(status_code=100, headers=[], http_version="1.0") # type: ignore[arg-type] + + with pytest.raises(LocalProtocolError): + Response(status_code="100", headers=[], http_version="1.0") # type: ignore[arg-type] + + with pytest.raises(LocalProtocolError): + InformationalResponse(status_code=b"100", headers=[], http_version="1.0") # type: ignore[arg-type] + + d = Data(data=b"asdf") + assert d.data == b"asdf" + + eom = EndOfMessage() + assert eom.headers == [] + + cc = ConnectionClosed() + assert repr(cc) == "ConnectionClosed()" + + +def test_intenum_status_code() -> None: + # https://github.com/python-hyper/h11/issues/72 + + r = Response(status_code=HTTPStatus.OK, headers=[], http_version="1.0") # type: ignore[arg-type] + assert r.status_code == HTTPStatus.OK + assert 
type(r.status_code) is not type(HTTPStatus.OK) + assert type(r.status_code) is int + + +def test_header_casing() -> None: + r = Request( + method="GET", + target="/", + headers=[("Host", "example.org"), ("Connection", "keep-alive")], + http_version="1.1", + ) + assert len(r.headers) == 2 + assert r.headers[0] == (b"host", b"example.org") + assert r.headers == [(b"host", b"example.org"), (b"connection", b"keep-alive")] + assert r.headers.raw_items() == [ + (b"Host", b"example.org"), + (b"Connection", b"keep-alive"), + ] diff --git a/.venv/Lib/site-packages/h11/tests/test_headers.py b/.venv/Lib/site-packages/h11/tests/test_headers.py new file mode 100644 index 00000000..ba53d088 --- /dev/null +++ b/.venv/Lib/site-packages/h11/tests/test_headers.py @@ -0,0 +1,157 @@ +import pytest + +from .._events import Request +from .._headers import ( + get_comma_header, + has_expect_100_continue, + Headers, + normalize_and_validate, + set_comma_header, +) +from .._util import LocalProtocolError + + +def test_normalize_and_validate() -> None: + assert normalize_and_validate([("foo", "bar")]) == [(b"foo", b"bar")] + assert normalize_and_validate([(b"foo", b"bar")]) == [(b"foo", b"bar")] + + # no leading/trailing whitespace in names + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo ", "bar")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b" foo", "bar")]) + + # no weird characters in names + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([(b"foo bar", b"baz")]) + assert "foo bar" in str(excinfo.value) + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\x00bar", b"baz")]) + # Not even 8-bit characters: + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\xffbar", b"baz")]) + # And not even the control characters we allow in values: + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\x01bar", b"baz")]) + + # no return or NUL characters in 
values + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([("foo", "bar\rbaz")]) + assert "bar\\rbaz" in str(excinfo.value) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "bar\nbaz")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "bar\x00baz")]) + # no leading/trailing whitespace + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "barbaz ")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", " barbaz")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "barbaz\t")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "\tbarbaz")]) + + # content-length + assert normalize_and_validate([("Content-Length", "1")]) == [ + (b"content-length", b"1") + ] + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "asdf")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1x")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1"), ("Content-Length", "2")]) + assert normalize_and_validate( + [("Content-Length", "0"), ("Content-Length", "0")] + ) == [(b"content-length", b"0")] + assert normalize_and_validate([("Content-Length", "0 , 0")]) == [ + (b"content-length", b"0") + ] + with pytest.raises(LocalProtocolError): + normalize_and_validate( + [("Content-Length", "1"), ("Content-Length", "1"), ("Content-Length", "2")] + ) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1 , 1,2")]) + + # transfer-encoding + assert normalize_and_validate([("Transfer-Encoding", "chunked")]) == [ + (b"transfer-encoding", b"chunked") + ] + assert normalize_and_validate([("Transfer-Encoding", "cHuNkEd")]) == [ + (b"transfer-encoding", b"chunked") + ] + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([("Transfer-Encoding", "gzip")]) + assert 
excinfo.value.error_status_hint == 501 # Not Implemented + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate( + [("Transfer-Encoding", "chunked"), ("Transfer-Encoding", "gzip")] + ) + assert excinfo.value.error_status_hint == 501 # Not Implemented + + +def test_get_set_comma_header() -> None: + headers = normalize_and_validate( + [ + ("Connection", "close"), + ("whatever", "something"), + ("connectiON", "fOo,, , BAR"), + ] + ) + + assert get_comma_header(headers, b"connection") == [b"close", b"foo", b"bar"] + + headers = set_comma_header(headers, b"newthing", ["a", "b"]) # type: ignore + + with pytest.raises(LocalProtocolError): + set_comma_header(headers, b"newthing", [" a", "b"]) # type: ignore + + assert headers == [ + (b"connection", b"close"), + (b"whatever", b"something"), + (b"connection", b"fOo,, , BAR"), + (b"newthing", b"a"), + (b"newthing", b"b"), + ] + + headers = set_comma_header(headers, b"whatever", ["different thing"]) # type: ignore + + assert headers == [ + (b"connection", b"close"), + (b"connection", b"fOo,, , BAR"), + (b"newthing", b"a"), + (b"newthing", b"b"), + (b"whatever", b"different thing"), + ] + + +def test_has_100_continue() -> None: + assert has_expect_100_continue( + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Expect", "100-continue")], + ) + ) + assert not has_expect_100_continue( + Request(method="GET", target="/", headers=[("Host", "example.com")]) + ) + # Case insensitive + assert has_expect_100_continue( + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Expect", "100-Continue")], + ) + ) + # Doesn't work in HTTP/1.0 + assert not has_expect_100_continue( + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Expect", "100-continue")], + http_version="1.0", + ) + ) diff --git a/.venv/Lib/site-packages/h11/tests/test_helpers.py b/.venv/Lib/site-packages/h11/tests/test_helpers.py new file mode 100644 index 
00000000..c329c767 --- /dev/null +++ b/.venv/Lib/site-packages/h11/tests/test_helpers.py @@ -0,0 +1,32 @@ +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .helpers import normalize_data_events + + +def test_normalize_data_events() -> None: + assert normalize_data_events( + [ + Data(data=bytearray(b"1")), + Data(data=b"2"), + Response(status_code=200, headers=[]), # type: ignore[arg-type] + Data(data=b"3"), + Data(data=b"4"), + EndOfMessage(), + Data(data=b"5"), + Data(data=b"6"), + Data(data=b"7"), + ] + ) == [ + Data(data=b"12"), + Response(status_code=200, headers=[]), # type: ignore[arg-type] + Data(data=b"34"), + EndOfMessage(), + Data(data=b"567"), + ] diff --git a/.venv/Lib/site-packages/h11/tests/test_io.py b/.venv/Lib/site-packages/h11/tests/test_io.py new file mode 100644 index 00000000..2b47c0ea --- /dev/null +++ b/.venv/Lib/site-packages/h11/tests/test_io.py @@ -0,0 +1,572 @@ +from typing import Any, Callable, Generator, List + +import pytest + +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._headers import Headers, normalize_and_validate +from .._readers import ( + _obsolete_line_fold, + ChunkedReader, + ContentLengthReader, + Http10Reader, + READERS, +) +from .._receivebuffer import ReceiveBuffer +from .._state import ( + CLIENT, + CLOSED, + DONE, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from .._util import LocalProtocolError +from .._writers import ( + ChunkedWriter, + ContentLengthWriter, + Http10Writer, + write_any_response, + write_headers, + write_request, + WRITERS, +) +from .helpers import normalize_data_events + +SIMPLE_CASES = [ + ( + (CLIENT, IDLE), + Request( + method="GET", + target="/a", + headers=[("Host", "foo"), ("Connection", "close")], + ), + b"GET /a HTTP/1.1\r\nHost: foo\r\nConnection: 
close\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + Response(status_code=200, headers=[("Connection", "close")], reason=b"OK"), + b"HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + Response(status_code=200, headers=[], reason=b"OK"), # type: ignore[arg-type] + b"HTTP/1.1 200 OK\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + InformationalResponse( + status_code=101, headers=[("Upgrade", "websocket")], reason=b"Upgrade" + ), + b"HTTP/1.1 101 Upgrade\r\nUpgrade: websocket\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + InformationalResponse(status_code=101, headers=[], reason=b"Upgrade"), # type: ignore[arg-type] + b"HTTP/1.1 101 Upgrade\r\n\r\n", + ), +] + + +def dowrite(writer: Callable[..., None], obj: Any) -> bytes: + got_list: List[bytes] = [] + writer(obj, got_list.append) + return b"".join(got_list) + + +def tw(writer: Any, obj: Any, expected: Any) -> None: + got = dowrite(writer, obj) + assert got == expected + + +def makebuf(data: bytes) -> ReceiveBuffer: + buf = ReceiveBuffer() + buf += data + return buf + + +def tr(reader: Any, data: bytes, expected: Any) -> None: + def check(got: Any) -> None: + assert got == expected + # Headers should always be returned as bytes, not e.g. 
bytearray + # https://github.com/python-hyper/wsproto/pull/54#issuecomment-377709478 + for name, value in getattr(got, "headers", []): + assert type(name) is bytes + assert type(value) is bytes + + # Simple: consume whole thing + buf = makebuf(data) + check(reader(buf)) + assert not buf + + # Incrementally growing buffer + buf = ReceiveBuffer() + for i in range(len(data)): + assert reader(buf) is None + buf += data[i : i + 1] + check(reader(buf)) + + # Trailing data + buf = makebuf(data) + buf += b"trailing" + check(reader(buf)) + assert bytes(buf) == b"trailing" + + +def test_writers_simple() -> None: + for ((role, state), event, binary) in SIMPLE_CASES: + tw(WRITERS[role, state], event, binary) + + +def test_readers_simple() -> None: + for ((role, state), event, binary) in SIMPLE_CASES: + tr(READERS[role, state], binary, event) + + +def test_writers_unusual() -> None: + # Simple test of the write_headers utility routine + tw( + write_headers, + normalize_and_validate([("foo", "bar"), ("baz", "quux")]), + b"foo: bar\r\nbaz: quux\r\n\r\n", + ) + tw(write_headers, Headers([]), b"\r\n") + + # We understand HTTP/1.0, but we don't speak it + with pytest.raises(LocalProtocolError): + tw( + write_request, + Request( + method="GET", + target="/", + headers=[("Host", "foo"), ("Connection", "close")], + http_version="1.0", + ), + None, + ) + with pytest.raises(LocalProtocolError): + tw( + write_any_response, + Response( + status_code=200, headers=[("Connection", "close")], http_version="1.0" + ), + None, + ) + + +def test_readers_unusual() -> None: + # Reading HTTP/1.0 + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.0\r\nSome: header\r\n\r\n", + Request( + method="HEAD", + target="/foo", + headers=[("Some", "header")], + http_version="1.0", + ), + ) + + # check no-headers, since it's only legal with HTTP/1.0 + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.0\r\n\r\n", + Request(method="HEAD", target="/foo", headers=[], http_version="1.0"), # type: ignore[arg-type] + 
) + + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\nSome: header\r\n\r\n", + Response( + status_code=200, + headers=[("Some", "header")], + http_version="1.0", + reason=b"OK", + ), + ) + + # single-character header values (actually disallowed by the ABNF in RFC + # 7230 -- this is a bug in the standard that we originally copied...) + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\n" b"Foo: a a a a a \r\n\r\n", + Response( + status_code=200, + headers=[("Foo", "a a a a a")], + http_version="1.0", + reason=b"OK", + ), + ) + + # Empty headers -- also legal + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\n" b"Foo:\r\n\r\n", + Response( + status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK" + ), + ) + + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\n" b"Foo: \t \t \r\n\r\n", + Response( + status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK" + ), + ) + + # Tolerate broken servers that leave off the response code + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200\r\n" b"Foo: bar\r\n\r\n", + Response( + status_code=200, headers=[("Foo", "bar")], http_version="1.0", reason=b"" + ), + ) + + # Tolerate headers line endings (\r\n and \n) + # \n\r\b between headers and body + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.1 200 OK\r\nSomeHeader: val\n\r\n", + Response( + status_code=200, + headers=[("SomeHeader", "val")], + http_version="1.1", + reason="OK", + ), + ) + + # delimited only with \n + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.1 200 OK\nSomeHeader1: val1\nSomeHeader2: val2\n\n", + Response( + status_code=200, + headers=[("SomeHeader1", "val1"), ("SomeHeader2", "val2")], + http_version="1.1", + reason="OK", + ), + ) + + # mixed \r\n and \n + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.1 200 OK\r\nSomeHeader1: val1\nSomeHeader2: val2\n\r\n", + Response( + status_code=200, + headers=[("SomeHeader1", "val1"), ("SomeHeader2", "val2")], + http_version="1.1", 
+ reason="OK", + ), + ) + + # obsolete line folding + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Some: multi-line\r\n" + b" header\r\n" + b"\tnonsense\r\n" + b" \t \t\tI guess\r\n" + b"Connection: close\r\n" + b"More-nonsense: in the\r\n" + b" last header \r\n\r\n", + Request( + method="HEAD", + target="/foo", + headers=[ + ("Host", "example.com"), + ("Some", "multi-line header nonsense I guess"), + ("Connection", "close"), + ("More-nonsense", "in the last header"), + ], + ), + ) + + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b" folded: line\r\n\r\n", + None, + ) + + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"foo : line\r\n\r\n", + None, + ) + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n", + None, + ) + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n", + None, + ) + with pytest.raises(LocalProtocolError): + tr(READERS[CLIENT, IDLE], b"HEAD /foo HTTP/1.1\r\n" b": line\r\n\r\n", None) + + +def test__obsolete_line_fold_bytes() -> None: + # _obsolete_line_fold has a defensive cast to bytearray, which is + # necessary to protect against O(n^2) behavior in case anyone ever passes + # in regular bytestrings... but right now we never pass in regular + # bytestrings. so this test just exists to get some coverage on that + # defensive cast. 
+ assert list(_obsolete_line_fold([b"aaa", b"bbb", b" ccc", b"ddd"])) == [ + b"aaa", + bytearray(b"bbb ccc"), + b"ddd", + ] + + +def _run_reader_iter( + reader: Any, buf: bytes, do_eof: bool +) -> Generator[Any, None, None]: + while True: + event = reader(buf) + if event is None: + break + yield event + # body readers have undefined behavior after returning EndOfMessage, + # because this changes the state so they don't get called again + if type(event) is EndOfMessage: + break + if do_eof: + assert not buf + yield reader.read_eof() + + +def _run_reader(*args: Any) -> List[Event]: + events = list(_run_reader_iter(*args)) + return normalize_data_events(events) + + +def t_body_reader(thunk: Any, data: bytes, expected: Any, do_eof: bool = False) -> None: + # Simple: consume whole thing + print("Test 1") + buf = makebuf(data) + assert _run_reader(thunk(), buf, do_eof) == expected + + # Incrementally growing buffer + print("Test 2") + reader = thunk() + buf = ReceiveBuffer() + events = [] + for i in range(len(data)): + events += _run_reader(reader, buf, False) + buf += data[i : i + 1] + events += _run_reader(reader, buf, do_eof) + assert normalize_data_events(events) == expected + + is_complete = any(type(event) is EndOfMessage for event in expected) + if is_complete and not do_eof: + buf = makebuf(data + b"trailing") + assert _run_reader(thunk(), buf, False) == expected + + +def test_ContentLengthReader() -> None: + t_body_reader(lambda: ContentLengthReader(0), b"", [EndOfMessage()]) + + t_body_reader( + lambda: ContentLengthReader(10), + b"0123456789", + [Data(data=b"0123456789"), EndOfMessage()], + ) + + +def test_Http10Reader() -> None: + t_body_reader(Http10Reader, b"", [EndOfMessage()], do_eof=True) + t_body_reader(Http10Reader, b"asdf", [Data(data=b"asdf")], do_eof=False) + t_body_reader( + Http10Reader, b"asdf", [Data(data=b"asdf"), EndOfMessage()], do_eof=True + ) + + +def test_ChunkedReader() -> None: + t_body_reader(ChunkedReader, b"0\r\n\r\n", 
[EndOfMessage()]) + + t_body_reader( + ChunkedReader, + b"0\r\nSome: header\r\n\r\n", + [EndOfMessage(headers=[("Some", "header")])], + ) + + t_body_reader( + ChunkedReader, + b"5\r\n01234\r\n" + + b"10\r\n0123456789abcdef\r\n" + + b"0\r\n" + + b"Some: header\r\n\r\n", + [ + Data(data=b"012340123456789abcdef"), + EndOfMessage(headers=[("Some", "header")]), + ], + ) + + t_body_reader( + ChunkedReader, + b"5\r\n01234\r\n" + b"10\r\n0123456789abcdef\r\n" + b"0\r\n\r\n", + [Data(data=b"012340123456789abcdef"), EndOfMessage()], + ) + + # handles upper and lowercase hex + t_body_reader( + ChunkedReader, + b"aA\r\n" + b"x" * 0xAA + b"\r\n" + b"0\r\n\r\n", + [Data(data=b"x" * 0xAA), EndOfMessage()], + ) + + # refuses arbitrarily long chunk integers + with pytest.raises(LocalProtocolError): + # Technically this is legal HTTP/1.1, but we refuse to process chunk + # sizes that don't fit into 20 characters of hex + t_body_reader(ChunkedReader, b"9" * 100 + b"\r\nxxx", [Data(data=b"xxx")]) + + # refuses garbage in the chunk count + with pytest.raises(LocalProtocolError): + t_body_reader(ChunkedReader, b"10\x00\r\nxxx", None) + + # handles (and discards) "chunk extensions" omg wtf + t_body_reader( + ChunkedReader, + b"5; hello=there\r\n" + + b"xxxxx" + + b"\r\n" + + b'0; random="junk"; some=more; canbe=lonnnnngg\r\n\r\n', + [Data(data=b"xxxxx"), EndOfMessage()], + ) + + t_body_reader( + ChunkedReader, + b"5 \r\n01234\r\n" + b"0\r\n\r\n", + [Data(data=b"01234"), EndOfMessage()], + ) + + +def test_ContentLengthWriter() -> None: + w = ContentLengthWriter(5) + assert dowrite(w, Data(data=b"123")) == b"123" + assert dowrite(w, Data(data=b"45")) == b"45" + assert dowrite(w, EndOfMessage()) == b"" + + w = ContentLengthWriter(5) + with pytest.raises(LocalProtocolError): + dowrite(w, Data(data=b"123456")) + + w = ContentLengthWriter(5) + dowrite(w, Data(data=b"123")) + with pytest.raises(LocalProtocolError): + dowrite(w, Data(data=b"456")) + + w = ContentLengthWriter(5) + dowrite(w, 
Data(data=b"123")) + with pytest.raises(LocalProtocolError): + dowrite(w, EndOfMessage()) + + w = ContentLengthWriter(5) + dowrite(w, Data(data=b"123")) == b"123" + dowrite(w, Data(data=b"45")) == b"45" + with pytest.raises(LocalProtocolError): + dowrite(w, EndOfMessage(headers=[("Etag", "asdf")])) + + +def test_ChunkedWriter() -> None: + w = ChunkedWriter() + assert dowrite(w, Data(data=b"aaa")) == b"3\r\naaa\r\n" + assert dowrite(w, Data(data=b"a" * 20)) == b"14\r\n" + b"a" * 20 + b"\r\n" + + assert dowrite(w, Data(data=b"")) == b"" + + assert dowrite(w, EndOfMessage()) == b"0\r\n\r\n" + + assert ( + dowrite(w, EndOfMessage(headers=[("Etag", "asdf"), ("a", "b")])) + == b"0\r\nEtag: asdf\r\na: b\r\n\r\n" + ) + + +def test_Http10Writer() -> None: + w = Http10Writer() + assert dowrite(w, Data(data=b"1234")) == b"1234" + assert dowrite(w, EndOfMessage()) == b"" + + with pytest.raises(LocalProtocolError): + dowrite(w, EndOfMessage(headers=[("Etag", "asdf")])) + + +def test_reject_garbage_after_request_line() -> None: + with pytest.raises(LocalProtocolError): + tr(READERS[SERVER, SEND_RESPONSE], b"HTTP/1.0 200 OK\x00xxxx\r\n\r\n", None) + + +def test_reject_garbage_after_response_line() -> None: + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1 xxxxxx\r\n" b"Host: a\r\n\r\n", + None, + ) + + +def test_reject_garbage_in_header_line() -> None: + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"Host: foo\x00bar\r\n\r\n", + None, + ) + + +def test_reject_non_vchar_in_path() -> None: + for bad_char in b"\x00\x20\x7f\xee": + message = bytearray(b"HEAD /") + message.append(bad_char) + message.extend(b" HTTP/1.1\r\nHost: foobar\r\n\r\n") + with pytest.raises(LocalProtocolError): + tr(READERS[CLIENT, IDLE], message, None) + + +# https://github.com/python-hyper/h11/issues/57 +def test_allow_some_garbage_in_cookies() -> None: + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" 
+ b"Host: foo\r\n" + b"Set-Cookie: ___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900\r\n" + b"\r\n", + Request( + method="HEAD", + target="/foo", + headers=[ + ("Host", "foo"), + ("Set-Cookie", "___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900"), + ], + ), + ) + + +def test_host_comes_first() -> None: + tw( + write_headers, + normalize_and_validate([("foo", "bar"), ("Host", "example.com")]), + b"Host: example.com\r\nfoo: bar\r\n\r\n", + ) diff --git a/.venv/Lib/site-packages/h11/tests/test_receivebuffer.py b/.venv/Lib/site-packages/h11/tests/test_receivebuffer.py new file mode 100644 index 00000000..21a3870b --- /dev/null +++ b/.venv/Lib/site-packages/h11/tests/test_receivebuffer.py @@ -0,0 +1,135 @@ +import re +from typing import Tuple + +import pytest + +from .._receivebuffer import ReceiveBuffer + + +def test_receivebuffer() -> None: + b = ReceiveBuffer() + assert not b + assert len(b) == 0 + assert bytes(b) == b"" + + b += b"123" + assert b + assert len(b) == 3 + assert bytes(b) == b"123" + + assert bytes(b) == b"123" + + assert b.maybe_extract_at_most(2) == b"12" + assert b + assert len(b) == 1 + assert bytes(b) == b"3" + + assert bytes(b) == b"3" + + assert b.maybe_extract_at_most(10) == b"3" + assert bytes(b) == b"" + + assert b.maybe_extract_at_most(10) is None + assert not b + + ################################################################ + # maybe_extract_until_next + ################################################################ + + b += b"123\n456\r\n789\r\n" + + assert b.maybe_extract_next_line() == b"123\n456\r\n" + assert bytes(b) == b"789\r\n" + + assert b.maybe_extract_next_line() == b"789\r\n" + assert bytes(b) == b"" + + b += b"12\r" + assert b.maybe_extract_next_line() is None + assert bytes(b) == b"12\r" + + b += b"345\n\r" + assert b.maybe_extract_next_line() is None + assert bytes(b) == b"12\r345\n\r" + + # here we stopped at the middle of b"\r\n" delimiter + + b += b"\n6789aaa123\r\n" + assert b.maybe_extract_next_line() == 
b"12\r345\n\r\n" + assert b.maybe_extract_next_line() == b"6789aaa123\r\n" + assert b.maybe_extract_next_line() is None + assert bytes(b) == b"" + + ################################################################ + # maybe_extract_lines + ################################################################ + + b += b"123\r\na: b\r\nfoo:bar\r\n\r\ntrailing" + lines = b.maybe_extract_lines() + assert lines == [b"123", b"a: b", b"foo:bar"] + assert bytes(b) == b"trailing" + + assert b.maybe_extract_lines() is None + + b += b"\r\n\r" + assert b.maybe_extract_lines() is None + + assert b.maybe_extract_at_most(100) == b"trailing\r\n\r" + assert not b + + # Empty body case (as happens at the end of chunked encoding if there are + # no trailing headers, e.g.) + b += b"\r\ntrailing" + assert b.maybe_extract_lines() == [] + assert bytes(b) == b"trailing" + + +@pytest.mark.parametrize( + "data", + [ + pytest.param( + ( + b"HTTP/1.1 200 OK\r\n", + b"Content-type: text/plain\r\n", + b"Connection: close\r\n", + b"\r\n", + b"Some body", + ), + id="with_crlf_delimiter", + ), + pytest.param( + ( + b"HTTP/1.1 200 OK\n", + b"Content-type: text/plain\n", + b"Connection: close\n", + b"\n", + b"Some body", + ), + id="with_lf_only_delimiter", + ), + pytest.param( + ( + b"HTTP/1.1 200 OK\n", + b"Content-type: text/plain\r\n", + b"Connection: close\n", + b"\n", + b"Some body", + ), + id="with_mixed_crlf_and_lf", + ), + ], +) +def test_receivebuffer_for_invalid_delimiter(data: Tuple[bytes]) -> None: + b = ReceiveBuffer() + + for line in data: + b += line + + lines = b.maybe_extract_lines() + + assert lines == [ + b"HTTP/1.1 200 OK", + b"Content-type: text/plain", + b"Connection: close", + ] + assert bytes(b) == b"Some body" diff --git a/.venv/Lib/site-packages/h11/tests/test_state.py b/.venv/Lib/site-packages/h11/tests/test_state.py new file mode 100644 index 00000000..bc974e63 --- /dev/null +++ b/.venv/Lib/site-packages/h11/tests/test_state.py @@ -0,0 +1,271 @@ +import pytest + +from 
.._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._state import ( + _SWITCH_CONNECT, + _SWITCH_UPGRADE, + CLIENT, + CLOSED, + ConnectionState, + DONE, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from .._util import LocalProtocolError + + +def test_ConnectionState() -> None: + cs = ConnectionState() + + # Basic event-triggered transitions + + assert cs.states == {CLIENT: IDLE, SERVER: IDLE} + + cs.process_event(CLIENT, Request) + # The SERVER-Request special case: + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + # Illegal transitions raise an error and nothing happens + with pytest.raises(LocalProtocolError): + cs.process_event(CLIENT, Request) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, Response) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY} + + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, EndOfMessage) + assert cs.states == {CLIENT: DONE, SERVER: DONE} + + # State-triggered transition + + cs.process_event(SERVER, ConnectionClosed) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED} + + +def test_ConnectionState_keep_alive() -> None: + # keep_alive = False + cs = ConnectionState() + cs.process_event(CLIENT, Request) + cs.process_keep_alive_disabled() + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE} + + +def test_ConnectionState_keep_alive_in_DONE() -> None: + # Check that if keep_alive is disabled when the CLIENT is already in DONE, + # then this is sufficient to immediately trigger the DONE -> 
MUST_CLOSE + # transition + cs = ConnectionState() + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + assert cs.states[CLIENT] is DONE + cs.process_keep_alive_disabled() + assert cs.states[CLIENT] is MUST_CLOSE + + +def test_ConnectionState_switch_denied() -> None: + for switch_type in (_SWITCH_CONNECT, _SWITCH_UPGRADE): + for deny_early in (True, False): + cs = ConnectionState() + cs.process_client_switch_proposal(switch_type) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, Data) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + assert switch_type in cs.pending_switch_proposals + + if deny_early: + # before client reaches DONE + cs.process_event(SERVER, Response) + assert not cs.pending_switch_proposals + + cs.process_event(CLIENT, EndOfMessage) + + if deny_early: + assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} + else: + assert cs.states == { + CLIENT: MIGHT_SWITCH_PROTOCOL, + SERVER: SEND_RESPONSE, + } + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == { + CLIENT: MIGHT_SWITCH_PROTOCOL, + SERVER: SEND_RESPONSE, + } + + cs.process_event(SERVER, Response) + assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} + assert not cs.pending_switch_proposals + + +_response_type_for_switch = { + _SWITCH_UPGRADE: InformationalResponse, + _SWITCH_CONNECT: Response, + None: Response, +} + + +def test_ConnectionState_protocol_switch_accepted() -> None: + for switch_event in [_SWITCH_UPGRADE, _SWITCH_CONNECT]: + cs = ConnectionState() + cs.process_client_switch_proposal(switch_event) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, Data) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, 
_response_type_for_switch[switch_event], switch_event) + assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} + + +def test_ConnectionState_double_protocol_switch() -> None: + # CONNECT + Upgrade is legal! Very silly, but legal. So we support + # it. Because sometimes doing the silly thing is easier than not. + for server_switch in [None, _SWITCH_UPGRADE, _SWITCH_CONNECT]: + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_client_switch_proposal(_SWITCH_CONNECT) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + cs.process_event( + SERVER, _response_type_for_switch[server_switch], server_switch + ) + if server_switch is None: + assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} + else: + assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} + + +def test_ConnectionState_inconsistent_protocol_switch() -> None: + for client_switches, server_switch in [ + ([], _SWITCH_CONNECT), + ([], _SWITCH_UPGRADE), + ([_SWITCH_UPGRADE], _SWITCH_CONNECT), + ([_SWITCH_CONNECT], _SWITCH_UPGRADE), + ]: + cs = ConnectionState() + for client_switch in client_switches: # type: ignore[attr-defined] + cs.process_client_switch_proposal(client_switch) + cs.process_event(CLIENT, Request) + with pytest.raises(LocalProtocolError): + cs.process_event(SERVER, Response, server_switch) + + +def test_ConnectionState_keepalive_protocol_switch_interaction() -> None: + # keep_alive=False + pending_switch_proposals + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_event(CLIENT, Request) + cs.process_keep_alive_disabled() + cs.process_event(CLIENT, Data) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + # the protocol switch "wins" + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + + # but 
when the server denies the request, keep_alive comes back into play + cs.process_event(SERVER, Response) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_BODY} + + +def test_ConnectionState_reuse() -> None: + cs = ConnectionState() + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + cs.start_next_cycle() + assert cs.states == {CLIENT: IDLE, SERVER: IDLE} + + # No keepalive + + cs.process_event(CLIENT, Request) + cs.process_keep_alive_disabled() + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + # One side closed + + cs = ConnectionState() + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(CLIENT, ConnectionClosed) + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + # Succesful protocol switch + + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, InformationalResponse, _SWITCH_UPGRADE) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + # Failed protocol switch + + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + cs.start_next_cycle() + assert cs.states == {CLIENT: IDLE, SERVER: IDLE} + + +def test_server_request_is_illegal() -> None: + # There used to be a bug in how we handled the Request special case that + 
# made this allowed... + cs = ConnectionState() + with pytest.raises(LocalProtocolError): + cs.process_event(SERVER, Request) diff --git a/.venv/Lib/site-packages/h11/tests/test_util.py b/.venv/Lib/site-packages/h11/tests/test_util.py new file mode 100644 index 00000000..79bc0951 --- /dev/null +++ b/.venv/Lib/site-packages/h11/tests/test_util.py @@ -0,0 +1,112 @@ +import re +import sys +import traceback +from typing import NoReturn + +import pytest + +from .._util import ( + bytesify, + LocalProtocolError, + ProtocolError, + RemoteProtocolError, + Sentinel, + validate, +) + + +def test_ProtocolError() -> None: + with pytest.raises(TypeError): + ProtocolError("abstract base class") + + +def test_LocalProtocolError() -> None: + try: + raise LocalProtocolError("foo") + except LocalProtocolError as e: + assert str(e) == "foo" + assert e.error_status_hint == 400 + + try: + raise LocalProtocolError("foo", error_status_hint=418) + except LocalProtocolError as e: + assert str(e) == "foo" + assert e.error_status_hint == 418 + + def thunk() -> NoReturn: + raise LocalProtocolError("a", error_status_hint=420) + + try: + try: + thunk() + except LocalProtocolError as exc1: + orig_traceback = "".join(traceback.format_tb(sys.exc_info()[2])) + exc1._reraise_as_remote_protocol_error() + except RemoteProtocolError as exc2: + assert type(exc2) is RemoteProtocolError + assert exc2.args == ("a",) + assert exc2.error_status_hint == 420 + new_traceback = "".join(traceback.format_tb(sys.exc_info()[2])) + assert new_traceback.endswith(orig_traceback) + + +def test_validate() -> None: + my_re = re.compile(rb"(?P[0-9]+)\.(?P[0-9]+)") + with pytest.raises(LocalProtocolError): + validate(my_re, b"0.") + + groups = validate(my_re, b"0.1") + assert groups == {"group1": b"0", "group2": b"1"} + + # successful partial matches are an error - must match whole string + with pytest.raises(LocalProtocolError): + validate(my_re, b"0.1xx") + with pytest.raises(LocalProtocolError): + validate(my_re, 
b"0.1\n") + + +def test_validate_formatting() -> None: + my_re = re.compile(rb"foo") + + with pytest.raises(LocalProtocolError) as excinfo: + validate(my_re, b"", "oops") + assert "oops" in str(excinfo.value) + + with pytest.raises(LocalProtocolError) as excinfo: + validate(my_re, b"", "oops {}") + assert "oops {}" in str(excinfo.value) + + with pytest.raises(LocalProtocolError) as excinfo: + validate(my_re, b"", "oops {} xx", 10) + assert "oops 10 xx" in str(excinfo.value) + + +def test_make_sentinel() -> None: + class S(Sentinel, metaclass=Sentinel): + pass + + assert repr(S) == "S" + assert S == S + assert type(S).__name__ == "S" + assert S in {S} + assert type(S) is S + + class S2(Sentinel, metaclass=Sentinel): + pass + + assert repr(S2) == "S2" + assert S != S2 + assert S not in {S2} + assert type(S) is not type(S2) + + +def test_bytesify() -> None: + assert bytesify(b"123") == b"123" + assert bytesify(bytearray(b"123")) == b"123" + assert bytesify("123") == b"123" + + with pytest.raises(UnicodeEncodeError): + bytesify("\u1234") + + with pytest.raises(TypeError): + bytesify(10) diff --git a/.venv/Lib/site-packages/hangul_romanize/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/hangul_romanize/__pycache__/__init__.cpython-311.pyc index 9fc76289..af80e5ab 100644 Binary files a/.venv/Lib/site-packages/hangul_romanize/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/hangul_romanize/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/hangul_romanize/__pycache__/core.cpython-311.pyc b/.venv/Lib/site-packages/hangul_romanize/__pycache__/core.cpython-311.pyc index 66d32392..738b90f8 100644 Binary files a/.venv/Lib/site-packages/hangul_romanize/__pycache__/core.cpython-311.pyc and b/.venv/Lib/site-packages/hangul_romanize/__pycache__/core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/hangul_romanize/__pycache__/rule.cpython-311.pyc 
b/.venv/Lib/site-packages/hangul_romanize/__pycache__/rule.cpython-311.pyc index 0aeb4b05..86562db7 100644 Binary files a/.venv/Lib/site-packages/hangul_romanize/__pycache__/rule.cpython-311.pyc and b/.venv/Lib/site-packages/hangul_romanize/__pycache__/rule.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore-1.0.5.dist-info/INSTALLER b/.venv/Lib/site-packages/httpcore-1.0.5.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/Lib/site-packages/httpcore-1.0.5.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/Lib/site-packages/httpcore-1.0.5.dist-info/METADATA b/.venv/Lib/site-packages/httpcore-1.0.5.dist-info/METADATA new file mode 100644 index 00000000..0e200953 --- /dev/null +++ b/.venv/Lib/site-packages/httpcore-1.0.5.dist-info/METADATA @@ -0,0 +1,607 @@ +Metadata-Version: 2.3 +Name: httpcore +Version: 1.0.5 +Summary: A minimal low-level HTTP client. +Project-URL: Documentation, https://www.encode.io/httpcore +Project-URL: Homepage, https://www.encode.io/httpcore/ +Project-URL: Source, https://github.com/encode/httpcore +Author-email: Tom Christie +License-Expression: BSD-3-Clause +License-File: LICENSE.md +Classifier: Development Status :: 3 - Alpha +Classifier: Environment :: Web Environment +Classifier: Framework :: AsyncIO +Classifier: Framework :: Trio +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Internet :: WWW/HTTP +Requires-Python: >=3.8 +Requires-Dist: certifi +Requires-Dist: h11<0.15,>=0.13 +Provides-Extra: 
asyncio +Requires-Dist: anyio<5.0,>=4.0; extra == 'asyncio' +Provides-Extra: http2 +Requires-Dist: h2<5,>=3; extra == 'http2' +Provides-Extra: socks +Requires-Dist: socksio==1.*; extra == 'socks' +Provides-Extra: trio +Requires-Dist: trio<0.26.0,>=0.22.0; extra == 'trio' +Description-Content-Type: text/markdown + +# HTTP Core + +[![Test Suite](https://github.com/encode/httpcore/workflows/Test%20Suite/badge.svg)](https://github.com/encode/httpcore/actions) +[![Package version](https://badge.fury.io/py/httpcore.svg)](https://pypi.org/project/httpcore/) + +> *Do one thing, and do it well.* + +The HTTP Core package provides a minimal low-level HTTP client, which does +one thing only. Sending HTTP requests. + +It does not provide any high level model abstractions over the API, +does not handle redirects, multipart uploads, building authentication headers, +transparent HTTP caching, URL parsing, session cookie handling, +content or charset decoding, handling JSON, environment based configuration +defaults, or any of that Jazz. + +Some things HTTP Core does do: + +* Sending HTTP requests. +* Thread-safe / task-safe connection pooling. +* HTTP(S) proxy & SOCKS proxy support. +* Supports HTTP/1.1 and HTTP/2. +* Provides both sync and async interfaces. +* Async backend support for `asyncio` and `trio`. + +## Requirements + +Python 3.8+ + +## Installation + +For HTTP/1.1 only support, install with: + +```shell +$ pip install httpcore +``` + +There are also a number of optional extras available... + +```shell +$ pip install httpcore['asyncio,trio,http2,socks'] +``` + +# Sending requests + +Send an HTTP request: + +```python +import httpcore + +response = httpcore.request("GET", "https://www.example.com/") + +print(response) +# +print(response.status) +# 200 +print(response.headers) +# [(b'Accept-Ranges', b'bytes'), (b'Age', b'557328'), (b'Cache-Control', b'max-age=604800'), ...] +print(response.content) +# b'\n\n\nExample Domain\n\n\n ...' 
+``` + +The top-level `httpcore.request()` function is provided for convenience. In practice whenever you're working with `httpcore` you'll want to use the connection pooling functionality that it provides. + +```python +import httpcore + +http = httpcore.ConnectionPool() +response = http.request("GET", "https://www.example.com/") +``` + +Once you're ready to get going, [head over to the documentation](https://www.encode.io/httpcore/). + +## Motivation + +You *probably* don't want to be using HTTP Core directly. It might make sense if +you're writing something like a proxy service in Python, and you just want +something at the lowest possible level, but more typically you'll want to use +a higher level client library, such as `httpx`. + +The motivation for `httpcore` is: + +* To provide a reusable low-level client library, that other packages can then build on top of. +* To provide a *really clear interface split* between the networking code and client logic, + so that each is easier to understand and reason about in isolation. + +## Dependencies + +The `httpcore` package has the following dependencies... + +* `h11` +* `certifi` + +And the following optional extras... + +* `anyio` - Required by `pip install httpcore['asyncio']`. +* `trio` - Required by `pip install httpcore['trio']`. +* `h2` - Required by `pip install httpcore['http2']`. +* `socksio` - Required by `pip install httpcore['socks']`. + +## Versioning + +We use [SEMVER for our versioning policy](https://semver.org/). + +For changes between package versions please see our [project changelog](CHANGELOG.md). + +We recommend pinning your requirements either the most current major version, or a more specific version range: + +```python +pip install 'httpcore==1.*' +``` +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). 
+ +## 1.0.5 (March 27th, 2024) + +- Handle `EndOfStream` exception for anyio backend. (#899) +- Allow trio `0.25.*` series in package dependancies. (#903) + +## 1.0.4 (February 21st, 2024) + +- Add `target` request extension. (#888) +- Fix support for connection `Upgrade` and `CONNECT` when some data in the stream has been read. (#882) + +## 1.0.3 (February 13th, 2024) + +- Fix support for async cancellations. (#880) +- Fix trace extension when used with socks proxy. (#849) +- Fix SSL context for connections using the "wss" scheme (#869) + +## 1.0.2 (November 10th, 2023) + +- Fix `float("inf")` timeouts in `Event.wait` function. (#846) + +## 1.0.1 (November 3rd, 2023) + +- Fix pool timeout to account for the total time spent retrying. (#823) +- Raise a neater RuntimeError when the correct async deps are not installed. (#826) +- Add support for synchronous TLS-in-TLS streams. (#840) + +## 1.0.0 (October 6th, 2023) + +From version 1.0 our async support is now optional, as the package has minimal dependencies by default. + +For async support use either `pip install 'httpcore[asyncio]'` or `pip install 'httpcore[trio]'`. + +The project versioning policy is now explicitly governed by SEMVER. See https://semver.org/. + +- Async support becomes fully optional. (#809) +- Add support for Python 3.12. (#807) + +## 0.18.0 (September 8th, 2023) + +- Add support for HTTPS proxies. (#745, #786) +- Drop Python 3.7 support. (#727) +- Handle `sni_hostname` extension with SOCKS proxy. (#774) +- Handle HTTP/1.1 half-closed connections gracefully. (#641) +- Change the type of `Extensions` from `Mapping[Str, Any]` to `MutableMapping[Str, Any]`. (#762) + +## 0.17.3 (July 5th, 2023) + +- Support async cancellations, ensuring that the connection pool is left in a clean state when cancellations occur. (#726) +- The networking backend interface has [been added to the public API](https://www.encode.io/httpcore/network-backends). 
Some classes which were previously private implementation detail are now part of the top-level public API. (#699) +- Graceful handling of HTTP/2 GoAway frames, with requests being transparently retried on a new connection. (#730) +- Add exceptions when a synchronous `trace callback` is passed to an asynchronous request or an asynchronous `trace callback` is passed to a synchronous request. (#717) +- Drop Python 3.7 support. (#727) + +## 0.17.2 (May 23th, 2023) + +- Add `socket_options` argument to `ConnectionPool` and `HTTProxy` classes. (#668) +- Improve logging with per-module logger names. (#690) +- Add `sni_hostname` request extension. (#696) +- Resolve race condition during import of `anyio` package. (#692) +- Enable TCP_NODELAY for all synchronous sockets. (#651) + +## 0.17.1 (May 17th, 2023) + +- If 'retries' is set, then allow retries if an SSL handshake error occurs. (#669) +- Improve correctness of tracebacks on network exceptions, by raising properly chained exceptions. (#678) +- Prevent connection-hanging behaviour when HTTP/2 connections are closed by a server-sent 'GoAway' frame. (#679) +- Fix edge-case exception when removing requests from the connection pool. (#680) +- Fix pool timeout edge-case. (#688) + +## 0.17.0 (March 16th, 2023) + +- Add DEBUG level logging. (#648) +- Respect HTTP/2 max concurrent streams when settings updates are sent by server. (#652) +- Increase the allowable HTTP header size to 100kB. (#647) +- Add `retries` option to SOCKS proxy classes. (#643) + +## 0.16.3 (December 20th, 2022) + +- Allow `ws` and `wss` schemes. Allows us to properly support websocket upgrade connections. (#625) +- Forwarding HTTP proxies use a connection-per-remote-host. Required by some proxy implementations. (#637) +- Don't raise `RuntimeError` when closing a connection pool with active connections. Removes some error cases when cancellations are used. 
(#631) +- Lazy import `anyio`, so that it's no longer a hard dependancy, and isn't imported if unused. (#639) + +## 0.16.2 (November 25th, 2022) + +- Revert 'Fix async cancellation behaviour', which introduced race conditions. (#627) +- Raise `RuntimeError` if attempting to us UNIX domain sockets on Windows. (#619) + +## 0.16.1 (November 17th, 2022) + +- Fix HTTP/1.1 interim informational responses, such as "100 Continue". (#605) + +## 0.16.0 (October 11th, 2022) + +- Support HTTP/1.1 informational responses. (#581) +- Fix async cancellation behaviour. (#580) +- Support `h11` 0.14. (#579) + +## 0.15.0 (May 17th, 2022) + +- Drop Python 3.6 support (#535) +- Ensure HTTP proxy CONNECT requests include `timeout` configuration. (#506) +- Switch to explicit `typing.Optional` for type hints. (#513) +- For `trio` map OSError exceptions to `ConnectError`. (#543) + +## 0.14.7 (February 4th, 2022) + +- Requests which raise a PoolTimeout need to be removed from the pool queue. (#502) +- Fix AttributeError that happened when Socks5Connection were terminated. (#501) + +## 0.14.6 (February 1st, 2022) + +- Fix SOCKS support for `http://` URLs. (#492) +- Resolve race condition around exceptions during streaming a response. (#491) + +## 0.14.5 (January 18th, 2022) + +- SOCKS proxy support. (#478) +- Add proxy_auth argument to HTTPProxy. (#481) +- Improve error message on 'RemoteProtocolError' exception when server disconnects without sending a response. (#479) + +## 0.14.4 (January 5th, 2022) + +- Support HTTP/2 on HTTPS tunnelling proxies. (#468) +- Fix proxy headers missing on HTTP forwarding. (#456) +- Only instantiate SSL context if required. (#457) +- More robust HTTP/2 handling. (#253, #439, #440, #441) + +## 0.14.3 (November 17th, 2021) + +- Fix race condition when removing closed connections from the pool. (#437) + +## 0.14.2 (November 16th, 2021) + +- Failed connections no longer remain in the pool. 
(Pull #433) + +## 0.14.1 (November 12th, 2021) + +- `max_connections` becomes optional. (Pull #429) +- `certifi` is now included in the install dependancies. (Pull #428) +- `h2` is now strictly optional. (Pull #428) + +## 0.14.0 (November 11th, 2021) + +The 0.14 release is a complete reworking of `httpcore`, comprehensively addressing some underlying issues in the connection pooling, as well as substantially redesigning the API to be more user friendly. + +Some of the lower-level API design also makes the components more easily testable in isolation, and the package now has 100% test coverage. + +See [discussion #419](https://github.com/encode/httpcore/discussions/419) for a little more background. + +There's some other neat bits in there too, such as the "trace" extension, which gives a hook into inspecting the internal events that occur during the request/response cycle. This extension is needed for the HTTPX cli, in order to... + +* Log the point at which the connection is established, and the IP/port on which it is made. +* Determine if the outgoing request should log as HTTP/1.1 or HTTP/2, rather than having to assume it's HTTP/2 if the --http2 flag was passed. (Which may not actually be true.) +* Log SSL version info / certificate info. + +Note that `curio` support is not currently available in 0.14.0. If you're using `httpcore` with `curio` please get in touch, so we can assess if we ought to prioritize it as a feature or not. + +## 0.13.7 (September 13th, 2021) + +- Fix broken error messaging when URL scheme is missing, or a non HTTP(S) scheme is used. (Pull #403) + +## 0.13.6 (June 15th, 2021) + +### Fixed + +- Close sockets when read or write timeouts occur. (Pull #365) + +## 0.13.5 (June 14th, 2021) + +### Fixed + +- Resolved niggles with AnyIO EOF behaviours. (Pull #358, #362) + +## 0.13.4 (June 9th, 2021) + +### Added + +- Improved error messaging when URL scheme is missing, or a non HTTP(S) scheme is used. 
(Pull #354) + +### Fixed + +- Switched to `anyio` as the default backend implementation when running with `asyncio`. Resolves some awkward [TLS timeout issues](https://github.com/encode/httpx/discussions/1511). + +## 0.13.3 (May 6th, 2021) + +### Added + +- Support HTTP/2 prior knowledge, using `httpcore.SyncConnectionPool(http1=False)`. (Pull #333) + +### Fixed + +- Handle cases where environment does not provide `select.poll` support. (Pull #331) + +## 0.13.2 (April 29th, 2021) + +### Added + +- Improve error message for specific case of `RemoteProtocolError` where server disconnects without sending a response. (Pull #313) + +## 0.13.1 (April 28th, 2021) + +### Fixed + +- More resiliant testing for closed connections. (Pull #311) +- Don't raise exceptions on ungraceful connection closes. (Pull #310) + +## 0.13.0 (April 21st, 2021) + +The 0.13 release updates the core API in order to match the HTTPX Transport API, +introduced in HTTPX 0.18 onwards. + +An example of making requests with the new interface is: + +```python +with httpcore.SyncConnectionPool() as http: + status_code, headers, stream, extensions = http.handle_request( + method=b'GET', + url=(b'https', b'example.org', 443, b'/'), + headers=[(b'host', b'example.org'), (b'user-agent', b'httpcore')] + stream=httpcore.ByteStream(b''), + extensions={} + ) + body = stream.read() + print(status_code, body) +``` + +### Changed + +- The `.request()` method is now `handle_request()`. (Pull #296) +- The `.arequest()` method is now `.handle_async_request()`. (Pull #296) +- The `headers` argument is no longer optional. (Pull #296) +- The `stream` argument is no longer optional. (Pull #296) +- The `ext` argument is now named `extensions`, and is no longer optional. (Pull #296) +- The `"reason"` extension keyword is now named `"reason_phrase"`. (Pull #296) +- The `"reason_phrase"` and `"http_version"` extensions now use byte strings for their values. 
(Pull #296) +- The `httpcore.PlainByteStream()` class becomes `httpcore.ByteStream()`. (Pull #296) + +### Added + +- Streams now support a `.read()` interface. (Pull #296) + +### Fixed + +- Task cancellation no longer leaks connections from the connection pool. (Pull #305) + +## 0.12.3 (December 7th, 2020) + +### Fixed + +- Abort SSL connections on close rather than waiting for remote EOF when using `asyncio`. (Pull #167) +- Fix exception raised in case of connect timeouts when using the `anyio` backend. (Pull #236) +- Fix `Host` header precedence for `:authority` in HTTP/2. (Pull #241, #243) +- Handle extra edge case when detecting for socket readability when using `asyncio`. (Pull #242, #244) +- Fix `asyncio` SSL warning when using proxy tunneling. (Pull #249) + +## 0.12.2 (November 20th, 2020) + +### Fixed + +- Properly wrap connect errors on the asyncio backend. (Pull #235) +- Fix `ImportError` occurring on Python 3.9 when using the HTTP/1.1 sync client in a multithreaded context. (Pull #237) + +## 0.12.1 (November 7th, 2020) + +### Added + +- Add connect retries. (Pull #221) + +### Fixed + +- Tweak detection of dropped connections, resolving an issue with open files limits on Linux. (Pull #185) +- Avoid leaking connections when establishing an HTTP tunnel to a proxy has failed. (Pull #223) +- Properly wrap OS errors when using `trio`. (Pull #225) + +## 0.12.0 (October 6th, 2020) + +### Changed + +- HTTP header casing is now preserved, rather than always sent in lowercase. (#216 and python-hyper/h11#104) + +### Added + +- Add Python 3.9 to officially supported versions. + +### Fixed + +- Gracefully handle a stdlib asyncio bug when a connection is closed while it is in a paused-for-reading state. (#201) + +## 0.11.1 (September 28nd, 2020) + +### Fixed + +- Add await to async semaphore release() coroutine (#197) +- Drop incorrect curio classifier (#192) + +## 0.11.0 (September 22nd, 2020) + +The Transport API with 0.11.0 has a couple of significant changes. 
+ +Firstly we've moved changed the request interface in order to allow extensions, which will later enable us to support features +such as trailing headers, HTTP/2 server push, and CONNECT/Upgrade connections. + +The interface changes from: + +```python +def request(method, url, headers, stream, timeout): + return (http_version, status_code, reason, headers, stream) +``` + +To instead including an optional dictionary of extensions on the request and response: + +```python +def request(method, url, headers, stream, ext): + return (status_code, headers, stream, ext) +``` + +Having an open-ended extensions point will allow us to add later support for various optional features, that wouldn't otherwise be supported without these API changes. + +In particular: + +* Trailing headers support. +* HTTP/2 Server Push +* sendfile. +* Exposing raw connection on CONNECT, Upgrade, HTTP/2 bi-di streaming. +* Exposing debug information out of the API, including template name, template context. + +Currently extensions are limited to: + +* request: `timeout` - Optional. Timeout dictionary. +* response: `http_version` - Optional. Include the HTTP version used on the response. +* response: `reason` - Optional. Include the reason phrase used on the response. Only valid with HTTP/1.*. + +See https://github.com/encode/httpx/issues/1274#issuecomment-694884553 for the history behind this. + +Secondly, the async version of `request` is now namespaced as `arequest`. + +This allows concrete transports to support both sync and async implementations on the same class. + +### Added + +- Add curio support. (Pull #168) +- Add anyio support, with `backend="anyio"`. (Pull #169) + +### Changed + +- Update the Transport API to use 'ext' for optional extensions. (Pull #190) +- Update the Transport API to use `.request` and `.arequest` so implementations can support both sync and async. (Pull #189) + +## 0.10.2 (August 20th, 2020) + +### Added + +- Added Unix Domain Socket support. 
(Pull #139) + +### Fixed + +- Always include the port on proxy CONNECT requests. (Pull #154) +- Fix `max_keepalive_connections` configuration. (Pull #153) +- Fixes behaviour in HTTP/1.1 where server disconnects can be used to signal the end of the response body. (Pull #164) + +## 0.10.1 (August 7th, 2020) + +- Include `max_keepalive_connections` on `AsyncHTTPProxy`/`SyncHTTPProxy` classes. + +## 0.10.0 (August 7th, 2020) + +The most notable change in the 0.10.0 release is that HTTP/2 support is now fully optional. + +Use either `pip install httpcore` for HTTP/1.1 support only, or `pip install httpcore[http2]` for HTTP/1.1 and HTTP/2 support. + +### Added + +- HTTP/2 support becomes optional. (Pull #121, #130) +- Add `local_address=...` support. (Pull #100, #134) +- Add `PlainByteStream`, `IteratorByteStream`, `AsyncIteratorByteStream`. The `AsyncByteSteam` and `SyncByteStream` classes are now pure interface classes. (#133) +- Add `LocalProtocolError`, `RemoteProtocolError` exceptions. (Pull #129) +- Add `UnsupportedProtocol` exception. (Pull #128) +- Add `.get_connection_info()` method. (Pull #102, #137) +- Add better TRACE logs. (Pull #101) + +### Changed + +- `max_keepalive` is deprecated in favour of `max_keepalive_connections`. (Pull #140) + +### Fixed + +- Improve handling of server disconnects. (Pull #112) + +## 0.9.1 (May 27th, 2020) + +### Fixed + +- Proper host resolution for sync case, including IPv6 support. (Pull #97) +- Close outstanding connections when connection pool is closed. (Pull #98) + +## 0.9.0 (May 21th, 2020) + +### Changed + +- URL port becomes an `Optional[int]` instead of `int`. (Pull #92) + +### Fixed + +- Honor HTTP/2 max concurrent streams settings. (Pull #89, #90) +- Remove incorrect debug log. (Pull #83) + +## 0.8.4 (May 11th, 2020) + +### Added + +- Logging via HTTPCORE_LOG_LEVEL and HTTPX_LOG_LEVEL environment variables +and TRACE level logging. 
(Pull #79) + +### Fixed + +- Reuse of connections on HTTP/2 in close concurrency situations. (Pull #81) + +## 0.8.3 (May 6rd, 2020) + +### Fixed + +- Include `Host` and `Accept` headers on proxy "CONNECT" requests. +- De-duplicate any headers also contained in proxy_headers. +- HTTP/2 flag not being passed down to proxy connections. + +## 0.8.2 (May 3rd, 2020) + +### Fixed + +- Fix connections using proxy forwarding requests not being added to the +connection pool properly. (Pull #70) + +## 0.8.1 (April 30th, 2020) + +### Changed + +- Allow inherintance of both `httpcore.AsyncByteStream`, `httpcore.SyncByteStream` without type conflicts. + +## 0.8.0 (April 30th, 2020) + +### Fixed + +- Fixed tunnel proxy support. + +### Added + +- New `TimeoutException` base class. + +## 0.7.0 (March 5th, 2020) + +- First integration with HTTPX. diff --git a/.venv/Lib/site-packages/httpcore-1.0.5.dist-info/RECORD b/.venv/Lib/site-packages/httpcore-1.0.5.dist-info/RECORD new file mode 100644 index 00000000..4ec7e4e7 --- /dev/null +++ b/.venv/Lib/site-packages/httpcore-1.0.5.dist-info/RECORD @@ -0,0 +1,68 @@ +httpcore-1.0.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +httpcore-1.0.5.dist-info/METADATA,sha256=YgW4guGB2OBYCvLOVSIgUoXRbjO-aWDbHXnYIUaJzTU,20969 +httpcore-1.0.5.dist-info/RECORD,, +httpcore-1.0.5.dist-info/WHEEL,sha256=uNdcs2TADwSd5pVaP0Z_kcjcvvTUklh2S7bxZMF8Uj0,87 +httpcore-1.0.5.dist-info/licenses/LICENSE.md,sha256=_ctZFUx0y6uhahEkL3dAvqnyPW_rVUeRfYxflKgDkqU,1518 +httpcore/__init__.py,sha256=fBzHStfNOukxTWXQFx6W_fv0MAcKntslGJqhHliu45E,3337 +httpcore/__pycache__/__init__.cpython-311.pyc,, +httpcore/__pycache__/_api.cpython-311.pyc,, +httpcore/__pycache__/_exceptions.cpython-311.pyc,, +httpcore/__pycache__/_models.cpython-311.pyc,, +httpcore/__pycache__/_ssl.cpython-311.pyc,, +httpcore/__pycache__/_synchronization.cpython-311.pyc,, +httpcore/__pycache__/_trace.cpython-311.pyc,, +httpcore/__pycache__/_utils.cpython-311.pyc,, 
+httpcore/_api.py,sha256=IBR18qZQ8ETcghJXC1Gd-30WuKYRS0EyF2eC80_OBQ8,3167 +httpcore/_async/__init__.py,sha256=EWdl2v4thnAHzJpqjU4h2a8DUiGAvNiWrkii9pfhTf0,1221 +httpcore/_async/__pycache__/__init__.cpython-311.pyc,, +httpcore/_async/__pycache__/connection.cpython-311.pyc,, +httpcore/_async/__pycache__/connection_pool.cpython-311.pyc,, +httpcore/_async/__pycache__/http11.cpython-311.pyc,, +httpcore/_async/__pycache__/http2.cpython-311.pyc,, +httpcore/_async/__pycache__/http_proxy.cpython-311.pyc,, +httpcore/_async/__pycache__/interfaces.cpython-311.pyc,, +httpcore/_async/__pycache__/socks_proxy.cpython-311.pyc,, +httpcore/_async/connection.py,sha256=63vgzLIgX3bjq-RsjK68UWS_DWNkdvnKP0vJRK3Prfs,8484 +httpcore/_async/connection_pool.py,sha256=vjyIrwkj3QfzndhXOBrlyplJidWnhrj0foAmIpSj5tA,15609 +httpcore/_async/http11.py,sha256=yvohHUXwdJv9gN-dEJv4C5F8_NiyOtcflua1Q3BRjew,13978 +httpcore/_async/http2.py,sha256=_AgUDRcjAIlbncbOjW0I-iqYN1PDgRcFUIGfzZ2fKcI,23881 +httpcore/_async/http_proxy.py,sha256=hl4t-PahlAuCGtKNYRx4LSgjx1ZuspE9oDBaL6BOess,14851 +httpcore/_async/interfaces.py,sha256=J2iq9rs7x3nKS6iCfntjHY0Woast6V_HuXuE8rs3HmA,4486 +httpcore/_async/socks_proxy.py,sha256=T8y927RATyy4A9GMduRVUh13ZeRq8Ts8JP24bFVQ6n8,13934 +httpcore/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +httpcore/_backends/__pycache__/__init__.cpython-311.pyc,, +httpcore/_backends/__pycache__/anyio.cpython-311.pyc,, +httpcore/_backends/__pycache__/auto.cpython-311.pyc,, +httpcore/_backends/__pycache__/base.cpython-311.pyc,, +httpcore/_backends/__pycache__/mock.cpython-311.pyc,, +httpcore/_backends/__pycache__/sync.cpython-311.pyc,, +httpcore/_backends/__pycache__/trio.cpython-311.pyc,, +httpcore/_backends/anyio.py,sha256=3SbX3SMTwT4SYmoTWbzWazwZg0mfQ3jDR8lI_n9JKr8,5295 +httpcore/_backends/auto.py,sha256=Q_iQjNuwJseqBxeYJYtiaGzFs08_LGI3K_egYrixEqE,1683 +httpcore/_backends/base.py,sha256=Qsb8b_PSiVP1ldHHGXHxQzJ1Qlzj2r8KR9KQeANkSbE,3218 
+httpcore/_backends/mock.py,sha256=S4IADhC6kE22ge_jR_WHlEUkD6QAsXnwz26DSWZLcG4,4179 +httpcore/_backends/sync.py,sha256=LAomvc-MAlot5-S9CCFxnr561aDp9yhyfs_65WeCkZ4,8086 +httpcore/_backends/trio.py,sha256=INOeHEkA8pO6AsSqjColWcayM0FQSyGi1hpaQghjrCs,6078 +httpcore/_exceptions.py,sha256=7zb3KNiG0qmfUNIdFgdaUSbn2Pu3oztghi6Vg7i-LJU,1185 +httpcore/_models.py,sha256=7DlYrkyc2z-orQrnztCUmtBY4gLMz18FjPP9e5Q-fFg,16614 +httpcore/_ssl.py,sha256=srqmSNU4iOUvWF-SrJvb8G_YEbHFELOXQOwdDIBTS9c,187 +httpcore/_sync/__init__.py,sha256=JBDIgXt5la1LCJ1sLQeKhjKFpLnpNr8Svs6z2ni3fgg,1141 +httpcore/_sync/__pycache__/__init__.cpython-311.pyc,, +httpcore/_sync/__pycache__/connection.cpython-311.pyc,, +httpcore/_sync/__pycache__/connection_pool.cpython-311.pyc,, +httpcore/_sync/__pycache__/http11.cpython-311.pyc,, +httpcore/_sync/__pycache__/http2.cpython-311.pyc,, +httpcore/_sync/__pycache__/http_proxy.cpython-311.pyc,, +httpcore/_sync/__pycache__/interfaces.cpython-311.pyc,, +httpcore/_sync/__pycache__/socks_proxy.cpython-311.pyc,, +httpcore/_sync/connection.py,sha256=n7YFLjYsRv4cf0CXEIqNsUqR_NPNvFQN8dGqjj0mv9U,8273 +httpcore/_sync/connection_pool.py,sha256=lglrm_FWU9J_fwCNFqgcaWCebgZw49V7KhcS0wBZ-Ok,15277 +httpcore/_sync/http11.py,sha256=9-IgEawTTbkHuOE8O3LODhp3KCJ4tAo5vmyA4UE66pU,13564 +httpcore/_sync/http2.py,sha256=_fPbMtCAVqGXKFYo3OmNNkucDuVTF69vMEbSFE2Jodo,23345 +httpcore/_sync/http_proxy.py,sha256=82oin8vjt2a7YmmVvz7sXEZSBuajK-mHDF-EwnR_pJ0,14613 +httpcore/_sync/interfaces.py,sha256=EM4PTf-rgkclzisFcrTyx1G8FwraoffE8rbckOznX_o,4365 +httpcore/_sync/socks_proxy.py,sha256=T13QSceeEAg1PM9Yh7Nk-DoqI28TIUqDS-9O3OSC9Uc,13707 +httpcore/_synchronization.py,sha256=HjyPscK40YPQ-_nTcoBXd3S6IqbHTNuw7lTaqtgA3s4,9464 +httpcore/_trace.py,sha256=akf5PsWVq3rZjqmXniomU59OY37K7JHoeNDCQ4GU84E,3954 +httpcore/_utils.py,sha256=9QPh5ib4JilWX4dBCC_XO6wdBY4b0kbUGgfV3QfBANc,1525 +httpcore/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git 
a/.venv/Lib/site-packages/httpcore-1.0.5.dist-info/WHEEL b/.venv/Lib/site-packages/httpcore-1.0.5.dist-info/WHEEL new file mode 100644 index 00000000..0309176f --- /dev/null +++ b/.venv/Lib/site-packages/httpcore-1.0.5.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.22.4 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.venv/Lib/site-packages/httpcore-1.0.5.dist-info/licenses/LICENSE.md b/.venv/Lib/site-packages/httpcore-1.0.5.dist-info/licenses/LICENSE.md new file mode 100644 index 00000000..311b2b56 --- /dev/null +++ b/.venv/Lib/site-packages/httpcore-1.0.5.dist-info/licenses/LICENSE.md @@ -0,0 +1,27 @@ +Copyright © 2020, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.venv/Lib/site-packages/httpcore/__init__.py b/.venv/Lib/site-packages/httpcore/__init__.py new file mode 100644 index 00000000..014213ba --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/__init__.py @@ -0,0 +1,139 @@ +from ._api import request, stream +from ._async import ( + AsyncConnectionInterface, + AsyncConnectionPool, + AsyncHTTP2Connection, + AsyncHTTP11Connection, + AsyncHTTPConnection, + AsyncHTTPProxy, + AsyncSOCKSProxy, +) +from ._backends.base import ( + SOCKET_OPTION, + AsyncNetworkBackend, + AsyncNetworkStream, + NetworkBackend, + NetworkStream, +) +from ._backends.mock import AsyncMockBackend, AsyncMockStream, MockBackend, MockStream +from ._backends.sync import SyncBackend +from ._exceptions import ( + ConnectError, + ConnectionNotAvailable, + ConnectTimeout, + LocalProtocolError, + NetworkError, + PoolTimeout, + ProtocolError, + ProxyError, + ReadError, + ReadTimeout, + RemoteProtocolError, + TimeoutException, + UnsupportedProtocol, + WriteError, + WriteTimeout, +) +from ._models import URL, Origin, Request, Response +from ._ssl import default_ssl_context +from ._sync import ( + ConnectionInterface, + ConnectionPool, + HTTP2Connection, + HTTP11Connection, + HTTPConnection, + HTTPProxy, + SOCKSProxy, +) + +# The 'httpcore.AnyIOBackend' class is conditional on 'anyio' being installed. 
+try: + from ._backends.anyio import AnyIOBackend +except ImportError: # pragma: nocover + + class AnyIOBackend: # type: ignore + def __init__(self, *args, **kwargs): # type: ignore + msg = ( + "Attempted to use 'httpcore.AnyIOBackend' but 'anyio' is not installed." + ) + raise RuntimeError(msg) + + +# The 'httpcore.TrioBackend' class is conditional on 'trio' being installed. +try: + from ._backends.trio import TrioBackend +except ImportError: # pragma: nocover + + class TrioBackend: # type: ignore + def __init__(self, *args, **kwargs): # type: ignore + msg = "Attempted to use 'httpcore.TrioBackend' but 'trio' is not installed." + raise RuntimeError(msg) + + +__all__ = [ + # top-level requests + "request", + "stream", + # models + "Origin", + "URL", + "Request", + "Response", + # async + "AsyncHTTPConnection", + "AsyncConnectionPool", + "AsyncHTTPProxy", + "AsyncHTTP11Connection", + "AsyncHTTP2Connection", + "AsyncConnectionInterface", + "AsyncSOCKSProxy", + # sync + "HTTPConnection", + "ConnectionPool", + "HTTPProxy", + "HTTP11Connection", + "HTTP2Connection", + "ConnectionInterface", + "SOCKSProxy", + # network backends, implementations + "SyncBackend", + "AnyIOBackend", + "TrioBackend", + # network backends, mock implementations + "AsyncMockBackend", + "AsyncMockStream", + "MockBackend", + "MockStream", + # network backends, interface + "AsyncNetworkStream", + "AsyncNetworkBackend", + "NetworkStream", + "NetworkBackend", + # util + "default_ssl_context", + "SOCKET_OPTION", + # exceptions + "ConnectionNotAvailable", + "ProxyError", + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", + "UnsupportedProtocol", + "TimeoutException", + "PoolTimeout", + "ConnectTimeout", + "ReadTimeout", + "WriteTimeout", + "NetworkError", + "ConnectError", + "ReadError", + "WriteError", +] + +__version__ = "1.0.5" + + +__locals = locals() +for __name in __all__: + if not __name.startswith("__"): + setattr(__locals[__name], "__module__", "httpcore") # noqa diff --git 
a/.venv/Lib/site-packages/httpcore/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..b6f729d4 Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/__pycache__/_api.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/__pycache__/_api.cpython-311.pyc new file mode 100644 index 00000000..99096f3c Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/__pycache__/_api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/__pycache__/_exceptions.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/__pycache__/_exceptions.cpython-311.pyc new file mode 100644 index 00000000..3ad25250 Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/__pycache__/_exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/__pycache__/_models.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/__pycache__/_models.cpython-311.pyc new file mode 100644 index 00000000..fff20bee Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/__pycache__/_models.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/__pycache__/_ssl.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/__pycache__/_ssl.cpython-311.pyc new file mode 100644 index 00000000..ff4a467a Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/__pycache__/_ssl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/__pycache__/_synchronization.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/__pycache__/_synchronization.cpython-311.pyc new file mode 100644 index 00000000..88ac3117 Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/__pycache__/_synchronization.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/__pycache__/_trace.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/__pycache__/_trace.cpython-311.pyc new file 
mode 100644 index 00000000..2208572d Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/__pycache__/_trace.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/__pycache__/_utils.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/__pycache__/_utils.cpython-311.pyc new file mode 100644 index 00000000..6c831dc3 Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/__pycache__/_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_api.py b/.venv/Lib/site-packages/httpcore/_api.py new file mode 100644 index 00000000..854235f5 --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_api.py @@ -0,0 +1,92 @@ +from contextlib import contextmanager +from typing import Iterator, Optional, Union + +from ._models import URL, Extensions, HeaderTypes, Response +from ._sync.connection_pool import ConnectionPool + + +def request( + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, Iterator[bytes], None] = None, + extensions: Optional[Extensions] = None, +) -> Response: + """ + Sends an HTTP request, returning the response. + + ``` + response = httpcore.request("GET", "https://www.example.com/") + ``` + + Arguments: + method: The HTTP method for the request. Typically one of `"GET"`, + `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`. + url: The URL of the HTTP request. Either as an instance of `httpcore.URL`, + or as str/bytes. + headers: The HTTP request headers. Either as a dictionary of str/bytes, + or as a list of two-tuples of str/bytes. + content: The content of the request body. Either as bytes, + or as a bytes iterator. + extensions: A dictionary of optional extra information included on the request. + Possible keys include `"timeout"`. + + Returns: + An instance of `httpcore.Response`. 
+ """ + with ConnectionPool() as pool: + return pool.request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + + +@contextmanager +def stream( + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, Iterator[bytes], None] = None, + extensions: Optional[Extensions] = None, +) -> Iterator[Response]: + """ + Sends an HTTP request, returning the response within a content manager. + + ``` + with httpcore.stream("GET", "https://www.example.com/") as response: + ... + ``` + + When using the `stream()` function, the body of the response will not be + automatically read. If you want to access the response body you should + either use `content = response.read()`, or `for chunk in response.iter_content()`. + + Arguments: + method: The HTTP method for the request. Typically one of `"GET"`, + `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`. + url: The URL of the HTTP request. Either as an instance of `httpcore.URL`, + or as str/bytes. + headers: The HTTP request headers. Either as a dictionary of str/bytes, + or as a list of two-tuples of str/bytes. + content: The content of the request body. Either as bytes, + or as a bytes iterator. + extensions: A dictionary of optional extra information included on the request. + Possible keys include `"timeout"`. + + Returns: + An instance of `httpcore.Response`. 
+ """ + with ConnectionPool() as pool: + with pool.stream( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) as response: + yield response diff --git a/.venv/Lib/site-packages/httpcore/_async/__init__.py b/.venv/Lib/site-packages/httpcore/_async/__init__.py new file mode 100644 index 00000000..88dc7f01 --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_async/__init__.py @@ -0,0 +1,39 @@ +from .connection import AsyncHTTPConnection +from .connection_pool import AsyncConnectionPool +from .http11 import AsyncHTTP11Connection +from .http_proxy import AsyncHTTPProxy +from .interfaces import AsyncConnectionInterface + +try: + from .http2 import AsyncHTTP2Connection +except ImportError: # pragma: nocover + + class AsyncHTTP2Connection: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use http2 support, but the `h2` package is not " + "installed. Use 'pip install httpcore[http2]'." + ) + + +try: + from .socks_proxy import AsyncSOCKSProxy +except ImportError: # pragma: nocover + + class AsyncSOCKSProxy: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use SOCKS support, but the `socksio` package is not " + "installed. Use 'pip install httpcore[socks]'." 
+ ) + + +__all__ = [ + "AsyncHTTPConnection", + "AsyncConnectionPool", + "AsyncHTTPProxy", + "AsyncHTTP11Connection", + "AsyncHTTP2Connection", + "AsyncConnectionInterface", + "AsyncSOCKSProxy", +] diff --git a/.venv/Lib/site-packages/httpcore/_async/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/_async/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..22243f83 Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_async/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_async/__pycache__/connection.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/_async/__pycache__/connection.cpython-311.pyc new file mode 100644 index 00000000..6c36a249 Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_async/__pycache__/connection.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-311.pyc new file mode 100644 index 00000000..9abc2439 Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_async/__pycache__/http11.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/_async/__pycache__/http11.cpython-311.pyc new file mode 100644 index 00000000..1abea181 Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_async/__pycache__/http11.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_async/__pycache__/http2.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/_async/__pycache__/http2.cpython-311.pyc new file mode 100644 index 00000000..b90fd274 Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_async/__pycache__/http2.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-311.pyc 
b/.venv/Lib/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-311.pyc new file mode 100644 index 00000000..2f2f1a9d Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_async/__pycache__/interfaces.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/_async/__pycache__/interfaces.cpython-311.pyc new file mode 100644 index 00000000..3766a582 Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_async/__pycache__/interfaces.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_async/__pycache__/socks_proxy.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/_async/__pycache__/socks_proxy.cpython-311.pyc new file mode 100644 index 00000000..f2130057 Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_async/__pycache__/socks_proxy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_async/connection.py b/.venv/Lib/site-packages/httpcore/_async/connection.py new file mode 100644 index 00000000..2f439cf0 --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_async/connection.py @@ -0,0 +1,220 @@ +import itertools +import logging +import ssl +from types import TracebackType +from typing import Iterable, Iterator, Optional, Type + +from .._backends.auto import AutoBackend +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream +from .._exceptions import ConnectError, ConnectTimeout +from .._models import Origin, Request, Response +from .._ssl import default_ssl_context +from .._synchronization import AsyncLock +from .._trace import Trace +from .http11 import AsyncHTTP11Connection +from .interfaces import AsyncConnectionInterface + +RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. 
+ + +logger = logging.getLogger("httpcore.connection") + + +def exponential_backoff(factor: float) -> Iterator[float]: + """ + Generate a geometric sequence that has a ratio of 2 and starts with 0. + + For example: + - `factor = 2`: `0, 2, 4, 8, 16, 32, 64, ...` + - `factor = 3`: `0, 3, 6, 12, 24, 48, 96, ...` + """ + yield 0 + for n in itertools.count(): + yield factor * 2**n + + +class AsyncHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + origin: Origin, + ssl_context: Optional[ssl.SSLContext] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: Optional[str] = None, + uds: Optional[str] = None, + network_backend: Optional[AsyncNetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + self._origin = origin + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._network_backend: AsyncNetworkBackend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._connection: Optional[AsyncConnectionInterface] = None + self._connect_failed: bool = False + self._request_lock = AsyncLock() + self._socket_options = socket_options + + async def handle_async_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection to {self._origin}" + ) + + try: + async with self._request_lock: + if self._connection is None: + stream = await self._connect(request) + + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import AsyncHTTP2Connection + + self._connection = 
AsyncHTTP2Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except BaseException as exc: + self._connect_failed = True + raise exc + + return await self._connection.handle_async_request(request) + + async def _connect(self, request: Request) -> AsyncNetworkStream: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + retries_left = self._retries + delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) + + while True: + try: + if self._uds is None: + kwargs = { + "host": self._origin.host.decode("ascii"), + "port": self._origin.port, + "local_address": self._local_address, + "timeout": timeout, + "socket_options": self._socket_options, + } + async with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = await self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + else: + kwargs = { + "path": self._uds, + "timeout": timeout, + "socket_options": self._socket_options, + } + async with Trace( + "connect_unix_socket", logger, request, kwargs + ) as trace: + stream = await self._network_backend.connect_unix_socket( + **kwargs + ) + trace.return_value = stream + + if self._origin.scheme in (b"https", b"wss"): + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + return stream + except 
(ConnectError, ConnectTimeout): + if retries_left <= 0: + raise + retries_left -= 1 + delay = next(delays) + async with Trace("retry", logger, request, kwargs) as trace: + await self._network_backend.sleep(delay) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + async def aclose(self) -> None: + if self._connection is not None: + async with Trace("close", logger, None, {}): + await self._connection.aclose() + + def is_available(self) -> bool: + if self._connection is None: + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
+ + async def __aenter__(self) -> "AsyncHTTPConnection": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + await self.aclose() diff --git a/.venv/Lib/site-packages/httpcore/_async/connection_pool.py b/.venv/Lib/site-packages/httpcore/_async/connection_pool.py new file mode 100644 index 00000000..018b0ba2 --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_async/connection_pool.py @@ -0,0 +1,380 @@ +import ssl +import sys +from types import TracebackType +from typing import AsyncIterable, AsyncIterator, Iterable, List, Optional, Type + +from .._backends.auto import AutoBackend +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend +from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol +from .._models import Origin, Request, Response +from .._synchronization import AsyncEvent, AsyncShieldCancellation, AsyncThreadLock +from .connection import AsyncHTTPConnection +from .interfaces import AsyncConnectionInterface, AsyncRequestInterface + + +class AsyncPoolRequest: + def __init__(self, request: Request) -> None: + self.request = request + self.connection: Optional[AsyncConnectionInterface] = None + self._connection_acquired = AsyncEvent() + + def assign_to_connection( + self, connection: Optional[AsyncConnectionInterface] + ) -> None: + self.connection = connection + self._connection_acquired.set() + + def clear_connection(self) -> None: + self.connection = None + self._connection_acquired = AsyncEvent() + + async def wait_for_connection( + self, timeout: Optional[float] = None + ) -> AsyncConnectionInterface: + if self.connection is None: + await self._connection_acquired.wait(timeout=timeout) + assert self.connection is not None + return self.connection + + def is_queued(self) -> bool: + return self.connection is None + + +class AsyncConnectionPool(AsyncRequestInterface): + """ + A connection 
pool for making HTTP requests. + """ + + def __init__( + self, + ssl_context: Optional[ssl.SSLContext] = None, + max_connections: Optional[int] = 10, + max_keepalive_connections: Optional[int] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: Optional[str] = None, + uds: Optional[str] = None, + network_backend: Optional[AsyncNetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish a + connection. + local_address: Local address to connect from. Can also be used to connect + using a particular address family. Using `local_address="0.0.0.0"` + will connect using an `AF_INET` address (IPv4), while using + `local_address="::"` will connect using an `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. 
+ socket_options: Socket options that have to be included + in the TCP socket when the connection was established. + """ + self._ssl_context = ssl_context + + self._max_connections = ( + sys.maxsize if max_connections is None else max_connections + ) + self._max_keepalive_connections = ( + sys.maxsize + if max_keepalive_connections is None + else max_keepalive_connections + ) + self._max_keepalive_connections = min( + self._max_connections, self._max_keepalive_connections + ) + + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._network_backend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._socket_options = socket_options + + # The mutable state on a connection pool is the queue of incoming requests, + # and the set of connections that are servicing those requests. + self._connections: List[AsyncConnectionInterface] = [] + self._requests: List[AsyncPoolRequest] = [] + + # We only mutate the state of the connection pool within an 'optional_thread_lock' + # context. This holds a threading lock unless we're running in async mode, + # in which case it is a no-op. + self._optional_thread_lock = AsyncThreadLock() + + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: + return AsyncHTTPConnection( + origin=origin, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + retries=self._retries, + local_address=self._local_address, + uds=self._uds, + network_backend=self._network_backend, + socket_options=self._socket_options, + ) + + @property + def connections(self) -> List[AsyncConnectionInterface]: + """ + Return a list of the connections currently in the pool. 
+ + For example: + + ```python + >>> pool.connections + [ + , + , + , + ] + ``` + """ + return list(self._connections) + + async def handle_async_request(self, request: Request) -> Response: + """ + Send an HTTP request, and return an HTTP response. + + This is the core implementation that is called into by `.request()` or `.stream()`. + """ + scheme = request.url.scheme.decode() + if scheme == "": + raise UnsupportedProtocol( + "Request URL is missing an 'http://' or 'https://' protocol." + ) + if scheme not in ("http", "https", "ws", "wss"): + raise UnsupportedProtocol( + f"Request URL has an unsupported protocol '{scheme}://'." + ) + + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("pool", None) + + with self._optional_thread_lock: + # Add the incoming request to our request queue. + pool_request = AsyncPoolRequest(request) + self._requests.append(pool_request) + + try: + while True: + with self._optional_thread_lock: + # Assign incoming requests to available connections, + # closing or creating new connections as required. + closing = self._assign_requests_to_connections() + await self._close_connections(closing) + + # Wait until this request has an assigned connection. + connection = await pool_request.wait_for_connection(timeout=timeout) + + try: + # Send the request on the assigned connection. + response = await connection.handle_async_request( + pool_request.request + ) + except ConnectionNotAvailable: + # In some cases a connection may initially be available to + # handle a request, but then become unavailable. + # + # In this case we clear the connection and try again. + pool_request.clear_connection() + else: + break # pragma: nocover + + except BaseException as exc: + with self._optional_thread_lock: + # For any exception or cancellation we remove the request from + # the queue, and then re-assign requests to connections. 
+ self._requests.remove(pool_request) + closing = self._assign_requests_to_connections() + + await self._close_connections(closing) + raise exc from None + + # Return the response. Note that in this case we still have to manage + # the point at which the response is closed. + assert isinstance(response.stream, AsyncIterable) + return Response( + status=response.status, + headers=response.headers, + content=PoolByteStream( + stream=response.stream, pool_request=pool_request, pool=self + ), + extensions=response.extensions, + ) + + def _assign_requests_to_connections(self) -> List[AsyncConnectionInterface]: + """ + Manage the state of the connection pool, assigning incoming + requests to connections as available. + + Called whenever a new request is added or removed from the pool. + + Any closing connections are returned, allowing the I/O for closing + those connections to be handled seperately. + """ + closing_connections = [] + + # First we handle cleaning up any connections that are closed, + # have expired their keep-alive, or surplus idle connections. + for connection in list(self._connections): + if connection.is_closed(): + # log: "removing closed connection" + self._connections.remove(connection) + elif connection.has_expired(): + # log: "closing expired connection" + self._connections.remove(connection) + closing_connections.append(connection) + elif ( + connection.is_idle() + and len([connection.is_idle() for connection in self._connections]) + > self._max_keepalive_connections + ): + # log: "closing idle connection" + self._connections.remove(connection) + closing_connections.append(connection) + + # Assign queued requests to connections. 
+ queued_requests = [request for request in self._requests if request.is_queued()] + for pool_request in queued_requests: + origin = pool_request.request.url.origin + avilable_connections = [ + connection + for connection in self._connections + if connection.can_handle_request(origin) and connection.is_available() + ] + idle_connections = [ + connection for connection in self._connections if connection.is_idle() + ] + + # There are three cases for how we may be able to handle the request: + # + # 1. There is an existing connection that can handle the request. + # 2. We can create a new connection to handle the request. + # 3. We can close an idle connection and then create a new connection + # to handle the request. + if avilable_connections: + # log: "reusing existing connection" + connection = avilable_connections[0] + pool_request.assign_to_connection(connection) + elif len(self._connections) < self._max_connections: + # log: "creating new connection" + connection = self.create_connection(origin) + self._connections.append(connection) + pool_request.assign_to_connection(connection) + elif idle_connections: + # log: "closing idle connection" + connection = idle_connections[0] + self._connections.remove(connection) + closing_connections.append(connection) + # log: "creating new connection" + connection = self.create_connection(origin) + self._connections.append(connection) + pool_request.assign_to_connection(connection) + + return closing_connections + + async def _close_connections(self, closing: List[AsyncConnectionInterface]) -> None: + # Close connections which have been removed from the pool. + with AsyncShieldCancellation(): + for connection in closing: + await connection.aclose() + + async def aclose(self) -> None: + # Explicitly close the connection pool. + # Clears all existing requests and connections. 
+ with self._optional_thread_lock: + closing_connections = list(self._connections) + self._connections = [] + await self._close_connections(closing_connections) + + async def __aenter__(self) -> "AsyncConnectionPool": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + await self.aclose() + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + with self._optional_thread_lock: + request_is_queued = [request.is_queued() for request in self._requests] + connection_is_idle = [ + connection.is_idle() for connection in self._connections + ] + + num_active_requests = request_is_queued.count(False) + num_queued_requests = request_is_queued.count(True) + num_active_connections = connection_is_idle.count(False) + num_idle_connections = connection_is_idle.count(True) + + requests_info = ( + f"Requests: {num_active_requests} active, {num_queued_requests} queued" + ) + connection_info = ( + f"Connections: {num_active_connections} active, {num_idle_connections} idle" + ) + + return f"<{class_name} [{requests_info} | {connection_info}]>" + + +class PoolByteStream: + def __init__( + self, + stream: AsyncIterable[bytes], + pool_request: AsyncPoolRequest, + pool: AsyncConnectionPool, + ) -> None: + self._stream = stream + self._pool_request = pool_request + self._pool = pool + self._closed = False + + async def __aiter__(self) -> AsyncIterator[bytes]: + try: + async for part in self._stream: + yield part + except BaseException as exc: + await self.aclose() + raise exc from None + + async def aclose(self) -> None: + if not self._closed: + self._closed = True + with AsyncShieldCancellation(): + if hasattr(self._stream, "aclose"): + await self._stream.aclose() + + with self._pool._optional_thread_lock: + self._pool._requests.remove(self._pool_request) + closing = self._pool._assign_requests_to_connections() + + await 
self._pool._close_connections(closing) diff --git a/.venv/Lib/site-packages/httpcore/_async/http11.py b/.venv/Lib/site-packages/httpcore/_async/http11.py new file mode 100644 index 00000000..0493a923 --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_async/http11.py @@ -0,0 +1,386 @@ +import enum +import logging +import ssl +import time +from types import TracebackType +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + List, + Optional, + Tuple, + Type, + Union, +) + +import h11 + +from .._backends.base import AsyncNetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, + WriteError, + map_exceptions, +) +from .._models import Origin, Request, Response +from .._synchronization import AsyncLock, AsyncShieldCancellation +from .._trace import Trace +from .interfaces import AsyncConnectionInterface + +logger = logging.getLogger("httpcore.http11") + + +# A subset of `h11.Event` types supported by `_send_event` +H11SendEvent = Union[ + h11.Request, + h11.Data, + h11.EndOfMessage, +] + + +class HTTPConnectionState(enum.IntEnum): + NEW = 0 + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class AsyncHTTP11Connection(AsyncConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024 + + def __init__( + self, + origin: Origin, + stream: AsyncNetworkStream, + keepalive_expiry: Optional[float] = None, + ) -> None: + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: Optional[float] = keepalive_expiry + self._expire_at: Optional[float] = None + self._state = HTTPConnectionState.NEW + self._state_lock = AsyncLock() + self._request_count = 0 + self._h11_state = h11.Connection( + our_role=h11.CLIENT, + max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE, + ) + + async def handle_async_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to 
{request.url.origin} on connection " + f"to {self._origin}" + ) + + async with self._state_lock: + if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE): + self._request_count += 1 + self._state = HTTPConnectionState.ACTIVE + self._expire_at = None + else: + raise ConnectionNotAvailable() + + try: + kwargs = {"request": request} + try: + async with Trace( + "send_request_headers", logger, request, kwargs + ) as trace: + await self._send_request_headers(**kwargs) + async with Trace("send_request_body", logger, request, kwargs) as trace: + await self._send_request_body(**kwargs) + except WriteError: + # If we get a write error while we're writing the request, + # then we supress this error and move on to attempting to + # read the response. Servers can sometimes close the request + # pre-emptively and then respond with a well formed HTTP + # error response. + pass + + async with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + ( + http_version, + status, + reason_phrase, + headers, + trailing_data, + ) = await self._receive_response_headers(**kwargs) + trace.return_value = ( + http_version, + status, + reason_phrase, + headers, + ) + + network_stream = self._network_stream + + # CONNECT or Upgrade request + if (status == 101) or ( + (request.method == b"CONNECT") and (200 <= status < 300) + ): + network_stream = AsyncHTTP11UpgradeStream(network_stream, trailing_data) + + return Response( + status=status, + headers=headers, + content=HTTP11ConnectionByteStream(self, request), + extensions={ + "http_version": http_version, + "reason_phrase": reason_phrase, + "network_stream": network_stream, + }, + ) + except BaseException as exc: + with AsyncShieldCancellation(): + async with Trace("response_closed", logger, request) as trace: + await self._response_closed() + raise exc + + # Sending the request... 
+ + async def _send_request_headers(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + with map_exceptions({h11.LocalProtocolError: LocalProtocolError}): + event = h11.Request( + method=request.method, + target=request.url.target, + headers=request.headers, + ) + await self._send_event(event, timeout=timeout) + + async def _send_request_body(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + assert isinstance(request.stream, AsyncIterable) + async for chunk in request.stream: + event = h11.Data(data=chunk) + await self._send_event(event, timeout=timeout) + + await self._send_event(h11.EndOfMessage(), timeout=timeout) + + async def _send_event( + self, event: h11.Event, timeout: Optional[float] = None + ) -> None: + bytes_to_send = self._h11_state.send(event) + if bytes_to_send is not None: + await self._network_stream.write(bytes_to_send, timeout=timeout) + + # Receiving the response... + + async def _receive_response_headers( + self, request: Request + ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]], bytes]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + while True: + event = await self._receive_event(timeout=timeout) + if isinstance(event, h11.Response): + break + if ( + isinstance(event, h11.InformationalResponse) + and event.status_code == 101 + ): + break + + http_version = b"HTTP/" + event.http_version + + # h11 version 0.11+ supports a `raw_items` interface to get the + # raw header casing, rather than the enforced lowercase headers. 
+ headers = event.headers.raw_items() + + trailing_data, _ = self._h11_state.trailing_data + + return http_version, event.status_code, event.reason, headers, trailing_data + + async def _receive_response_body(self, request: Request) -> AsyncIterator[bytes]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + while True: + event = await self._receive_event(timeout=timeout) + if isinstance(event, h11.Data): + yield bytes(event.data) + elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)): + break + + async def _receive_event( + self, timeout: Optional[float] = None + ) -> Union[h11.Event, Type[h11.PAUSED]]: + while True: + with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}): + event = self._h11_state.next_event() + + if event is h11.NEED_DATA: + data = await self._network_stream.read( + self.READ_NUM_BYTES, timeout=timeout + ) + + # If we feed this case through h11 we'll raise an exception like: + # + # httpcore.RemoteProtocolError: can't handle event type + # ConnectionClosed when role=SERVER and state=SEND_RESPONSE + # + # Which is accurate, but not very informative from an end-user + # perspective. Instead we handle this case distinctly and treat + # it as a ConnectError. + if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE: + msg = "Server disconnected without sending a response." 
+ raise RemoteProtocolError(msg) + + self._h11_state.receive_data(data) + else: + # mypy fails to narrow the type in the above if statement above + return event # type: ignore[return-value] + + async def _response_closed(self) -> None: + async with self._state_lock: + if ( + self._h11_state.our_state is h11.DONE + and self._h11_state.their_state is h11.DONE + ): + self._state = HTTPConnectionState.IDLE + self._h11_state.start_next_cycle() + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + else: + await self.aclose() + + # Once the connection is no longer required... + + async def aclose(self) -> None: + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. + self._state = HTTPConnectionState.CLOSED + await self._network_stream.aclose() + + # The AsyncConnectionInterface methods provide information about the state of + # the connection, allowing for a connection pooling implementation to + # determine when to reuse and when to close the connection... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + # Note that HTTP/1.1 connections in the "NEW" state are not treated as + # being "available". The control flow which created the connection will + # be able to send an outgoing request, but the connection will not be + # acquired from the connection pool for any other request. + return self._state == HTTPConnectionState.IDLE + + def has_expired(self) -> bool: + now = time.monotonic() + keepalive_expired = self._expire_at is not None and now > self._expire_at + + # If the HTTP connection is idle but the socket is readable, then the + # only valid state is that the socket is about to return b"", indicating + # a server-initiated disconnect. 
+ server_disconnected = ( + self._state == HTTPConnectionState.IDLE + and self._network_stream.get_extra_info("is_readable") + ) + + return keepalive_expired or server_disconnected + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/1.1, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. + + async def __aenter__(self) -> "AsyncHTTP11Connection": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + await self.aclose() + + +class HTTP11ConnectionByteStream: + def __init__(self, connection: AsyncHTTP11Connection, request: Request) -> None: + self._connection = connection + self._request = request + self._closed = False + + async def __aiter__(self) -> AsyncIterator[bytes]: + kwargs = {"request": self._request} + try: + async with Trace("receive_response_body", logger, self._request, kwargs): + async for chunk in self._connection._receive_response_body(**kwargs): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. 
+ with AsyncShieldCancellation(): + await self.aclose() + raise exc + + async def aclose(self) -> None: + if not self._closed: + self._closed = True + async with Trace("response_closed", logger, self._request): + await self._connection._response_closed() + + +class AsyncHTTP11UpgradeStream(AsyncNetworkStream): + def __init__(self, stream: AsyncNetworkStream, leading_data: bytes) -> None: + self._stream = stream + self._leading_data = leading_data + + async def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes: + if self._leading_data: + buffer = self._leading_data[:max_bytes] + self._leading_data = self._leading_data[max_bytes:] + return buffer + else: + return await self._stream.read(max_bytes, timeout) + + async def write(self, buffer: bytes, timeout: Optional[float] = None) -> None: + await self._stream.write(buffer, timeout) + + async def aclose(self) -> None: + await self._stream.aclose() + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: Optional[str] = None, + timeout: Optional[float] = None, + ) -> AsyncNetworkStream: + return await self._stream.start_tls(ssl_context, server_hostname, timeout) + + def get_extra_info(self, info: str) -> Any: + return self._stream.get_extra_info(info) diff --git a/.venv/Lib/site-packages/httpcore/_async/http2.py b/.venv/Lib/site-packages/httpcore/_async/http2.py new file mode 100644 index 00000000..c201ee4c --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_async/http2.py @@ -0,0 +1,589 @@ +import enum +import logging +import time +import types +import typing + +import h2.config +import h2.connection +import h2.events +import h2.exceptions +import h2.settings + +from .._backends.base import AsyncNetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, +) +from .._models import Origin, Request, Response +from .._synchronization import AsyncLock, AsyncSemaphore, AsyncShieldCancellation +from .._trace import Trace +from 
.interfaces import AsyncConnectionInterface + +logger = logging.getLogger("httpcore.http2") + + +def has_body_headers(request: Request) -> bool: + return any( + k.lower() == b"content-length" or k.lower() == b"transfer-encoding" + for k, v in request.headers + ) + + +class HTTPConnectionState(enum.IntEnum): + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class AsyncHTTP2Connection(AsyncConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) + + def __init__( + self, + origin: Origin, + stream: AsyncNetworkStream, + keepalive_expiry: typing.Optional[float] = None, + ): + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: typing.Optional[float] = keepalive_expiry + self._h2_state = h2.connection.H2Connection(config=self.CONFIG) + self._state = HTTPConnectionState.IDLE + self._expire_at: typing.Optional[float] = None + self._request_count = 0 + self._init_lock = AsyncLock() + self._state_lock = AsyncLock() + self._read_lock = AsyncLock() + self._write_lock = AsyncLock() + self._sent_connection_init = False + self._used_all_stream_ids = False + self._connection_error = False + + # Mapping from stream ID to response stream events. + self._events: typing.Dict[ + int, + typing.Union[ + h2.events.ResponseReceived, + h2.events.DataReceived, + h2.events.StreamEnded, + h2.events.StreamReset, + ], + ] = {} + + # Connection terminated events are stored as state since + # we need to handle them for all streams. + self._connection_terminated: typing.Optional[h2.events.ConnectionTerminated] = ( + None + ) + + self._read_exception: typing.Optional[Exception] = None + self._write_exception: typing.Optional[Exception] = None + + async def handle_async_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + # This cannot occur in normal operation, since the connection pool + # will only send requests on connections that handle them. 
+ # It's in place simply for resilience as a guard against incorrect + # usage, for anyone working directly with httpcore connections. + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection " + f"to {self._origin}" + ) + + async with self._state_lock: + if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE): + self._request_count += 1 + self._expire_at = None + self._state = HTTPConnectionState.ACTIVE + else: + raise ConnectionNotAvailable() + + async with self._init_lock: + if not self._sent_connection_init: + try: + kwargs = {"request": request} + async with Trace("send_connection_init", logger, request, kwargs): + await self._send_connection_init(**kwargs) + except BaseException as exc: + with AsyncShieldCancellation(): + await self.aclose() + raise exc + + self._sent_connection_init = True + + # Initially start with just 1 until the remote server provides + # its max_concurrent_streams value + self._max_streams = 1 + + local_settings_max_streams = ( + self._h2_state.local_settings.max_concurrent_streams + ) + self._max_streams_semaphore = AsyncSemaphore(local_settings_max_streams) + + for _ in range(local_settings_max_streams - self._max_streams): + await self._max_streams_semaphore.acquire() + + await self._max_streams_semaphore.acquire() + + try: + stream_id = self._h2_state.get_next_available_stream_id() + self._events[stream_id] = [] + except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover + self._used_all_stream_ids = True + self._request_count -= 1 + raise ConnectionNotAvailable() + + try: + kwargs = {"request": request, "stream_id": stream_id} + async with Trace("send_request_headers", logger, request, kwargs): + await self._send_request_headers(request=request, stream_id=stream_id) + async with Trace("send_request_body", logger, request, kwargs): + await self._send_request_body(request=request, stream_id=stream_id) + async with Trace( + "receive_response_headers", logger, request, kwargs + ) 
as trace: + status, headers = await self._receive_response( + request=request, stream_id=stream_id + ) + trace.return_value = (status, headers) + + return Response( + status=status, + headers=headers, + content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), + extensions={ + "http_version": b"HTTP/2", + "network_stream": self._network_stream, + "stream_id": stream_id, + }, + ) + except BaseException as exc: # noqa: PIE786 + with AsyncShieldCancellation(): + kwargs = {"stream_id": stream_id} + async with Trace("response_closed", logger, request, kwargs): + await self._response_closed(stream_id=stream_id) + + if isinstance(exc, h2.exceptions.ProtocolError): + # One case where h2 can raise a protocol error is when a + # closed frame has been seen by the state machine. + # + # This happens when one stream is reading, and encounters + # a GOAWAY event. Other flows of control may then raise + # a protocol error at any point they interact with the 'h2_state'. + # + # In this case we'll have stored the event, and should raise + # it as a RemoteProtocolError. + if self._connection_terminated: # pragma: nocover + raise RemoteProtocolError(self._connection_terminated) + # If h2 raises a protocol error in some other state then we + # must somehow have made a protocol violation. + raise LocalProtocolError(exc) # pragma: nocover + + raise exc + + async def _send_connection_init(self, request: Request) -> None: + """ + The HTTP/2 connection requires some initial setup before we can start + using individual request/response streams on it. + """ + # Need to set these manually here instead of manipulating via + # __setitem__() otherwise the H2Connection will emit SettingsUpdate + # frames in addition to sending the undesired defaults. + self._h2_state.local_settings = h2.settings.Settings( + client=True, + initial_values={ + # Disable PUSH_PROMISE frames from the server since we don't do anything + # with them for now. Maybe when we support caching? 
+ h2.settings.SettingCodes.ENABLE_PUSH: 0, + # These two are taken from h2 for safe defaults + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100, + h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536, + }, + ) + + # Some websites (*cough* Yahoo *cough*) balk at this setting being + # present in the initial handshake since it's not defined in the original + # RFC despite the RFC mandating ignoring settings you don't know about. + del self._h2_state.local_settings[ + h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL + ] + + self._h2_state.initiate_connection() + self._h2_state.increment_flow_control_window(2**24) + await self._write_outgoing_data(request) + + # Sending the request... + + async def _send_request_headers(self, request: Request, stream_id: int) -> None: + """ + Send the request headers to a given stream ID. + """ + end_stream = not has_body_headers(request) + + # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. + # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require + # HTTP/1.1 style headers, and map them appropriately if we end up on + # an HTTP/2 connection. + authority = [v for k, v in request.headers if k.lower() == b"host"][0] + + headers = [ + (b":method", request.method), + (b":authority", authority), + (b":scheme", request.url.scheme), + (b":path", request.url.target), + ] + [ + (k.lower(), v) + for k, v in request.headers + if k.lower() + not in ( + b"host", + b"transfer-encoding", + ) + ] + + self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) + self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id) + await self._write_outgoing_data(request) + + async def _send_request_body(self, request: Request, stream_id: int) -> None: + """ + Iterate over the request body sending it to a given stream ID. 
+ """ + if not has_body_headers(request): + return + + assert isinstance(request.stream, typing.AsyncIterable) + async for data in request.stream: + await self._send_stream_data(request, stream_id, data) + await self._send_end_stream(request, stream_id) + + async def _send_stream_data( + self, request: Request, stream_id: int, data: bytes + ) -> None: + """ + Send a single chunk of data in one or more data frames. + """ + while data: + max_flow = await self._wait_for_outgoing_flow(request, stream_id) + chunk_size = min(len(data), max_flow) + chunk, data = data[:chunk_size], data[chunk_size:] + self._h2_state.send_data(stream_id, chunk) + await self._write_outgoing_data(request) + + async def _send_end_stream(self, request: Request, stream_id: int) -> None: + """ + Send an empty data frame on on a given stream ID with the END_STREAM flag set. + """ + self._h2_state.end_stream(stream_id) + await self._write_outgoing_data(request) + + # Receiving the response... + + async def _receive_response( + self, request: Request, stream_id: int + ) -> typing.Tuple[int, typing.List[typing.Tuple[bytes, bytes]]]: + """ + Return the response status code and headers for a given stream ID. + """ + while True: + event = await self._receive_stream_event(request, stream_id) + if isinstance(event, h2.events.ResponseReceived): + break + + status_code = 200 + headers = [] + for k, v in event.headers: + if k == b":status": + status_code = int(v.decode("ascii", errors="ignore")) + elif not k.startswith(b":"): + headers.append((k, v)) + + return (status_code, headers) + + async def _receive_response_body( + self, request: Request, stream_id: int + ) -> typing.AsyncIterator[bytes]: + """ + Iterator that returns the bytes of the response body for a given stream ID. 
+ """ + while True: + event = await self._receive_stream_event(request, stream_id) + if isinstance(event, h2.events.DataReceived): + amount = event.flow_controlled_length + self._h2_state.acknowledge_received_data(amount, stream_id) + await self._write_outgoing_data(request) + yield event.data + elif isinstance(event, h2.events.StreamEnded): + break + + async def _receive_stream_event( + self, request: Request, stream_id: int + ) -> typing.Union[ + h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded + ]: + """ + Return the next available event for a given stream ID. + + Will read more data from the network if required. + """ + while not self._events.get(stream_id): + await self._receive_events(request, stream_id) + event = self._events[stream_id].pop(0) + if isinstance(event, h2.events.StreamReset): + raise RemoteProtocolError(event) + return event + + async def _receive_events( + self, request: Request, stream_id: typing.Optional[int] = None + ) -> None: + """ + Read some data from the network until we see one or more events + for a given stream ID. + """ + async with self._read_lock: + if self._connection_terminated is not None: + last_stream_id = self._connection_terminated.last_stream_id + if stream_id and last_stream_id and stream_id > last_stream_id: + self._request_count -= 1 + raise ConnectionNotAvailable() + raise RemoteProtocolError(self._connection_terminated) + + # This conditional is a bit icky. We don't want to block reading if we've + # actually got an event to return for a given stream. We need to do that + # check *within* the atomic read lock. Though it also need to be optional, + # because when we call it from `_wait_for_outgoing_flow` we *do* want to + # block until we've available flow control, event when we have events + # pending for the stream ID we're attempting to send on. 
+ if stream_id is None or not self._events.get(stream_id): + events = await self._read_incoming_data(request) + for event in events: + if isinstance(event, h2.events.RemoteSettingsChanged): + async with Trace( + "receive_remote_settings", logger, request + ) as trace: + await self._receive_remote_settings_change(event) + trace.return_value = event + + elif isinstance( + event, + ( + h2.events.ResponseReceived, + h2.events.DataReceived, + h2.events.StreamEnded, + h2.events.StreamReset, + ), + ): + if event.stream_id in self._events: + self._events[event.stream_id].append(event) + + elif isinstance(event, h2.events.ConnectionTerminated): + self._connection_terminated = event + + await self._write_outgoing_data(request) + + async def _receive_remote_settings_change(self, event: h2.events.Event) -> None: + max_concurrent_streams = event.changed_settings.get( + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS + ) + if max_concurrent_streams: + new_max_streams = min( + max_concurrent_streams.new_value, + self._h2_state.local_settings.max_concurrent_streams, + ) + if new_max_streams and new_max_streams != self._max_streams: + while new_max_streams > self._max_streams: + await self._max_streams_semaphore.release() + self._max_streams += 1 + while new_max_streams < self._max_streams: + await self._max_streams_semaphore.acquire() + self._max_streams -= 1 + + async def _response_closed(self, stream_id: int) -> None: + await self._max_streams_semaphore.release() + del self._events[stream_id] + async with self._state_lock: + if self._connection_terminated and not self._events: + await self.aclose() + + elif self._state == HTTPConnectionState.ACTIVE and not self._events: + self._state = HTTPConnectionState.IDLE + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + if self._used_all_stream_ids: # pragma: nocover + await self.aclose() + + async def aclose(self) -> None: + # Note that this method unilaterally closes the 
connection, and does + # not have any kind of locking in place around it. + self._h2_state.close_connection() + self._state = HTTPConnectionState.CLOSED + await self._network_stream.aclose() + + # Wrappers around network read/write operations... + + async def _read_incoming_data( + self, request: Request + ) -> typing.List[h2.events.Event]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + if self._read_exception is not None: + raise self._read_exception # pragma: nocover + + try: + data = await self._network_stream.read(self.READ_NUM_BYTES, timeout) + if data == b"": + raise RemoteProtocolError("Server disconnected") + except Exception as exc: + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future reads. + # (For example, this means that a single read timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. + self._read_exception = exc + self._connection_error = True + raise exc + + events: typing.List[h2.events.Event] = self._h2_state.receive_data(data) + + return events + + async def _write_outgoing_data(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + async with self._write_lock: + data_to_send = self._h2_state.data_to_send() + + if self._write_exception is not None: + raise self._write_exception # pragma: nocover + + try: + await self._network_stream.write(data_to_send, timeout) + except Exception as exc: # pragma: nocover + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future write. + # (For example, this means that a single write timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. 
Mark the connection as errored, so that we don't accept any other + # incoming requests. + self._write_exception = exc + self._connection_error = True + raise exc + + # Flow control... + + async def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: + """ + Returns the maximum allowable outgoing flow for a given stream. + + If the allowable flow is zero, then waits on the network until + WindowUpdated frames have increased the flow rate. + https://tools.ietf.org/html/rfc7540#section-6.9 + """ + local_flow: int = self._h2_state.local_flow_control_window(stream_id) + max_frame_size: int = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + while flow == 0: + await self._receive_events(request) + local_flow = self._h2_state.local_flow_control_window(stream_id) + max_frame_size = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + return flow + + # Interface for connection pooling... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + return ( + self._state != HTTPConnectionState.CLOSED + and not self._connection_error + and not self._used_all_stream_ids + and not ( + self._h2_state.state_machine.state + == h2.connection.ConnectionState.CLOSED + ) + ) + + def has_expired(self) -> bool: + now = time.monotonic() + return self._expire_at is not None and now > self._expire_at + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/2, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context 
managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. + + async def __aenter__(self) -> "AsyncHTTP2Connection": + return self + + async def __aexit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[types.TracebackType] = None, + ) -> None: + await self.aclose() + + +class HTTP2ConnectionByteStream: + def __init__( + self, connection: AsyncHTTP2Connection, request: Request, stream_id: int + ) -> None: + self._connection = connection + self._request = request + self._stream_id = stream_id + self._closed = False + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + kwargs = {"request": self._request, "stream_id": self._stream_id} + try: + async with Trace("receive_response_body", logger, self._request, kwargs): + async for chunk in self._connection._receive_response_body( + request=self._request, stream_id=self._stream_id + ): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. 
+ with AsyncShieldCancellation(): + await self.aclose() + raise exc + + async def aclose(self) -> None: + if not self._closed: + self._closed = True + kwargs = {"stream_id": self._stream_id} + async with Trace("response_closed", logger, self._request, kwargs): + await self._connection._response_closed(stream_id=self._stream_id) diff --git a/.venv/Lib/site-packages/httpcore/_async/http_proxy.py b/.venv/Lib/site-packages/httpcore/_async/http_proxy.py new file mode 100644 index 00000000..4aa7d874 --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_async/http_proxy.py @@ -0,0 +1,368 @@ +import logging +import ssl +from base64 import b64encode +from typing import Iterable, List, Mapping, Optional, Sequence, Tuple, Union + +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend +from .._exceptions import ProxyError +from .._models import ( + URL, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, +) +from .._ssl import default_ssl_context +from .._synchronization import AsyncLock +from .._trace import Trace +from .connection import AsyncHTTPConnection +from .connection_pool import AsyncConnectionPool +from .http11 import AsyncHTTP11Connection +from .interfaces import AsyncConnectionInterface + +HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]] +HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]] + + +logger = logging.getLogger("httpcore.proxy") + + +def merge_headers( + default_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, + override_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, +) -> List[Tuple[bytes, bytes]]: + """ + Append default_headers and override_headers, de-duplicating if a key exists + in both cases. 
+ """ + default_headers = [] if default_headers is None else list(default_headers) + override_headers = [] if override_headers is None else list(override_headers) + has_override = set(key.lower() for key, value in override_headers) + default_headers = [ + (key, value) + for key, value in default_headers + if key.lower() not in has_override + ] + return default_headers + override_headers + + +def build_auth_header(username: bytes, password: bytes) -> bytes: + userpass = username + b":" + password + return b"Basic " + b64encode(userpass) + + +class AsyncHTTPProxy(AsyncConnectionPool): + """ + A connection pool that sends requests via an HTTP proxy. + """ + + def __init__( + self, + proxy_url: Union[URL, bytes, str], + proxy_auth: Optional[Tuple[Union[bytes, str], Union[bytes, str]]] = None, + proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, + ssl_context: Optional[ssl.SSLContext] = None, + proxy_ssl_context: Optional[ssl.SSLContext] = None, + max_connections: Optional[int] = 10, + max_keepalive_connections: Optional[int] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: Optional[str] = None, + uds: Optional[str] = None, + network_backend: Optional[AsyncNetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + proxy_url: The URL to use when connecting to the proxy server. + For example `"http://127.0.0.1:8080/"`. + proxy_auth: Any proxy authentication as a two-tuple of + (username, password). May be either bytes or ascii-only str. + proxy_headers: Any HTTP headers to use for the proxy requests. + For example `{"Proxy-Authorization": "Basic :"}`. + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. 
+ proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish + a connection. + local_address: Local address to connect from. Can also be used to + connect using a particular address family. Using + `local_address="0.0.0.0"` will connect using an `AF_INET` address + (IPv4), while using `local_address="::"` will connect using an + `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. 
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + local_address=local_address, + uds=uds, + socket_options=socket_options, + ) + + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if ( + self._proxy_url.scheme == b"http" and proxy_ssl_context is not None + ): # pragma: no cover + raise RuntimeError( + "The `proxy_ssl_context` argument is not allowed for the http scheme" + ) + + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + if proxy_auth is not None: + username = enforce_bytes(proxy_auth[0], name="proxy_auth") + password = enforce_bytes(proxy_auth[1], name="proxy_auth") + authorization = build_auth_header(username, password) + self._proxy_headers = [ + (b"Proxy-Authorization", authorization) + ] + self._proxy_headers + + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: + if origin.scheme == b"http": + return AsyncForwardHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + keepalive_expiry=self._keepalive_expiry, + network_backend=self._network_backend, + proxy_ssl_context=self._proxy_ssl_context, + ) + return AsyncTunnelHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + ssl_context=self._ssl_context, + proxy_ssl_context=self._proxy_ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class AsyncForwardHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, 
+ keepalive_expiry: Optional[float] = None, + network_backend: Optional[AsyncNetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + proxy_ssl_context: Optional[ssl.SSLContext] = None, + ) -> None: + self._connection = AsyncHTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._remote_origin = remote_origin + + async def handle_async_request(self, request: Request) -> Response: + headers = merge_headers(self._proxy_headers, request.headers) + url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=bytes(request.url), + ) + proxy_request = Request( + method=request.method, + url=url, + headers=headers, + content=request.stream, + extensions=request.extensions, + ) + return await self._connection.handle_async_request(proxy_request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + await self._connection.aclose() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + +class AsyncTunnelHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + ssl_context: Optional[ssl.SSLContext] = None, + proxy_ssl_context: Optional[ssl.SSLContext] = None, + proxy_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, + 
keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + network_backend: Optional[AsyncNetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + self._connection: AsyncConnectionInterface = AsyncHTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._connect_lock = AsyncLock() + self._connected = False + + async def handle_async_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("connect", None) + + async with self._connect_lock: + if not self._connected: + target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) + + connect_url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=target, + ) + connect_headers = merge_headers( + [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers + ) + connect_request = Request( + method=b"CONNECT", + url=connect_url, + headers=connect_headers, + extensions=request.extensions, + ) + connect_response = await self._connection.handle_async_request( + connect_request + ) + + if connect_response.status < 200 or connect_response.status > 299: + reason_bytes = connect_response.extensions.get("reason_phrase", b"") + reason_str = reason_bytes.decode("ascii", errors="ignore") + msg = "%d %s" % (connect_response.status, reason_str) + await self._connection.aclose() + raise ProxyError(msg) + + stream = connect_response.extensions["network_stream"] + + # Upgrade the stream 
to SSL + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import AsyncHTTP2Connection + + self._connection = AsyncHTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + + self._connected = True + return await self._connection.handle_async_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + await self._connection.aclose() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/.venv/Lib/site-packages/httpcore/_async/interfaces.py b/.venv/Lib/site-packages/httpcore/_async/interfaces.py new file mode 100644 index 
00000000..c998dd27 --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_async/interfaces.py @@ -0,0 +1,135 @@ +from contextlib import asynccontextmanager +from typing import AsyncIterator, Optional, Union + +from .._models import ( + URL, + Extensions, + HeaderTypes, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, + include_request_headers, +) + + +class AsyncRequestInterface: + async def request( + self, + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, AsyncIterator[bytes], None] = None, + extensions: Optional[Extensions] = None, + ) -> Response: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = await self.handle_async_request(request) + try: + await response.aread() + finally: + await response.aclose() + return response + + @asynccontextmanager + async def stream( + self, + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, AsyncIterator[bytes], None] = None, + extensions: Optional[Extensions] = None, + ) -> AsyncIterator[Response]: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. 
+ headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = await self.handle_async_request(request) + try: + yield response + finally: + await response.aclose() + + async def handle_async_request(self, request: Request) -> Response: + raise NotImplementedError() # pragma: nocover + + +class AsyncConnectionInterface(AsyncRequestInterface): + async def aclose(self) -> None: + raise NotImplementedError() # pragma: nocover + + def info(self) -> str: + raise NotImplementedError() # pragma: nocover + + def can_handle_request(self, origin: Origin) -> bool: + raise NotImplementedError() # pragma: nocover + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an + outgoing request. + + An HTTP/1.1 connection will only be available if it is currently idle. + + An HTTP/2 connection will be available so long as the stream ID space is + not yet exhausted, and the connection is not in an error state. + + While the connection is being established we may not yet know if it is going + to result in an HTTP/1.1 or HTTP/2 connection. The connection should be + treated as being available, but might ultimately raise `NewConnectionRequired` + required exceptions if multiple requests are attempted over a connection + that ends up being established as HTTP/1.1. + """ + raise NotImplementedError() # pragma: nocover + + def has_expired(self) -> bool: + """ + Return `True` if the connection is in a state where it should be closed. + + This either means that the connection is idle and it has passed the + expiry time on its keep-alive, or that server has sent an EOF. + """ + raise NotImplementedError() # pragma: nocover + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. 
+ """ + raise NotImplementedError() # pragma: nocover + + def is_closed(self) -> bool: + """ + Return `True` if the connection has been closed. + + Used when a response is closed to determine if the connection may be + returned to the connection pool or not. + """ + raise NotImplementedError() # pragma: nocover diff --git a/.venv/Lib/site-packages/httpcore/_async/socks_proxy.py b/.venv/Lib/site-packages/httpcore/_async/socks_proxy.py new file mode 100644 index 00000000..f839603f --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_async/socks_proxy.py @@ -0,0 +1,342 @@ +import logging +import ssl +import typing + +from socksio import socks5 + +from .._backends.auto import AutoBackend +from .._backends.base import AsyncNetworkBackend, AsyncNetworkStream +from .._exceptions import ConnectionNotAvailable, ProxyError +from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url +from .._ssl import default_ssl_context +from .._synchronization import AsyncLock +from .._trace import Trace +from .connection_pool import AsyncConnectionPool +from .http11 import AsyncHTTP11Connection +from .interfaces import AsyncConnectionInterface + +logger = logging.getLogger("httpcore.socks") + + +AUTH_METHODS = { + b"\x00": "NO AUTHENTICATION REQUIRED", + b"\x01": "GSSAPI", + b"\x02": "USERNAME/PASSWORD", + b"\xff": "NO ACCEPTABLE METHODS", +} + +REPLY_CODES = { + b"\x00": "Succeeded", + b"\x01": "General SOCKS server failure", + b"\x02": "Connection not allowed by ruleset", + b"\x03": "Network unreachable", + b"\x04": "Host unreachable", + b"\x05": "Connection refused", + b"\x06": "TTL expired", + b"\x07": "Command not supported", + b"\x08": "Address type not supported", +} + + +async def _init_socks5_connection( + stream: AsyncNetworkStream, + *, + host: bytes, + port: int, + auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, +) -> None: + conn = socks5.SOCKS5Connection() + + # Auth method request + auth_method = ( + socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED 
+ if auth is None + else socks5.SOCKS5AuthMethod.USERNAME_PASSWORD + ) + conn.send(socks5.SOCKS5AuthMethodsRequest([auth_method])) + outgoing_bytes = conn.data_to_send() + await stream.write(outgoing_bytes) + + # Auth method response + incoming_bytes = await stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socks5.SOCKS5AuthReply) + if response.method != auth_method: + requested = AUTH_METHODS.get(auth_method, "UNKNOWN") + responded = AUTH_METHODS.get(response.method, "UNKNOWN") + raise ProxyError( + f"Requested {requested} from proxy server, but got {responded}." + ) + + if response.method == socks5.SOCKS5AuthMethod.USERNAME_PASSWORD: + # Username/password request + assert auth is not None + username, password = auth + conn.send(socks5.SOCKS5UsernamePasswordRequest(username, password)) + outgoing_bytes = conn.data_to_send() + await stream.write(outgoing_bytes) + + # Username/password response + incoming_bytes = await stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socks5.SOCKS5UsernamePasswordReply) + if not response.success: + raise ProxyError("Invalid username/password") + + # Connect request + conn.send( + socks5.SOCKS5CommandRequest.from_address( + socks5.SOCKS5Command.CONNECT, (host, port) + ) + ) + outgoing_bytes = conn.data_to_send() + await stream.write(outgoing_bytes) + + # Connect response + incoming_bytes = await stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socks5.SOCKS5Reply) + if response.reply_code != socks5.SOCKS5ReplyCode.SUCCEEDED: + reply_code = REPLY_CODES.get(response.reply_code, "UNKOWN") + raise ProxyError(f"Proxy Server could not connect: {reply_code}.") + + +class AsyncSOCKSProxy(AsyncConnectionPool): + """ + A connection pool that sends requests via an HTTP proxy. 
+ """ + + def __init__( + self, + proxy_url: typing.Union[URL, bytes, str], + proxy_auth: typing.Optional[ + typing.Tuple[typing.Union[bytes, str], typing.Union[bytes, str]] + ] = None, + ssl_context: typing.Optional[ssl.SSLContext] = None, + max_connections: typing.Optional[int] = 10, + max_keepalive_connections: typing.Optional[int] = None, + keepalive_expiry: typing.Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + network_backend: typing.Optional[AsyncNetworkBackend] = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + proxy_url: The URL to use when connecting to the proxy server. + For example `"http://127.0.0.1:8080/"`. + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish + a connection. + local_address: Local address to connect from. Can also be used to + connect using a particular address family. Using + `local_address="0.0.0.0"` will connect using an `AF_INET` address + (IPv4), while using `local_address="::"` will connect using an + `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. 
+ network_backend: A backend instance to use for handling network I/O. + """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + ) + self._ssl_context = ssl_context + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if proxy_auth is not None: + username, password = proxy_auth + username_bytes = enforce_bytes(username, name="proxy_auth") + password_bytes = enforce_bytes(password, name="proxy_auth") + self._proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = ( + username_bytes, + password_bytes, + ) + else: + self._proxy_auth = None + + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: + return AsyncSocks5Connection( + proxy_origin=self._proxy_url.origin, + remote_origin=origin, + proxy_auth=self._proxy_auth, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class AsyncSocks5Connection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, + ssl_context: typing.Optional[ssl.SSLContext] = None, + keepalive_expiry: typing.Optional[float] = None, + http1: bool = True, + http2: bool = False, + network_backend: typing.Optional[AsyncNetworkBackend] = None, + ) -> None: + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._proxy_auth = proxy_auth + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + + self._network_backend: AsyncNetworkBackend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._connect_lock = AsyncLock() + self._connection: typing.Optional[AsyncConnectionInterface] 
= None + self._connect_failed = False + + async def handle_async_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + async with self._connect_lock: + if self._connection is None: + try: + # Connect to the proxy + kwargs = { + "host": self._proxy_origin.host.decode("ascii"), + "port": self._proxy_origin.port, + "timeout": timeout, + } + async with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = await self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + + # Connect to the remote host using socks5 + kwargs = { + "stream": stream, + "host": self._remote_origin.host.decode("ascii"), + "port": self._remote_origin.port, + "auth": self._proxy_auth, + } + async with Trace( + "setup_socks5_connection", logger, request, kwargs + ) as trace: + await _init_socks5_connection(**kwargs) + trace.return_value = stream + + # Upgrade the stream to SSL + if self._remote_origin.scheme == b"https": + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ( + ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ) + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or ( + self._http2 and not self._http1 + ): # pragma: nocover + from .http2 import AsyncHTTP2Connection + + 
self._connection = AsyncHTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except Exception as exc: + self._connect_failed = True + raise exc + elif not self._connection.is_available(): # pragma: nocover + raise ConnectionNotAvailable() + + return await self._connection.handle_async_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + if self._connection is not None: + await self._connection.aclose() + + def is_available(self) -> bool: + if self._connection is None: # pragma: nocover + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._remote_origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: # pragma: nocover + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/.venv/Lib/site-packages/httpcore/_backends/__init__.py b/.venv/Lib/site-packages/httpcore/_backends/__init__.py new file 
mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/httpcore/_backends/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/_backends/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..8adb95bd Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_backends/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_backends/__pycache__/anyio.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/_backends/__pycache__/anyio.cpython-311.pyc new file mode 100644 index 00000000..48aeed41 Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_backends/__pycache__/anyio.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_backends/__pycache__/auto.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/_backends/__pycache__/auto.cpython-311.pyc new file mode 100644 index 00000000..55c81429 Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_backends/__pycache__/auto.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_backends/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/_backends/__pycache__/base.cpython-311.pyc new file mode 100644 index 00000000..0e303a85 Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_backends/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_backends/__pycache__/mock.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/_backends/__pycache__/mock.cpython-311.pyc new file mode 100644 index 00000000..fb8479ed Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_backends/__pycache__/mock.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_backends/__pycache__/sync.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/_backends/__pycache__/sync.cpython-311.pyc new file mode 100644 index 00000000..19596186 Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_backends/__pycache__/sync.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/httpcore/_backends/__pycache__/trio.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/_backends/__pycache__/trio.cpython-311.pyc new file mode 100644 index 00000000..8cb2c369 Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_backends/__pycache__/trio.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_backends/anyio.py b/.venv/Lib/site-packages/httpcore/_backends/anyio.py new file mode 100644 index 00000000..5731f5e7 --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_backends/anyio.py @@ -0,0 +1,147 @@ +import ssl +import typing + +import anyio + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .._utils import is_socket_readable +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream + + +class AnyIOStream(AsyncNetworkStream): + def __init__(self, stream: anyio.abc.ByteStream) -> None: + self._stream = stream + + async def read( + self, max_bytes: int, timeout: typing.Optional[float] = None + ) -> bytes: + exc_map = { + TimeoutError: ReadTimeout, + anyio.BrokenResourceError: ReadError, + anyio.ClosedResourceError: ReadError, + anyio.EndOfStream: ReadError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + try: + return await self._stream.receive(max_bytes=max_bytes) + except anyio.EndOfStream: # pragma: nocover + return b"" + + async def write( + self, buffer: bytes, timeout: typing.Optional[float] = None + ) -> None: + if not buffer: + return + + exc_map = { + TimeoutError: WriteTimeout, + anyio.BrokenResourceError: WriteError, + anyio.ClosedResourceError: WriteError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + await self._stream.send(item=buffer) + + async def aclose(self) -> None: + await self._stream.aclose() + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, 
+ timeout: typing.Optional[float] = None, + ) -> AsyncNetworkStream: + exc_map = { + TimeoutError: ConnectTimeout, + anyio.BrokenResourceError: ConnectError, + anyio.EndOfStream: ConnectError, + } + with map_exceptions(exc_map): + try: + with anyio.fail_after(timeout): + ssl_stream = await anyio.streams.tls.TLSStream.wrap( + self._stream, + ssl_context=ssl_context, + hostname=server_hostname, + standard_compatible=False, + server_side=False, + ) + except Exception as exc: # pragma: nocover + await self.aclose() + raise exc + return AnyIOStream(ssl_stream) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object": + return self._stream.extra(anyio.streams.tls.TLSAttribute.ssl_object, None) + if info == "client_addr": + return self._stream.extra(anyio.abc.SocketAttribute.local_address, None) + if info == "server_addr": + return self._stream.extra(anyio.abc.SocketAttribute.remote_address, None) + if info == "socket": + return self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) + if info == "is_readable": + sock = self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) + return is_socket_readable(sock) + return None + + +class AnyIOBackend(AsyncNetworkBackend): + async def connect_tcp( + self, + host: str, + port: int, + timeout: typing.Optional[float] = None, + local_address: typing.Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + if socket_options is None: + socket_options = [] # pragma: no cover + exc_map = { + TimeoutError: ConnectTimeout, + OSError: ConnectError, + anyio.BrokenResourceError: ConnectError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + stream: anyio.abc.ByteStream = await anyio.connect_tcp( + remote_host=host, + remote_port=port, + local_host=local_address, + ) + # By default TCP sockets opened in `asyncio` include TCP_NODELAY. 
+ for option in socket_options: + stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return AnyIOStream(stream) + + async def connect_unix_socket( + self, + path: str, + timeout: typing.Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: # pragma: nocover + if socket_options is None: + socket_options = [] + exc_map = { + TimeoutError: ConnectTimeout, + OSError: ConnectError, + anyio.BrokenResourceError: ConnectError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + stream: anyio.abc.ByteStream = await anyio.connect_unix(path) + for option in socket_options: + stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return AnyIOStream(stream) + + async def sleep(self, seconds: float) -> None: + await anyio.sleep(seconds) # pragma: nocover diff --git a/.venv/Lib/site-packages/httpcore/_backends/auto.py b/.venv/Lib/site-packages/httpcore/_backends/auto.py new file mode 100644 index 00000000..3ac05f4d --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_backends/auto.py @@ -0,0 +1,51 @@ +import typing +from typing import Optional + +from .._synchronization import current_async_library +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream + + +class AutoBackend(AsyncNetworkBackend): + async def _init_backend(self) -> None: + if not (hasattr(self, "_backend")): + backend = current_async_library() + if backend == "trio": + from .trio import TrioBackend + + self._backend: AsyncNetworkBackend = TrioBackend() + else: + from .anyio import AnyIOBackend + + self._backend = AnyIOBackend() + + async def connect_tcp( + self, + host: str, + port: int, + timeout: Optional[float] = None, + local_address: Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + await self._init_backend() + return await self._backend.connect_tcp( + 
host, + port, + timeout=timeout, + local_address=local_address, + socket_options=socket_options, + ) + + async def connect_unix_socket( + self, + path: str, + timeout: Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: # pragma: nocover + await self._init_backend() + return await self._backend.connect_unix_socket( + path, timeout=timeout, socket_options=socket_options + ) + + async def sleep(self, seconds: float) -> None: # pragma: nocover + await self._init_backend() + return await self._backend.sleep(seconds) diff --git a/.venv/Lib/site-packages/httpcore/_backends/base.py b/.venv/Lib/site-packages/httpcore/_backends/base.py new file mode 100644 index 00000000..6cadedb5 --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_backends/base.py @@ -0,0 +1,103 @@ +import ssl +import time +import typing + +SOCKET_OPTION = typing.Union[ + typing.Tuple[int, int, int], + typing.Tuple[int, int, typing.Union[bytes, bytearray]], + typing.Tuple[int, int, None, int], +] + + +class NetworkStream: + def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: + raise NotImplementedError() # pragma: nocover + + def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None: + raise NotImplementedError() # pragma: nocover + + def close(self) -> None: + raise NotImplementedError() # pragma: nocover + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ) -> "NetworkStream": + raise NotImplementedError() # pragma: nocover + + def get_extra_info(self, info: str) -> typing.Any: + return None # pragma: nocover + + +class NetworkBackend: + def connect_tcp( + self, + host: str, + port: int, + timeout: typing.Optional[float] = None, + local_address: typing.Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> NetworkStream: + raise 
NotImplementedError() # pragma: nocover + + def connect_unix_socket( + self, + path: str, + timeout: typing.Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> NetworkStream: + raise NotImplementedError() # pragma: nocover + + def sleep(self, seconds: float) -> None: + time.sleep(seconds) # pragma: nocover + + +class AsyncNetworkStream: + async def read( + self, max_bytes: int, timeout: typing.Optional[float] = None + ) -> bytes: + raise NotImplementedError() # pragma: nocover + + async def write( + self, buffer: bytes, timeout: typing.Optional[float] = None + ) -> None: + raise NotImplementedError() # pragma: nocover + + async def aclose(self) -> None: + raise NotImplementedError() # pragma: nocover + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ) -> "AsyncNetworkStream": + raise NotImplementedError() # pragma: nocover + + def get_extra_info(self, info: str) -> typing.Any: + return None # pragma: nocover + + +class AsyncNetworkBackend: + async def connect_tcp( + self, + host: str, + port: int, + timeout: typing.Optional[float] = None, + local_address: typing.Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + raise NotImplementedError() # pragma: nocover + + async def connect_unix_socket( + self, + path: str, + timeout: typing.Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + raise NotImplementedError() # pragma: nocover + + async def sleep(self, seconds: float) -> None: + raise NotImplementedError() # pragma: nocover diff --git a/.venv/Lib/site-packages/httpcore/_backends/mock.py b/.venv/Lib/site-packages/httpcore/_backends/mock.py new file mode 100644 index 00000000..f7aefebf --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_backends/mock.py @@ -0,0 
+1,142 @@ +import ssl +import typing +from typing import Optional + +from .._exceptions import ReadError +from .base import ( + SOCKET_OPTION, + AsyncNetworkBackend, + AsyncNetworkStream, + NetworkBackend, + NetworkStream, +) + + +class MockSSLObject: + def __init__(self, http2: bool): + self._http2 = http2 + + def selected_alpn_protocol(self) -> str: + return "h2" if self._http2 else "http/1.1" + + +class MockStream(NetworkStream): + def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + self._closed = False + + def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes: + if self._closed: + raise ReadError("Connection closed") + if not self._buffer: + return b"" + return self._buffer.pop(0) + + def write(self, buffer: bytes, timeout: Optional[float] = None) -> None: + pass + + def close(self) -> None: + self._closed = True + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: Optional[str] = None, + timeout: Optional[float] = None, + ) -> NetworkStream: + return self + + def get_extra_info(self, info: str) -> typing.Any: + return MockSSLObject(http2=self._http2) if info == "ssl_object" else None + + def __repr__(self) -> str: + return "" + + +class MockBackend(NetworkBackend): + def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + + def connect_tcp( + self, + host: str, + port: int, + timeout: Optional[float] = None, + local_address: Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> NetworkStream: + return MockStream(list(self._buffer), http2=self._http2) + + def connect_unix_socket( + self, + path: str, + timeout: Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> NetworkStream: + return MockStream(list(self._buffer), http2=self._http2) + + def sleep(self, seconds: float) -> None: 
+ pass + + +class AsyncMockStream(AsyncNetworkStream): + def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + self._closed = False + + async def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes: + if self._closed: + raise ReadError("Connection closed") + if not self._buffer: + return b"" + return self._buffer.pop(0) + + async def write(self, buffer: bytes, timeout: Optional[float] = None) -> None: + pass + + async def aclose(self) -> None: + self._closed = True + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: Optional[str] = None, + timeout: Optional[float] = None, + ) -> AsyncNetworkStream: + return self + + def get_extra_info(self, info: str) -> typing.Any: + return MockSSLObject(http2=self._http2) if info == "ssl_object" else None + + def __repr__(self) -> str: + return "" + + +class AsyncMockBackend(AsyncNetworkBackend): + def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + + async def connect_tcp( + self, + host: str, + port: int, + timeout: Optional[float] = None, + local_address: Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + return AsyncMockStream(list(self._buffer), http2=self._http2) + + async def connect_unix_socket( + self, + path: str, + timeout: Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + return AsyncMockStream(list(self._buffer), http2=self._http2) + + async def sleep(self, seconds: float) -> None: + pass diff --git a/.venv/Lib/site-packages/httpcore/_backends/sync.py b/.venv/Lib/site-packages/httpcore/_backends/sync.py new file mode 100644 index 00000000..7b7b417d --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_backends/sync.py @@ -0,0 +1,239 @@ +import socket +import ssl +import sys 
+import typing +from functools import partial + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ExceptionMapping, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .._utils import is_socket_readable +from .base import SOCKET_OPTION, NetworkBackend, NetworkStream + + +class TLSinTLSStream(NetworkStream): # pragma: no cover + """ + Because the standard `SSLContext.wrap_socket` method does + not work for `SSLSocket` objects, we need this class + to implement TLS stream using an underlying `SSLObject` + instance in order to support TLS on top of TLS. + """ + + # Defined in RFC 8449 + TLS_RECORD_SIZE = 16384 + + def __init__( + self, + sock: socket.socket, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ): + self._sock = sock + self._incoming = ssl.MemoryBIO() + self._outgoing = ssl.MemoryBIO() + + self.ssl_obj = ssl_context.wrap_bio( + incoming=self._incoming, + outgoing=self._outgoing, + server_hostname=server_hostname, + ) + + self._sock.settimeout(timeout) + self._perform_io(self.ssl_obj.do_handshake) + + def _perform_io( + self, + func: typing.Callable[..., typing.Any], + ) -> typing.Any: + ret = None + + while True: + errno = None + try: + ret = func() + except (ssl.SSLWantReadError, ssl.SSLWantWriteError) as e: + errno = e.errno + + self._sock.sendall(self._outgoing.read()) + + if errno == ssl.SSL_ERROR_WANT_READ: + buf = self._sock.recv(self.TLS_RECORD_SIZE) + + if buf: + self._incoming.write(buf) + else: + self._incoming.write_eof() + if errno is None: + return ret + + def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: + exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} + with map_exceptions(exc_map): + self._sock.settimeout(timeout) + return typing.cast( + bytes, self._perform_io(partial(self.ssl_obj.read, max_bytes)) + ) + + def write(self, buffer: bytes, timeout: 
typing.Optional[float] = None) -> None: + exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} + with map_exceptions(exc_map): + self._sock.settimeout(timeout) + while buffer: + nsent = self._perform_io(partial(self.ssl_obj.write, buffer)) + buffer = buffer[nsent:] + + def close(self) -> None: + self._sock.close() + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ) -> "NetworkStream": + raise NotImplementedError() + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object": + return self.ssl_obj + if info == "client_addr": + return self._sock.getsockname() + if info == "server_addr": + return self._sock.getpeername() + if info == "socket": + return self._sock + if info == "is_readable": + return is_socket_readable(self._sock) + return None + + +class SyncStream(NetworkStream): + def __init__(self, sock: socket.socket) -> None: + self._sock = sock + + def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: + exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} + with map_exceptions(exc_map): + self._sock.settimeout(timeout) + return self._sock.recv(max_bytes) + + def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None: + if not buffer: + return + + exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} + with map_exceptions(exc_map): + while buffer: + self._sock.settimeout(timeout) + n = self._sock.send(buffer) + buffer = buffer[n:] + + def close(self) -> None: + self._sock.close() + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ) -> NetworkStream: + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } + with map_exceptions(exc_map): + try: + if isinstance(self._sock, ssl.SSLSocket): # 
pragma: no cover + # If the underlying socket has already been upgraded + # to the TLS layer (i.e. is an instance of SSLSocket), + # we need some additional smarts to support TLS-in-TLS. + return TLSinTLSStream( + self._sock, ssl_context, server_hostname, timeout + ) + else: + self._sock.settimeout(timeout) + sock = ssl_context.wrap_socket( + self._sock, server_hostname=server_hostname + ) + except Exception as exc: # pragma: nocover + self.close() + raise exc + return SyncStream(sock) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object" and isinstance(self._sock, ssl.SSLSocket): + return self._sock._sslobj # type: ignore + if info == "client_addr": + return self._sock.getsockname() + if info == "server_addr": + return self._sock.getpeername() + if info == "socket": + return self._sock + if info == "is_readable": + return is_socket_readable(self._sock) + return None + + +class SyncBackend(NetworkBackend): + def connect_tcp( + self, + host: str, + port: int, + timeout: typing.Optional[float] = None, + local_address: typing.Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> NetworkStream: + # Note that we automatically include `TCP_NODELAY` + # in addition to any other custom socket options. 
+ if socket_options is None: + socket_options = [] # pragma: no cover + address = (host, port) + source_address = None if local_address is None else (local_address, 0) + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } + + with map_exceptions(exc_map): + sock = socket.create_connection( + address, + timeout, + source_address=source_address, + ) + for option in socket_options: + sock.setsockopt(*option) # pragma: no cover + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + return SyncStream(sock) + + def connect_unix_socket( + self, + path: str, + timeout: typing.Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> NetworkStream: # pragma: nocover + if sys.platform == "win32": + raise RuntimeError( + "Attempted to connect to a UNIX socket on a Windows system." + ) + if socket_options is None: + socket_options = [] + + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } + with map_exceptions(exc_map): + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + for option in socket_options: + sock.setsockopt(*option) + sock.settimeout(timeout) + sock.connect(path) + return SyncStream(sock) diff --git a/.venv/Lib/site-packages/httpcore/_backends/trio.py b/.venv/Lib/site-packages/httpcore/_backends/trio.py new file mode 100644 index 00000000..b1626d28 --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_backends/trio.py @@ -0,0 +1,161 @@ +import ssl +import typing + +import trio + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ExceptionMapping, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream + + +class TrioStream(AsyncNetworkStream): + def __init__(self, stream: trio.abc.Stream) -> None: + self._stream = stream + + async def read( + self, max_bytes: int, timeout: typing.Optional[float] = None + ) -> 
bytes: + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ReadTimeout, + trio.BrokenResourceError: ReadError, + trio.ClosedResourceError: ReadError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + data: bytes = await self._stream.receive_some(max_bytes=max_bytes) + return data + + async def write( + self, buffer: bytes, timeout: typing.Optional[float] = None + ) -> None: + if not buffer: + return + + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: WriteTimeout, + trio.BrokenResourceError: WriteError, + trio.ClosedResourceError: WriteError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + await self._stream.send_all(data=buffer) + + async def aclose(self) -> None: + await self._stream.aclose() + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ) -> AsyncNetworkStream: + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + } + ssl_stream = trio.SSLStream( + self._stream, + ssl_context=ssl_context, + server_hostname=server_hostname, + https_compatible=True, + server_side=False, + ) + with map_exceptions(exc_map): + try: + with trio.fail_after(timeout_or_inf): + await ssl_stream.do_handshake() + except Exception as exc: # pragma: nocover + await self.aclose() + raise exc + return TrioStream(ssl_stream) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object" and isinstance(self._stream, trio.SSLStream): + # Type checkers cannot see `_ssl_object` attribute because trio._ssl.SSLStream uses __getattr__/__setattr__. 
+ # Tracked at https://github.com/python-trio/trio/issues/542 + return self._stream._ssl_object # type: ignore[attr-defined] + if info == "client_addr": + return self._get_socket_stream().socket.getsockname() + if info == "server_addr": + return self._get_socket_stream().socket.getpeername() + if info == "socket": + stream = self._stream + while isinstance(stream, trio.SSLStream): + stream = stream.transport_stream + assert isinstance(stream, trio.SocketStream) + return stream.socket + if info == "is_readable": + socket = self.get_extra_info("socket") + return socket.is_readable() + return None + + def _get_socket_stream(self) -> trio.SocketStream: + stream = self._stream + while isinstance(stream, trio.SSLStream): + stream = stream.transport_stream + assert isinstance(stream, trio.SocketStream) + return stream + + +class TrioBackend(AsyncNetworkBackend): + async def connect_tcp( + self, + host: str, + port: int, + timeout: typing.Optional[float] = None, + local_address: typing.Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + # By default for TCP sockets, trio enables TCP_NODELAY. 
+ # https://trio.readthedocs.io/en/stable/reference-io.html#trio.SocketStream + if socket_options is None: + socket_options = [] # pragma: no cover + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + OSError: ConnectError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + stream: trio.abc.Stream = await trio.open_tcp_stream( + host=host, port=port, local_address=local_address + ) + for option in socket_options: + stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return TrioStream(stream) + + async def connect_unix_socket( + self, + path: str, + timeout: typing.Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: # pragma: nocover + if socket_options is None: + socket_options = [] + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + OSError: ConnectError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + stream: trio.abc.Stream = await trio.open_unix_socket(path) + for option in socket_options: + stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return TrioStream(stream) + + async def sleep(self, seconds: float) -> None: + await trio.sleep(seconds) # pragma: nocover diff --git a/.venv/Lib/site-packages/httpcore/_exceptions.py b/.venv/Lib/site-packages/httpcore/_exceptions.py new file mode 100644 index 00000000..81e7fc61 --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_exceptions.py @@ -0,0 +1,81 @@ +import contextlib +from typing import Iterator, Mapping, Type + +ExceptionMapping = Mapping[Type[Exception], Type[Exception]] + + +@contextlib.contextmanager +def map_exceptions(map: ExceptionMapping) -> Iterator[None]: + try: + yield + except 
Exception as exc: # noqa: PIE786 + for from_exc, to_exc in map.items(): + if isinstance(exc, from_exc): + raise to_exc(exc) from exc + raise # pragma: nocover + + +class ConnectionNotAvailable(Exception): + pass + + +class ProxyError(Exception): + pass + + +class UnsupportedProtocol(Exception): + pass + + +class ProtocolError(Exception): + pass + + +class RemoteProtocolError(ProtocolError): + pass + + +class LocalProtocolError(ProtocolError): + pass + + +# Timeout errors + + +class TimeoutException(Exception): + pass + + +class PoolTimeout(TimeoutException): + pass + + +class ConnectTimeout(TimeoutException): + pass + + +class ReadTimeout(TimeoutException): + pass + + +class WriteTimeout(TimeoutException): + pass + + +# Network errors + + +class NetworkError(Exception): + pass + + +class ConnectError(NetworkError): + pass + + +class ReadError(NetworkError): + pass + + +class WriteError(NetworkError): + pass diff --git a/.venv/Lib/site-packages/httpcore/_models.py b/.venv/Lib/site-packages/httpcore/_models.py new file mode 100644 index 00000000..dadee79f --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_models.py @@ -0,0 +1,492 @@ +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Iterable, + Iterator, + List, + Mapping, + MutableMapping, + Optional, + Sequence, + Tuple, + Union, +) +from urllib.parse import urlparse + +# Functions for typechecking... + + +HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]] +HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]] +HeaderTypes = Union[HeadersAsSequence, HeadersAsMapping, None] + +Extensions = MutableMapping[str, Any] + + +def enforce_bytes(value: Union[bytes, str], *, name: str) -> bytes: + """ + Any arguments that are ultimately represented as bytes can be specified + either as bytes or as strings. + + However we enforce that any string arguments must only contain characters in + the plain ASCII range. chr(0)...chr(127). 
If you need to use characters + outside that range then be precise, and use a byte-wise argument. + """ + if isinstance(value, str): + try: + return value.encode("ascii") + except UnicodeEncodeError: + raise TypeError(f"{name} strings may not include unicode characters.") + elif isinstance(value, bytes): + return value + + seen_type = type(value).__name__ + raise TypeError(f"{name} must be bytes or str, but got {seen_type}.") + + +def enforce_url(value: Union["URL", bytes, str], *, name: str) -> "URL": + """ + Type check for URL parameters. + """ + if isinstance(value, (bytes, str)): + return URL(value) + elif isinstance(value, URL): + return value + + seen_type = type(value).__name__ + raise TypeError(f"{name} must be a URL, bytes, or str, but got {seen_type}.") + + +def enforce_headers( + value: Union[HeadersAsMapping, HeadersAsSequence, None] = None, *, name: str +) -> List[Tuple[bytes, bytes]]: + """ + Convienence function that ensure all items in request or response headers + are either bytes or strings in the plain ASCII range. + """ + if value is None: + return [] + elif isinstance(value, Mapping): + return [ + ( + enforce_bytes(k, name="header name"), + enforce_bytes(v, name="header value"), + ) + for k, v in value.items() + ] + elif isinstance(value, Sequence): + return [ + ( + enforce_bytes(k, name="header name"), + enforce_bytes(v, name="header value"), + ) + for k, v in value + ] + + seen_type = type(value).__name__ + raise TypeError( + f"{name} must be a mapping or sequence of two-tuples, but got {seen_type}." 
+ ) + + +def enforce_stream( + value: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None], *, name: str +) -> Union[Iterable[bytes], AsyncIterable[bytes]]: + if value is None: + return ByteStream(b"") + elif isinstance(value, bytes): + return ByteStream(value) + return value + + +# * https://tools.ietf.org/html/rfc3986#section-3.2.3 +# * https://url.spec.whatwg.org/#url-miscellaneous +# * https://url.spec.whatwg.org/#scheme-state +DEFAULT_PORTS = { + b"ftp": 21, + b"http": 80, + b"https": 443, + b"ws": 80, + b"wss": 443, +} + + +def include_request_headers( + headers: List[Tuple[bytes, bytes]], + *, + url: "URL", + content: Union[None, bytes, Iterable[bytes], AsyncIterable[bytes]], +) -> List[Tuple[bytes, bytes]]: + headers_set = set(k.lower() for k, v in headers) + + if b"host" not in headers_set: + default_port = DEFAULT_PORTS.get(url.scheme) + if url.port is None or url.port == default_port: + header_value = url.host + else: + header_value = b"%b:%d" % (url.host, url.port) + headers = [(b"Host", header_value)] + headers + + if ( + content is not None + and b"content-length" not in headers_set + and b"transfer-encoding" not in headers_set + ): + if isinstance(content, bytes): + content_length = str(len(content)).encode("ascii") + headers += [(b"Content-Length", content_length)] + else: + headers += [(b"Transfer-Encoding", b"chunked")] # pragma: nocover + + return headers + + +# Interfaces for byte streams... + + +class ByteStream: + """ + A container for non-streaming content, and that supports both sync and async + stream iteration. 
+ """ + + def __init__(self, content: bytes) -> None: + self._content = content + + def __iter__(self) -> Iterator[bytes]: + yield self._content + + async def __aiter__(self) -> AsyncIterator[bytes]: + yield self._content + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{len(self._content)} bytes]>" + + +class Origin: + def __init__(self, scheme: bytes, host: bytes, port: int) -> None: + self.scheme = scheme + self.host = host + self.port = port + + def __eq__(self, other: Any) -> bool: + return ( + isinstance(other, Origin) + and self.scheme == other.scheme + and self.host == other.host + and self.port == other.port + ) + + def __str__(self) -> str: + scheme = self.scheme.decode("ascii") + host = self.host.decode("ascii") + port = str(self.port) + return f"{scheme}://{host}:{port}" + + +class URL: + """ + Represents the URL against which an HTTP request may be made. + + The URL may either be specified as a plain string, for convienence: + + ```python + url = httpcore.URL("https://www.example.com/") + ``` + + Or be constructed with explicitily pre-parsed components: + + ```python + url = httpcore.URL(scheme=b'https', host=b'www.example.com', port=None, target=b'/') + ``` + + Using this second more explicit style allows integrations that are using + `httpcore` to pass through URLs that have already been parsed in order to use + libraries such as `rfc-3986` rather than relying on the stdlib. It also ensures + that URL parsing is treated identically at both the networking level and at any + higher layers of abstraction. + + The four components are important here, as they allow the URL to be precisely + specified in a pre-parsed format. They also allow certain types of request to + be created that could not otherwise be expressed. + + For example, an HTTP request to `http://www.example.com/` forwarded via a proxy + at `http://localhost:8080`... 
+ + ```python + # Constructs an HTTP request with a complete URL as the target: + # GET https://www.example.com/ HTTP/1.1 + url = httpcore.URL( + scheme=b'http', + host=b'localhost', + port=8080, + target=b'https://www.example.com/' + ) + request = httpcore.Request( + method="GET", + url=url + ) + ``` + + Another example is constructing an `OPTIONS *` request... + + ```python + # Constructs an 'OPTIONS *' HTTP request: + # OPTIONS * HTTP/1.1 + url = httpcore.URL(scheme=b'https', host=b'www.example.com', target=b'*') + request = httpcore.Request(method="OPTIONS", url=url) + ``` + + This kind of request is not possible to formulate with a URL string, + because the `/` delimiter is always used to demark the target from the + host/port portion of the URL. + + For convenience, string-like arguments may be specified either as strings or + as bytes. However, once a request is being issue over-the-wire, the URL + components are always ultimately required to be a bytewise representation. + + In order to avoid any ambiguity over character encodings, when strings are used + as arguments, they must be strictly limited to the ASCII range `chr(0)`-`chr(127)`. + If you require a bytewise representation that is outside this range you must + handle the character encoding directly, and pass a bytes instance. + """ + + def __init__( + self, + url: Union[bytes, str] = "", + *, + scheme: Union[bytes, str] = b"", + host: Union[bytes, str] = b"", + port: Optional[int] = None, + target: Union[bytes, str] = b"", + ) -> None: + """ + Parameters: + url: The complete URL as a string or bytes. + scheme: The URL scheme as a string or bytes. + Typically either `"http"` or `"https"`. + host: The URL host as a string or bytes. Such as `"www.example.com"`. + port: The port to connect to. Either an integer or `None`. + target: The target of the HTTP request. Such as `"/items?search=red"`. 
+ """ + if url: + parsed = urlparse(enforce_bytes(url, name="url")) + self.scheme = parsed.scheme + self.host = parsed.hostname or b"" + self.port = parsed.port + self.target = (parsed.path or b"/") + ( + b"?" + parsed.query if parsed.query else b"" + ) + else: + self.scheme = enforce_bytes(scheme, name="scheme") + self.host = enforce_bytes(host, name="host") + self.port = port + self.target = enforce_bytes(target, name="target") + + @property + def origin(self) -> Origin: + default_port = { + b"http": 80, + b"https": 443, + b"ws": 80, + b"wss": 443, + b"socks5": 1080, + }[self.scheme] + return Origin( + scheme=self.scheme, host=self.host, port=self.port or default_port + ) + + def __eq__(self, other: Any) -> bool: + return ( + isinstance(other, URL) + and other.scheme == self.scheme + and other.host == self.host + and other.port == self.port + and other.target == self.target + ) + + def __bytes__(self) -> bytes: + if self.port is None: + return b"%b://%b%b" % (self.scheme, self.host, self.target) + return b"%b://%b:%d%b" % (self.scheme, self.host, self.port, self.target) + + def __repr__(self) -> str: + return ( + f"{self.__class__.__name__}(scheme={self.scheme!r}, " + f"host={self.host!r}, port={self.port!r}, target={self.target!r})" + ) + + +class Request: + """ + An HTTP request. + """ + + def __init__( + self, + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None] = None, + extensions: Optional[Extensions] = None, + ) -> None: + """ + Parameters: + method: The HTTP request method, either as a string or bytes. + For example: `GET`. + url: The request URL, either as a `URL` instance, or as a string or bytes. + For example: `"https://www.example.com".` + headers: The HTTP request headers. + content: The content of the request body. + extensions: A dictionary of optional extra information included on + the request. 
Possible keys include `"timeout"`, and `"trace"`. + """ + self.method: bytes = enforce_bytes(method, name="method") + self.url: URL = enforce_url(url, name="url") + self.headers: List[Tuple[bytes, bytes]] = enforce_headers( + headers, name="headers" + ) + self.stream: Union[Iterable[bytes], AsyncIterable[bytes]] = enforce_stream( + content, name="content" + ) + self.extensions = {} if extensions is None else extensions + + if "target" in self.extensions: + self.url = URL( + scheme=self.url.scheme, + host=self.url.host, + port=self.url.port, + target=self.extensions["target"], + ) + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.method!r}]>" + + +class Response: + """ + An HTTP response. + """ + + def __init__( + self, + status: int, + *, + headers: HeaderTypes = None, + content: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None] = None, + extensions: Optional[Extensions] = None, + ) -> None: + """ + Parameters: + status: The HTTP status code of the response. For example `200`. + headers: The HTTP response headers. + content: The content of the response body. + extensions: A dictionary of optional extra information included on + the responseself.Possible keys include `"http_version"`, + `"reason_phrase"`, and `"network_stream"`. + """ + self.status: int = status + self.headers: List[Tuple[bytes, bytes]] = enforce_headers( + headers, name="headers" + ) + self.stream: Union[Iterable[bytes], AsyncIterable[bytes]] = enforce_stream( + content, name="content" + ) + self.extensions = {} if extensions is None else extensions + + self._stream_consumed = False + + @property + def content(self) -> bytes: + if not hasattr(self, "_content"): + if isinstance(self.stream, Iterable): + raise RuntimeError( + "Attempted to access 'response.content' on a streaming response. " + "Call 'response.read()' first." + ) + else: + raise RuntimeError( + "Attempted to access 'response.content' on a streaming response. " + "Call 'await response.aread()' first." 
+ ) + return self._content + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.status}]>" + + # Sync interface... + + def read(self) -> bytes: + if not isinstance(self.stream, Iterable): # pragma: nocover + raise RuntimeError( + "Attempted to read an asynchronous response using 'response.read()'. " + "You should use 'await response.aread()' instead." + ) + if not hasattr(self, "_content"): + self._content = b"".join([part for part in self.iter_stream()]) + return self._content + + def iter_stream(self) -> Iterator[bytes]: + if not isinstance(self.stream, Iterable): # pragma: nocover + raise RuntimeError( + "Attempted to stream an asynchronous response using 'for ... in " + "response.iter_stream()'. " + "You should use 'async for ... in response.aiter_stream()' instead." + ) + if self._stream_consumed: + raise RuntimeError( + "Attempted to call 'for ... in response.iter_stream()' more than once." + ) + self._stream_consumed = True + for chunk in self.stream: + yield chunk + + def close(self) -> None: + if not isinstance(self.stream, Iterable): # pragma: nocover + raise RuntimeError( + "Attempted to close an asynchronous response using 'response.close()'. " + "You should use 'await response.aclose()' instead." + ) + if hasattr(self.stream, "close"): + self.stream.close() + + # Async interface... + + async def aread(self) -> bytes: + if not isinstance(self.stream, AsyncIterable): # pragma: nocover + raise RuntimeError( + "Attempted to read an synchronous response using " + "'await response.aread()'. " + "You should use 'response.read()' instead." + ) + if not hasattr(self, "_content"): + self._content = b"".join([part async for part in self.aiter_stream()]) + return self._content + + async def aiter_stream(self) -> AsyncIterator[bytes]: + if not isinstance(self.stream, AsyncIterable): # pragma: nocover + raise RuntimeError( + "Attempted to stream an synchronous response using 'async for ... in " + "response.aiter_stream()'. 
" + "You should use 'for ... in response.iter_stream()' instead." + ) + if self._stream_consumed: + raise RuntimeError( + "Attempted to call 'async for ... in response.aiter_stream()' " + "more than once." + ) + self._stream_consumed = True + async for chunk in self.stream: + yield chunk + + async def aclose(self) -> None: + if not isinstance(self.stream, AsyncIterable): # pragma: nocover + raise RuntimeError( + "Attempted to close a synchronous response using " + "'await response.aclose()'. " + "You should use 'response.close()' instead." + ) + if hasattr(self.stream, "aclose"): + await self.stream.aclose() diff --git a/.venv/Lib/site-packages/httpcore/_ssl.py b/.venv/Lib/site-packages/httpcore/_ssl.py new file mode 100644 index 00000000..c99c5a67 --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_ssl.py @@ -0,0 +1,9 @@ +import ssl + +import certifi + + +def default_ssl_context() -> ssl.SSLContext: + context = ssl.create_default_context() + context.load_verify_locations(certifi.where()) + return context diff --git a/.venv/Lib/site-packages/httpcore/_sync/__init__.py b/.venv/Lib/site-packages/httpcore/_sync/__init__.py new file mode 100644 index 00000000..b476d76d --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_sync/__init__.py @@ -0,0 +1,39 @@ +from .connection import HTTPConnection +from .connection_pool import ConnectionPool +from .http11 import HTTP11Connection +from .http_proxy import HTTPProxy +from .interfaces import ConnectionInterface + +try: + from .http2 import HTTP2Connection +except ImportError: # pragma: nocover + + class HTTP2Connection: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use http2 support, but the `h2` package is not " + "installed. Use 'pip install httpcore[http2]'." 
+ ) + + +try: + from .socks_proxy import SOCKSProxy +except ImportError: # pragma: nocover + + class SOCKSProxy: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use SOCKS support, but the `socksio` package is not " + "installed. Use 'pip install httpcore[socks]'." + ) + + +__all__ = [ + "HTTPConnection", + "ConnectionPool", + "HTTPProxy", + "HTTP11Connection", + "HTTP2Connection", + "ConnectionInterface", + "SOCKSProxy", +] diff --git a/.venv/Lib/site-packages/httpcore/_sync/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/_sync/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..03c2e4a6 Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_sync/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_sync/__pycache__/connection.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/_sync/__pycache__/connection.cpython-311.pyc new file mode 100644 index 00000000..8d8e13a7 Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_sync/__pycache__/connection.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_sync/__pycache__/connection_pool.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/_sync/__pycache__/connection_pool.cpython-311.pyc new file mode 100644 index 00000000..2c53bbee Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_sync/__pycache__/connection_pool.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_sync/__pycache__/http11.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/_sync/__pycache__/http11.cpython-311.pyc new file mode 100644 index 00000000..f5f2ddd1 Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_sync/__pycache__/http11.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_sync/__pycache__/http2.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/_sync/__pycache__/http2.cpython-311.pyc new file mode 100644 
index 00000000..4bbecad8 Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_sync/__pycache__/http2.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_sync/__pycache__/http_proxy.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/_sync/__pycache__/http_proxy.cpython-311.pyc new file mode 100644 index 00000000..03938b2c Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_sync/__pycache__/http_proxy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_sync/__pycache__/interfaces.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/_sync/__pycache__/interfaces.cpython-311.pyc new file mode 100644 index 00000000..0f94f00d Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_sync/__pycache__/interfaces.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_sync/__pycache__/socks_proxy.cpython-311.pyc b/.venv/Lib/site-packages/httpcore/_sync/__pycache__/socks_proxy.cpython-311.pyc new file mode 100644 index 00000000..3d275f9b Binary files /dev/null and b/.venv/Lib/site-packages/httpcore/_sync/__pycache__/socks_proxy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpcore/_sync/connection.py b/.venv/Lib/site-packages/httpcore/_sync/connection.py new file mode 100644 index 00000000..c3890f34 --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_sync/connection.py @@ -0,0 +1,220 @@ +import itertools +import logging +import ssl +from types import TracebackType +from typing import Iterable, Iterator, Optional, Type + +from .._backends.sync import SyncBackend +from .._backends.base import SOCKET_OPTION, NetworkBackend, NetworkStream +from .._exceptions import ConnectError, ConnectTimeout +from .._models import Origin, Request, Response +from .._ssl import default_ssl_context +from .._synchronization import Lock +from .._trace import Trace +from .http11 import HTTP11Connection +from .interfaces import ConnectionInterface + +RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. 
+ + +logger = logging.getLogger("httpcore.connection") + + +def exponential_backoff(factor: float) -> Iterator[float]: + """ + Generate a geometric sequence that has a ratio of 2 and starts with 0. + + For example: + - `factor = 2`: `0, 2, 4, 8, 16, 32, 64, ...` + - `factor = 3`: `0, 3, 6, 12, 24, 48, 96, ...` + """ + yield 0 + for n in itertools.count(): + yield factor * 2**n + + +class HTTPConnection(ConnectionInterface): + def __init__( + self, + origin: Origin, + ssl_context: Optional[ssl.SSLContext] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: Optional[str] = None, + uds: Optional[str] = None, + network_backend: Optional[NetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + self._origin = origin + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._network_backend: NetworkBackend = ( + SyncBackend() if network_backend is None else network_backend + ) + self._connection: Optional[ConnectionInterface] = None + self._connect_failed: bool = False + self._request_lock = Lock() + self._socket_options = socket_options + + def handle_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection to {self._origin}" + ) + + try: + with self._request_lock: + if self._connection is None: + stream = self._connect(request) + + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import HTTP2Connection + + self._connection = HTTP2Connection( + origin=self._origin, + stream=stream, + 
keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = HTTP11Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except BaseException as exc: + self._connect_failed = True + raise exc + + return self._connection.handle_request(request) + + def _connect(self, request: Request) -> NetworkStream: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + retries_left = self._retries + delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) + + while True: + try: + if self._uds is None: + kwargs = { + "host": self._origin.host.decode("ascii"), + "port": self._origin.port, + "local_address": self._local_address, + "timeout": timeout, + "socket_options": self._socket_options, + } + with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + else: + kwargs = { + "path": self._uds, + "timeout": timeout, + "socket_options": self._socket_options, + } + with Trace( + "connect_unix_socket", logger, request, kwargs + ) as trace: + stream = self._network_backend.connect_unix_socket( + **kwargs + ) + trace.return_value = stream + + if self._origin.scheme in (b"https", b"wss"): + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + return stream + except (ConnectError, ConnectTimeout): + if retries_left <= 0: + raise + retries_left -= 1 + delay = next(delays) + with Trace("retry", logger, 
request, kwargs) as trace: + self._network_backend.sleep(delay) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def close(self) -> None: + if self._connection is not None: + with Trace("close", logger, None, {}): + self._connection.close() + + def is_available(self) -> bool: + if self._connection is None: + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
+ + def __enter__(self) -> "HTTPConnection": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + self.close() diff --git a/.venv/Lib/site-packages/httpcore/_sync/connection_pool.py b/.venv/Lib/site-packages/httpcore/_sync/connection_pool.py new file mode 100644 index 00000000..8dcf348c --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_sync/connection_pool.py @@ -0,0 +1,380 @@ +import ssl +import sys +from types import TracebackType +from typing import Iterable, Iterator, Iterable, List, Optional, Type + +from .._backends.sync import SyncBackend +from .._backends.base import SOCKET_OPTION, NetworkBackend +from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol +from .._models import Origin, Request, Response +from .._synchronization import Event, ShieldCancellation, ThreadLock +from .connection import HTTPConnection +from .interfaces import ConnectionInterface, RequestInterface + + +class PoolRequest: + def __init__(self, request: Request) -> None: + self.request = request + self.connection: Optional[ConnectionInterface] = None + self._connection_acquired = Event() + + def assign_to_connection( + self, connection: Optional[ConnectionInterface] + ) -> None: + self.connection = connection + self._connection_acquired.set() + + def clear_connection(self) -> None: + self.connection = None + self._connection_acquired = Event() + + def wait_for_connection( + self, timeout: Optional[float] = None + ) -> ConnectionInterface: + if self.connection is None: + self._connection_acquired.wait(timeout=timeout) + assert self.connection is not None + return self.connection + + def is_queued(self) -> bool: + return self.connection is None + + +class ConnectionPool(RequestInterface): + """ + A connection pool for making HTTP requests. 
+ """ + + def __init__( + self, + ssl_context: Optional[ssl.SSLContext] = None, + max_connections: Optional[int] = 10, + max_keepalive_connections: Optional[int] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: Optional[str] = None, + uds: Optional[str] = None, + network_backend: Optional[NetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish a + connection. + local_address: Local address to connect from. Can also be used to connect + using a particular address family. Using `local_address="0.0.0.0"` + will connect using an `AF_INET` address (IPv4), while using + `local_address="::"` will connect using an `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. + socket_options: Socket options that have to be included + in the TCP socket when the connection was established. 
+ """ + self._ssl_context = ssl_context + + self._max_connections = ( + sys.maxsize if max_connections is None else max_connections + ) + self._max_keepalive_connections = ( + sys.maxsize + if max_keepalive_connections is None + else max_keepalive_connections + ) + self._max_keepalive_connections = min( + self._max_connections, self._max_keepalive_connections + ) + + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._network_backend = ( + SyncBackend() if network_backend is None else network_backend + ) + self._socket_options = socket_options + + # The mutable state on a connection pool is the queue of incoming requests, + # and the set of connections that are servicing those requests. + self._connections: List[ConnectionInterface] = [] + self._requests: List[PoolRequest] = [] + + # We only mutate the state of the connection pool within an 'optional_thread_lock' + # context. This holds a threading lock unless we're running in async mode, + # in which case it is a no-op. + self._optional_thread_lock = ThreadLock() + + def create_connection(self, origin: Origin) -> ConnectionInterface: + return HTTPConnection( + origin=origin, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + retries=self._retries, + local_address=self._local_address, + uds=self._uds, + network_backend=self._network_backend, + socket_options=self._socket_options, + ) + + @property + def connections(self) -> List[ConnectionInterface]: + """ + Return a list of the connections currently in the pool. + + For example: + + ```python + >>> pool.connections + [ + , + , + , + ] + ``` + """ + return list(self._connections) + + def handle_request(self, request: Request) -> Response: + """ + Send an HTTP request, and return an HTTP response. 
+ + This is the core implementation that is called into by `.request()` or `.stream()`. + """ + scheme = request.url.scheme.decode() + if scheme == "": + raise UnsupportedProtocol( + "Request URL is missing an 'http://' or 'https://' protocol." + ) + if scheme not in ("http", "https", "ws", "wss"): + raise UnsupportedProtocol( + f"Request URL has an unsupported protocol '{scheme}://'." + ) + + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("pool", None) + + with self._optional_thread_lock: + # Add the incoming request to our request queue. + pool_request = PoolRequest(request) + self._requests.append(pool_request) + + try: + while True: + with self._optional_thread_lock: + # Assign incoming requests to available connections, + # closing or creating new connections as required. + closing = self._assign_requests_to_connections() + self._close_connections(closing) + + # Wait until this request has an assigned connection. + connection = pool_request.wait_for_connection(timeout=timeout) + + try: + # Send the request on the assigned connection. + response = connection.handle_request( + pool_request.request + ) + except ConnectionNotAvailable: + # In some cases a connection may initially be available to + # handle a request, but then become unavailable. + # + # In this case we clear the connection and try again. + pool_request.clear_connection() + else: + break # pragma: nocover + + except BaseException as exc: + with self._optional_thread_lock: + # For any exception or cancellation we remove the request from + # the queue, and then re-assign requests to connections. + self._requests.remove(pool_request) + closing = self._assign_requests_to_connections() + + self._close_connections(closing) + raise exc from None + + # Return the response. Note that in this case we still have to manage + # the point at which the response is closed. 
+ assert isinstance(response.stream, Iterable) + return Response( + status=response.status, + headers=response.headers, + content=PoolByteStream( + stream=response.stream, pool_request=pool_request, pool=self + ), + extensions=response.extensions, + ) + + def _assign_requests_to_connections(self) -> List[ConnectionInterface]: + """ + Manage the state of the connection pool, assigning incoming + requests to connections as available. + + Called whenever a new request is added or removed from the pool. + + Any closing connections are returned, allowing the I/O for closing + those connections to be handled seperately. + """ + closing_connections = [] + + # First we handle cleaning up any connections that are closed, + # have expired their keep-alive, or surplus idle connections. + for connection in list(self._connections): + if connection.is_closed(): + # log: "removing closed connection" + self._connections.remove(connection) + elif connection.has_expired(): + # log: "closing expired connection" + self._connections.remove(connection) + closing_connections.append(connection) + elif ( + connection.is_idle() + and len([connection.is_idle() for connection in self._connections]) + > self._max_keepalive_connections + ): + # log: "closing idle connection" + self._connections.remove(connection) + closing_connections.append(connection) + + # Assign queued requests to connections. + queued_requests = [request for request in self._requests if request.is_queued()] + for pool_request in queued_requests: + origin = pool_request.request.url.origin + avilable_connections = [ + connection + for connection in self._connections + if connection.can_handle_request(origin) and connection.is_available() + ] + idle_connections = [ + connection for connection in self._connections if connection.is_idle() + ] + + # There are three cases for how we may be able to handle the request: + # + # 1. There is an existing connection that can handle the request. + # 2. 
We can create a new connection to handle the request. + # 3. We can close an idle connection and then create a new connection + # to handle the request. + if avilable_connections: + # log: "reusing existing connection" + connection = avilable_connections[0] + pool_request.assign_to_connection(connection) + elif len(self._connections) < self._max_connections: + # log: "creating new connection" + connection = self.create_connection(origin) + self._connections.append(connection) + pool_request.assign_to_connection(connection) + elif idle_connections: + # log: "closing idle connection" + connection = idle_connections[0] + self._connections.remove(connection) + closing_connections.append(connection) + # log: "creating new connection" + connection = self.create_connection(origin) + self._connections.append(connection) + pool_request.assign_to_connection(connection) + + return closing_connections + + def _close_connections(self, closing: List[ConnectionInterface]) -> None: + # Close connections which have been removed from the pool. + with ShieldCancellation(): + for connection in closing: + connection.close() + + def close(self) -> None: + # Explicitly close the connection pool. + # Clears all existing requests and connections. 
+ with self._optional_thread_lock: + closing_connections = list(self._connections) + self._connections = [] + self._close_connections(closing_connections) + + def __enter__(self) -> "ConnectionPool": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + self.close() + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + with self._optional_thread_lock: + request_is_queued = [request.is_queued() for request in self._requests] + connection_is_idle = [ + connection.is_idle() for connection in self._connections + ] + + num_active_requests = request_is_queued.count(False) + num_queued_requests = request_is_queued.count(True) + num_active_connections = connection_is_idle.count(False) + num_idle_connections = connection_is_idle.count(True) + + requests_info = ( + f"Requests: {num_active_requests} active, {num_queued_requests} queued" + ) + connection_info = ( + f"Connections: {num_active_connections} active, {num_idle_connections} idle" + ) + + return f"<{class_name} [{requests_info} | {connection_info}]>" + + +class PoolByteStream: + def __init__( + self, + stream: Iterable[bytes], + pool_request: PoolRequest, + pool: ConnectionPool, + ) -> None: + self._stream = stream + self._pool_request = pool_request + self._pool = pool + self._closed = False + + def __iter__(self) -> Iterator[bytes]: + try: + for part in self._stream: + yield part + except BaseException as exc: + self.close() + raise exc from None + + def close(self) -> None: + if not self._closed: + self._closed = True + with ShieldCancellation(): + if hasattr(self._stream, "close"): + self._stream.close() + + with self._pool._optional_thread_lock: + self._pool._requests.remove(self._pool_request) + closing = self._pool._assign_requests_to_connections() + + self._pool._close_connections(closing) diff --git a/.venv/Lib/site-packages/httpcore/_sync/http11.py 
b/.venv/Lib/site-packages/httpcore/_sync/http11.py new file mode 100644 index 00000000..a74ff8e8 --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_sync/http11.py @@ -0,0 +1,386 @@ +import enum +import logging +import ssl +import time +from types import TracebackType +from typing import ( + Any, + Iterable, + Iterator, + List, + Optional, + Tuple, + Type, + Union, +) + +import h11 + +from .._backends.base import NetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, + WriteError, + map_exceptions, +) +from .._models import Origin, Request, Response +from .._synchronization import Lock, ShieldCancellation +from .._trace import Trace +from .interfaces import ConnectionInterface + +logger = logging.getLogger("httpcore.http11") + + +# A subset of `h11.Event` types supported by `_send_event` +H11SendEvent = Union[ + h11.Request, + h11.Data, + h11.EndOfMessage, +] + + +class HTTPConnectionState(enum.IntEnum): + NEW = 0 + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class HTTP11Connection(ConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024 + + def __init__( + self, + origin: Origin, + stream: NetworkStream, + keepalive_expiry: Optional[float] = None, + ) -> None: + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: Optional[float] = keepalive_expiry + self._expire_at: Optional[float] = None + self._state = HTTPConnectionState.NEW + self._state_lock = Lock() + self._request_count = 0 + self._h11_state = h11.Connection( + our_role=h11.CLIENT, + max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE, + ) + + def handle_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection " + f"to {self._origin}" + ) + + with self._state_lock: + if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE): + 
self._request_count += 1 + self._state = HTTPConnectionState.ACTIVE + self._expire_at = None + else: + raise ConnectionNotAvailable() + + try: + kwargs = {"request": request} + try: + with Trace( + "send_request_headers", logger, request, kwargs + ) as trace: + self._send_request_headers(**kwargs) + with Trace("send_request_body", logger, request, kwargs) as trace: + self._send_request_body(**kwargs) + except WriteError: + # If we get a write error while we're writing the request, + # then we supress this error and move on to attempting to + # read the response. Servers can sometimes close the request + # pre-emptively and then respond with a well formed HTTP + # error response. + pass + + with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + ( + http_version, + status, + reason_phrase, + headers, + trailing_data, + ) = self._receive_response_headers(**kwargs) + trace.return_value = ( + http_version, + status, + reason_phrase, + headers, + ) + + network_stream = self._network_stream + + # CONNECT or Upgrade request + if (status == 101) or ( + (request.method == b"CONNECT") and (200 <= status < 300) + ): + network_stream = HTTP11UpgradeStream(network_stream, trailing_data) + + return Response( + status=status, + headers=headers, + content=HTTP11ConnectionByteStream(self, request), + extensions={ + "http_version": http_version, + "reason_phrase": reason_phrase, + "network_stream": network_stream, + }, + ) + except BaseException as exc: + with ShieldCancellation(): + with Trace("response_closed", logger, request) as trace: + self._response_closed() + raise exc + + # Sending the request... 
+ + def _send_request_headers(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + with map_exceptions({h11.LocalProtocolError: LocalProtocolError}): + event = h11.Request( + method=request.method, + target=request.url.target, + headers=request.headers, + ) + self._send_event(event, timeout=timeout) + + def _send_request_body(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + assert isinstance(request.stream, Iterable) + for chunk in request.stream: + event = h11.Data(data=chunk) + self._send_event(event, timeout=timeout) + + self._send_event(h11.EndOfMessage(), timeout=timeout) + + def _send_event( + self, event: h11.Event, timeout: Optional[float] = None + ) -> None: + bytes_to_send = self._h11_state.send(event) + if bytes_to_send is not None: + self._network_stream.write(bytes_to_send, timeout=timeout) + + # Receiving the response... + + def _receive_response_headers( + self, request: Request + ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]], bytes]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + while True: + event = self._receive_event(timeout=timeout) + if isinstance(event, h11.Response): + break + if ( + isinstance(event, h11.InformationalResponse) + and event.status_code == 101 + ): + break + + http_version = b"HTTP/" + event.http_version + + # h11 version 0.11+ supports a `raw_items` interface to get the + # raw header casing, rather than the enforced lowercase headers. 
+ headers = event.headers.raw_items() + + trailing_data, _ = self._h11_state.trailing_data + + return http_version, event.status_code, event.reason, headers, trailing_data + + def _receive_response_body(self, request: Request) -> Iterator[bytes]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + while True: + event = self._receive_event(timeout=timeout) + if isinstance(event, h11.Data): + yield bytes(event.data) + elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)): + break + + def _receive_event( + self, timeout: Optional[float] = None + ) -> Union[h11.Event, Type[h11.PAUSED]]: + while True: + with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}): + event = self._h11_state.next_event() + + if event is h11.NEED_DATA: + data = self._network_stream.read( + self.READ_NUM_BYTES, timeout=timeout + ) + + # If we feed this case through h11 we'll raise an exception like: + # + # httpcore.RemoteProtocolError: can't handle event type + # ConnectionClosed when role=SERVER and state=SEND_RESPONSE + # + # Which is accurate, but not very informative from an end-user + # perspective. Instead we handle this case distinctly and treat + # it as a ConnectError. + if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE: + msg = "Server disconnected without sending a response." + raise RemoteProtocolError(msg) + + self._h11_state.receive_data(data) + else: + # mypy fails to narrow the type in the above if statement above + return event # type: ignore[return-value] + + def _response_closed(self) -> None: + with self._state_lock: + if ( + self._h11_state.our_state is h11.DONE + and self._h11_state.their_state is h11.DONE + ): + self._state = HTTPConnectionState.IDLE + self._h11_state.start_next_cycle() + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + else: + self.close() + + # Once the connection is no longer required... 
+ + def close(self) -> None: + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. + self._state = HTTPConnectionState.CLOSED + self._network_stream.close() + + # The ConnectionInterface methods provide information about the state of + # the connection, allowing for a connection pooling implementation to + # determine when to reuse and when to close the connection... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + # Note that HTTP/1.1 connections in the "NEW" state are not treated as + # being "available". The control flow which created the connection will + # be able to send an outgoing request, but the connection will not be + # acquired from the connection pool for any other request. + return self._state == HTTPConnectionState.IDLE + + def has_expired(self) -> bool: + now = time.monotonic() + keepalive_expired = self._expire_at is not None and now > self._expire_at + + # If the HTTP connection is idle but the socket is readable, then the + # only valid state is that the socket is about to return b"", indicating + # a server-initiated disconnect. 
+ server_disconnected = ( + self._state == HTTPConnectionState.IDLE + and self._network_stream.get_extra_info("is_readable") + ) + + return keepalive_expired or server_disconnected + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/1.1, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. + + def __enter__(self) -> "HTTP11Connection": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + self.close() + + +class HTTP11ConnectionByteStream: + def __init__(self, connection: HTTP11Connection, request: Request) -> None: + self._connection = connection + self._request = request + self._closed = False + + def __iter__(self) -> Iterator[bytes]: + kwargs = {"request": self._request} + try: + with Trace("receive_response_body", logger, self._request, kwargs): + for chunk in self._connection._receive_response_body(**kwargs): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. 
+ with ShieldCancellation(): + self.close() + raise exc + + def close(self) -> None: + if not self._closed: + self._closed = True + with Trace("response_closed", logger, self._request): + self._connection._response_closed() + + +class HTTP11UpgradeStream(NetworkStream): + def __init__(self, stream: NetworkStream, leading_data: bytes) -> None: + self._stream = stream + self._leading_data = leading_data + + def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes: + if self._leading_data: + buffer = self._leading_data[:max_bytes] + self._leading_data = self._leading_data[max_bytes:] + return buffer + else: + return self._stream.read(max_bytes, timeout) + + def write(self, buffer: bytes, timeout: Optional[float] = None) -> None: + self._stream.write(buffer, timeout) + + def close(self) -> None: + self._stream.close() + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: Optional[str] = None, + timeout: Optional[float] = None, + ) -> NetworkStream: + return self._stream.start_tls(ssl_context, server_hostname, timeout) + + def get_extra_info(self, info: str) -> Any: + return self._stream.get_extra_info(info) diff --git a/.venv/Lib/site-packages/httpcore/_sync/http2.py b/.venv/Lib/site-packages/httpcore/_sync/http2.py new file mode 100644 index 00000000..1ee4bbb3 --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_sync/http2.py @@ -0,0 +1,589 @@ +import enum +import logging +import time +import types +import typing + +import h2.config +import h2.connection +import h2.events +import h2.exceptions +import h2.settings + +from .._backends.base import NetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, +) +from .._models import Origin, Request, Response +from .._synchronization import Lock, Semaphore, ShieldCancellation +from .._trace import Trace +from .interfaces import ConnectionInterface + +logger = logging.getLogger("httpcore.http2") + + +def has_body_headers(request: 
Request) -> bool: + return any( + k.lower() == b"content-length" or k.lower() == b"transfer-encoding" + for k, v in request.headers + ) + + +class HTTPConnectionState(enum.IntEnum): + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class HTTP2Connection(ConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) + + def __init__( + self, + origin: Origin, + stream: NetworkStream, + keepalive_expiry: typing.Optional[float] = None, + ): + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: typing.Optional[float] = keepalive_expiry + self._h2_state = h2.connection.H2Connection(config=self.CONFIG) + self._state = HTTPConnectionState.IDLE + self._expire_at: typing.Optional[float] = None + self._request_count = 0 + self._init_lock = Lock() + self._state_lock = Lock() + self._read_lock = Lock() + self._write_lock = Lock() + self._sent_connection_init = False + self._used_all_stream_ids = False + self._connection_error = False + + # Mapping from stream ID to response stream events. + self._events: typing.Dict[ + int, + typing.Union[ + h2.events.ResponseReceived, + h2.events.DataReceived, + h2.events.StreamEnded, + h2.events.StreamReset, + ], + ] = {} + + # Connection terminated events are stored as state since + # we need to handle them for all streams. + self._connection_terminated: typing.Optional[h2.events.ConnectionTerminated] = ( + None + ) + + self._read_exception: typing.Optional[Exception] = None + self._write_exception: typing.Optional[Exception] = None + + def handle_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + # This cannot occur in normal operation, since the connection pool + # will only send requests on connections that handle them. + # It's in place simply for resilience as a guard against incorrect + # usage, for anyone working directly with httpcore connections. 
+ raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection " + f"to {self._origin}" + ) + + with self._state_lock: + if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE): + self._request_count += 1 + self._expire_at = None + self._state = HTTPConnectionState.ACTIVE + else: + raise ConnectionNotAvailable() + + with self._init_lock: + if not self._sent_connection_init: + try: + kwargs = {"request": request} + with Trace("send_connection_init", logger, request, kwargs): + self._send_connection_init(**kwargs) + except BaseException as exc: + with ShieldCancellation(): + self.close() + raise exc + + self._sent_connection_init = True + + # Initially start with just 1 until the remote server provides + # its max_concurrent_streams value + self._max_streams = 1 + + local_settings_max_streams = ( + self._h2_state.local_settings.max_concurrent_streams + ) + self._max_streams_semaphore = Semaphore(local_settings_max_streams) + + for _ in range(local_settings_max_streams - self._max_streams): + self._max_streams_semaphore.acquire() + + self._max_streams_semaphore.acquire() + + try: + stream_id = self._h2_state.get_next_available_stream_id() + self._events[stream_id] = [] + except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover + self._used_all_stream_ids = True + self._request_count -= 1 + raise ConnectionNotAvailable() + + try: + kwargs = {"request": request, "stream_id": stream_id} + with Trace("send_request_headers", logger, request, kwargs): + self._send_request_headers(request=request, stream_id=stream_id) + with Trace("send_request_body", logger, request, kwargs): + self._send_request_body(request=request, stream_id=stream_id) + with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + status, headers = self._receive_response( + request=request, stream_id=stream_id + ) + trace.return_value = (status, headers) + + return Response( + status=status, + headers=headers, + 
content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), + extensions={ + "http_version": b"HTTP/2", + "network_stream": self._network_stream, + "stream_id": stream_id, + }, + ) + except BaseException as exc: # noqa: PIE786 + with ShieldCancellation(): + kwargs = {"stream_id": stream_id} + with Trace("response_closed", logger, request, kwargs): + self._response_closed(stream_id=stream_id) + + if isinstance(exc, h2.exceptions.ProtocolError): + # One case where h2 can raise a protocol error is when a + # closed frame has been seen by the state machine. + # + # This happens when one stream is reading, and encounters + # a GOAWAY event. Other flows of control may then raise + # a protocol error at any point they interact with the 'h2_state'. + # + # In this case we'll have stored the event, and should raise + # it as a RemoteProtocolError. + if self._connection_terminated: # pragma: nocover + raise RemoteProtocolError(self._connection_terminated) + # If h2 raises a protocol error in some other state then we + # must somehow have made a protocol violation. + raise LocalProtocolError(exc) # pragma: nocover + + raise exc + + def _send_connection_init(self, request: Request) -> None: + """ + The HTTP/2 connection requires some initial setup before we can start + using individual request/response streams on it. + """ + # Need to set these manually here instead of manipulating via + # __setitem__() otherwise the H2Connection will emit SettingsUpdate + # frames in addition to sending the undesired defaults. + self._h2_state.local_settings = h2.settings.Settings( + client=True, + initial_values={ + # Disable PUSH_PROMISE frames from the server since we don't do anything + # with them for now. Maybe when we support caching? 
+ h2.settings.SettingCodes.ENABLE_PUSH: 0, + # These two are taken from h2 for safe defaults + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100, + h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536, + }, + ) + + # Some websites (*cough* Yahoo *cough*) balk at this setting being + # present in the initial handshake since it's not defined in the original + # RFC despite the RFC mandating ignoring settings you don't know about. + del self._h2_state.local_settings[ + h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL + ] + + self._h2_state.initiate_connection() + self._h2_state.increment_flow_control_window(2**24) + self._write_outgoing_data(request) + + # Sending the request... + + def _send_request_headers(self, request: Request, stream_id: int) -> None: + """ + Send the request headers to a given stream ID. + """ + end_stream = not has_body_headers(request) + + # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. + # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require + # HTTP/1.1 style headers, and map them appropriately if we end up on + # an HTTP/2 connection. + authority = [v for k, v in request.headers if k.lower() == b"host"][0] + + headers = [ + (b":method", request.method), + (b":authority", authority), + (b":scheme", request.url.scheme), + (b":path", request.url.target), + ] + [ + (k.lower(), v) + for k, v in request.headers + if k.lower() + not in ( + b"host", + b"transfer-encoding", + ) + ] + + self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) + self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id) + self._write_outgoing_data(request) + + def _send_request_body(self, request: Request, stream_id: int) -> None: + """ + Iterate over the request body sending it to a given stream ID. 
+ """ + if not has_body_headers(request): + return + + assert isinstance(request.stream, typing.Iterable) + for data in request.stream: + self._send_stream_data(request, stream_id, data) + self._send_end_stream(request, stream_id) + + def _send_stream_data( + self, request: Request, stream_id: int, data: bytes + ) -> None: + """ + Send a single chunk of data in one or more data frames. + """ + while data: + max_flow = self._wait_for_outgoing_flow(request, stream_id) + chunk_size = min(len(data), max_flow) + chunk, data = data[:chunk_size], data[chunk_size:] + self._h2_state.send_data(stream_id, chunk) + self._write_outgoing_data(request) + + def _send_end_stream(self, request: Request, stream_id: int) -> None: + """ + Send an empty data frame on on a given stream ID with the END_STREAM flag set. + """ + self._h2_state.end_stream(stream_id) + self._write_outgoing_data(request) + + # Receiving the response... + + def _receive_response( + self, request: Request, stream_id: int + ) -> typing.Tuple[int, typing.List[typing.Tuple[bytes, bytes]]]: + """ + Return the response status code and headers for a given stream ID. + """ + while True: + event = self._receive_stream_event(request, stream_id) + if isinstance(event, h2.events.ResponseReceived): + break + + status_code = 200 + headers = [] + for k, v in event.headers: + if k == b":status": + status_code = int(v.decode("ascii", errors="ignore")) + elif not k.startswith(b":"): + headers.append((k, v)) + + return (status_code, headers) + + def _receive_response_body( + self, request: Request, stream_id: int + ) -> typing.Iterator[bytes]: + """ + Iterator that returns the bytes of the response body for a given stream ID. 
+ """ + while True: + event = self._receive_stream_event(request, stream_id) + if isinstance(event, h2.events.DataReceived): + amount = event.flow_controlled_length + self._h2_state.acknowledge_received_data(amount, stream_id) + self._write_outgoing_data(request) + yield event.data + elif isinstance(event, h2.events.StreamEnded): + break + + def _receive_stream_event( + self, request: Request, stream_id: int + ) -> typing.Union[ + h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded + ]: + """ + Return the next available event for a given stream ID. + + Will read more data from the network if required. + """ + while not self._events.get(stream_id): + self._receive_events(request, stream_id) + event = self._events[stream_id].pop(0) + if isinstance(event, h2.events.StreamReset): + raise RemoteProtocolError(event) + return event + + def _receive_events( + self, request: Request, stream_id: typing.Optional[int] = None + ) -> None: + """ + Read some data from the network until we see one or more events + for a given stream ID. + """ + with self._read_lock: + if self._connection_terminated is not None: + last_stream_id = self._connection_terminated.last_stream_id + if stream_id and last_stream_id and stream_id > last_stream_id: + self._request_count -= 1 + raise ConnectionNotAvailable() + raise RemoteProtocolError(self._connection_terminated) + + # This conditional is a bit icky. We don't want to block reading if we've + # actually got an event to return for a given stream. We need to do that + # check *within* the atomic read lock. Though it also need to be optional, + # because when we call it from `_wait_for_outgoing_flow` we *do* want to + # block until we've available flow control, event when we have events + # pending for the stream ID we're attempting to send on. 
+ if stream_id is None or not self._events.get(stream_id): + events = self._read_incoming_data(request) + for event in events: + if isinstance(event, h2.events.RemoteSettingsChanged): + with Trace( + "receive_remote_settings", logger, request + ) as trace: + self._receive_remote_settings_change(event) + trace.return_value = event + + elif isinstance( + event, + ( + h2.events.ResponseReceived, + h2.events.DataReceived, + h2.events.StreamEnded, + h2.events.StreamReset, + ), + ): + if event.stream_id in self._events: + self._events[event.stream_id].append(event) + + elif isinstance(event, h2.events.ConnectionTerminated): + self._connection_terminated = event + + self._write_outgoing_data(request) + + def _receive_remote_settings_change(self, event: h2.events.Event) -> None: + max_concurrent_streams = event.changed_settings.get( + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS + ) + if max_concurrent_streams: + new_max_streams = min( + max_concurrent_streams.new_value, + self._h2_state.local_settings.max_concurrent_streams, + ) + if new_max_streams and new_max_streams != self._max_streams: + while new_max_streams > self._max_streams: + self._max_streams_semaphore.release() + self._max_streams += 1 + while new_max_streams < self._max_streams: + self._max_streams_semaphore.acquire() + self._max_streams -= 1 + + def _response_closed(self, stream_id: int) -> None: + self._max_streams_semaphore.release() + del self._events[stream_id] + with self._state_lock: + if self._connection_terminated and not self._events: + self.close() + + elif self._state == HTTPConnectionState.ACTIVE and not self._events: + self._state = HTTPConnectionState.IDLE + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + if self._used_all_stream_ids: # pragma: nocover + self.close() + + def close(self) -> None: + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. 
+ self._h2_state.close_connection() + self._state = HTTPConnectionState.CLOSED + self._network_stream.close() + + # Wrappers around network read/write operations... + + def _read_incoming_data( + self, request: Request + ) -> typing.List[h2.events.Event]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + if self._read_exception is not None: + raise self._read_exception # pragma: nocover + + try: + data = self._network_stream.read(self.READ_NUM_BYTES, timeout) + if data == b"": + raise RemoteProtocolError("Server disconnected") + except Exception as exc: + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future reads. + # (For example, this means that a single read timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. + self._read_exception = exc + self._connection_error = True + raise exc + + events: typing.List[h2.events.Event] = self._h2_state.receive_data(data) + + return events + + def _write_outgoing_data(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + with self._write_lock: + data_to_send = self._h2_state.data_to_send() + + if self._write_exception is not None: + raise self._write_exception # pragma: nocover + + try: + self._network_stream.write(data_to_send, timeout) + except Exception as exc: # pragma: nocover + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future write. + # (For example, this means that a single write timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. 
+ self._write_exception = exc + self._connection_error = True + raise exc + + # Flow control... + + def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: + """ + Returns the maximum allowable outgoing flow for a given stream. + + If the allowable flow is zero, then waits on the network until + WindowUpdated frames have increased the flow rate. + https://tools.ietf.org/html/rfc7540#section-6.9 + """ + local_flow: int = self._h2_state.local_flow_control_window(stream_id) + max_frame_size: int = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + while flow == 0: + self._receive_events(request) + local_flow = self._h2_state.local_flow_control_window(stream_id) + max_frame_size = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + return flow + + # Interface for connection pooling... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + return ( + self._state != HTTPConnectionState.CLOSED + and not self._connection_error + and not self._used_all_stream_ids + and not ( + self._h2_state.state_machine.state + == h2.connection.ConnectionState.CLOSED + ) + ) + + def has_expired(self) -> bool: + now = time.monotonic() + return self._expire_at is not None and now > self._expire_at + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/2, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with 
connection instances directly. + + def __enter__(self) -> "HTTP2Connection": + return self + + def __exit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[types.TracebackType] = None, + ) -> None: + self.close() + + +class HTTP2ConnectionByteStream: + def __init__( + self, connection: HTTP2Connection, request: Request, stream_id: int + ) -> None: + self._connection = connection + self._request = request + self._stream_id = stream_id + self._closed = False + + def __iter__(self) -> typing.Iterator[bytes]: + kwargs = {"request": self._request, "stream_id": self._stream_id} + try: + with Trace("receive_response_body", logger, self._request, kwargs): + for chunk in self._connection._receive_response_body( + request=self._request, stream_id=self._stream_id + ): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. 
+ with ShieldCancellation(): + self.close() + raise exc + + def close(self) -> None: + if not self._closed: + self._closed = True + kwargs = {"stream_id": self._stream_id} + with Trace("response_closed", logger, self._request, kwargs): + self._connection._response_closed(stream_id=self._stream_id) diff --git a/.venv/Lib/site-packages/httpcore/_sync/http_proxy.py b/.venv/Lib/site-packages/httpcore/_sync/http_proxy.py new file mode 100644 index 00000000..6acac9a7 --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_sync/http_proxy.py @@ -0,0 +1,368 @@ +import logging +import ssl +from base64 import b64encode +from typing import Iterable, List, Mapping, Optional, Sequence, Tuple, Union + +from .._backends.base import SOCKET_OPTION, NetworkBackend +from .._exceptions import ProxyError +from .._models import ( + URL, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, +) +from .._ssl import default_ssl_context +from .._synchronization import Lock +from .._trace import Trace +from .connection import HTTPConnection +from .connection_pool import ConnectionPool +from .http11 import HTTP11Connection +from .interfaces import ConnectionInterface + +HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]] +HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]] + + +logger = logging.getLogger("httpcore.proxy") + + +def merge_headers( + default_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, + override_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, +) -> List[Tuple[bytes, bytes]]: + """ + Append default_headers and override_headers, de-duplicating if a key exists + in both cases. 
+ """ + default_headers = [] if default_headers is None else list(default_headers) + override_headers = [] if override_headers is None else list(override_headers) + has_override = set(key.lower() for key, value in override_headers) + default_headers = [ + (key, value) + for key, value in default_headers + if key.lower() not in has_override + ] + return default_headers + override_headers + + +def build_auth_header(username: bytes, password: bytes) -> bytes: + userpass = username + b":" + password + return b"Basic " + b64encode(userpass) + + +class HTTPProxy(ConnectionPool): + """ + A connection pool that sends requests via an HTTP proxy. + """ + + def __init__( + self, + proxy_url: Union[URL, bytes, str], + proxy_auth: Optional[Tuple[Union[bytes, str], Union[bytes, str]]] = None, + proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, + ssl_context: Optional[ssl.SSLContext] = None, + proxy_ssl_context: Optional[ssl.SSLContext] = None, + max_connections: Optional[int] = 10, + max_keepalive_connections: Optional[int] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: Optional[str] = None, + uds: Optional[str] = None, + network_backend: Optional[NetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + proxy_url: The URL to use when connecting to the proxy server. + For example `"http://127.0.0.1:8080/"`. + proxy_auth: Any proxy authentication as a two-tuple of + (username, password). May be either bytes or ascii-only str. + proxy_headers: Any HTTP headers to use for the proxy requests. + For example `{"Proxy-Authorization": "Basic :"}`. + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. 
+ proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish + a connection. + local_address: Local address to connect from. Can also be used to + connect using a particular address family. Using + `local_address="0.0.0.0"` will connect using an `AF_INET` address + (IPv4), while using `local_address="::"` will connect using an + `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. 
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + local_address=local_address, + uds=uds, + socket_options=socket_options, + ) + + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if ( + self._proxy_url.scheme == b"http" and proxy_ssl_context is not None + ): # pragma: no cover + raise RuntimeError( + "The `proxy_ssl_context` argument is not allowed for the http scheme" + ) + + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + if proxy_auth is not None: + username = enforce_bytes(proxy_auth[0], name="proxy_auth") + password = enforce_bytes(proxy_auth[1], name="proxy_auth") + authorization = build_auth_header(username, password) + self._proxy_headers = [ + (b"Proxy-Authorization", authorization) + ] + self._proxy_headers + + def create_connection(self, origin: Origin) -> ConnectionInterface: + if origin.scheme == b"http": + return ForwardHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + keepalive_expiry=self._keepalive_expiry, + network_backend=self._network_backend, + proxy_ssl_context=self._proxy_ssl_context, + ) + return TunnelHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + ssl_context=self._ssl_context, + proxy_ssl_context=self._proxy_ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class ForwardHTTPConnection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, + keepalive_expiry: 
Optional[float] = None, + network_backend: Optional[NetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + proxy_ssl_context: Optional[ssl.SSLContext] = None, + ) -> None: + self._connection = HTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._remote_origin = remote_origin + + def handle_request(self, request: Request) -> Response: + headers = merge_headers(self._proxy_headers, request.headers) + url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=bytes(request.url), + ) + proxy_request = Request( + method=request.method, + url=url, + headers=headers, + content=request.stream, + extensions=request.extensions, + ) + return self._connection.handle_request(proxy_request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + def close(self) -> None: + self._connection.close() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + +class TunnelHTTPConnection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + ssl_context: Optional[ssl.SSLContext] = None, + proxy_ssl_context: Optional[ssl.SSLContext] = None, + proxy_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, 
+ network_backend: Optional[NetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + self._connection: ConnectionInterface = HTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._connect_lock = Lock() + self._connected = False + + def handle_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("connect", None) + + with self._connect_lock: + if not self._connected: + target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) + + connect_url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=target, + ) + connect_headers = merge_headers( + [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers + ) + connect_request = Request( + method=b"CONNECT", + url=connect_url, + headers=connect_headers, + extensions=request.extensions, + ) + connect_response = self._connection.handle_request( + connect_request + ) + + if connect_response.status < 200 or connect_response.status > 299: + reason_bytes = connect_response.extensions.get("reason_phrase", b"") + reason_str = reason_bytes.decode("ascii", errors="ignore") + msg = "%d %s" % (connect_response.status, reason_str) + self._connection.close() + raise ProxyError(msg) + + stream = connect_response.extensions["network_stream"] + + # Upgrade the stream to SSL + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", 
"h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import HTTP2Connection + + self._connection = HTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = HTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + + self._connected = True + return self._connection.handle_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + def close(self) -> None: + self._connection.close() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/.venv/Lib/site-packages/httpcore/_sync/interfaces.py b/.venv/Lib/site-packages/httpcore/_sync/interfaces.py new file mode 100644 index 00000000..5e95be1e --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_sync/interfaces.py @@ -0,0 +1,135 @@ +from contextlib import contextmanager +from typing import Iterator, Optional, Union + +from 
.._models import ( + URL, + Extensions, + HeaderTypes, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, + include_request_headers, +) + + +class RequestInterface: + def request( + self, + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, Iterator[bytes], None] = None, + extensions: Optional[Extensions] = None, + ) -> Response: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = self.handle_request(request) + try: + response.read() + finally: + response.close() + return response + + @contextmanager + def stream( + self, + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, Iterator[bytes], None] = None, + extensions: Optional[Extensions] = None, + ) -> Iterator[Response]: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. 
+ headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = self.handle_request(request) + try: + yield response + finally: + response.close() + + def handle_request(self, request: Request) -> Response: + raise NotImplementedError() # pragma: nocover + + +class ConnectionInterface(RequestInterface): + def close(self) -> None: + raise NotImplementedError() # pragma: nocover + + def info(self) -> str: + raise NotImplementedError() # pragma: nocover + + def can_handle_request(self, origin: Origin) -> bool: + raise NotImplementedError() # pragma: nocover + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an + outgoing request. + + An HTTP/1.1 connection will only be available if it is currently idle. + + An HTTP/2 connection will be available so long as the stream ID space is + not yet exhausted, and the connection is not in an error state. + + While the connection is being established we may not yet know if it is going + to result in an HTTP/1.1 or HTTP/2 connection. The connection should be + treated as being available, but might ultimately raise `NewConnectionRequired` + required exceptions if multiple requests are attempted over a connection + that ends up being established as HTTP/1.1. + """ + raise NotImplementedError() # pragma: nocover + + def has_expired(self) -> bool: + """ + Return `True` if the connection is in a state where it should be closed. + + This either means that the connection is idle and it has passed the + expiry time on its keep-alive, or that server has sent an EOF. + """ + raise NotImplementedError() # pragma: nocover + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. + """ + raise NotImplementedError() # pragma: nocover + + def is_closed(self) -> bool: + """ + Return `True` if the connection has been closed. 
+ + Used when a response is closed to determine if the connection may be + returned to the connection pool or not. + """ + raise NotImplementedError() # pragma: nocover diff --git a/.venv/Lib/site-packages/httpcore/_sync/socks_proxy.py b/.venv/Lib/site-packages/httpcore/_sync/socks_proxy.py new file mode 100644 index 00000000..502e4d7f --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_sync/socks_proxy.py @@ -0,0 +1,342 @@ +import logging +import ssl +import typing + +from socksio import socks5 + +from .._backends.sync import SyncBackend +from .._backends.base import NetworkBackend, NetworkStream +from .._exceptions import ConnectionNotAvailable, ProxyError +from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url +from .._ssl import default_ssl_context +from .._synchronization import Lock +from .._trace import Trace +from .connection_pool import ConnectionPool +from .http11 import HTTP11Connection +from .interfaces import ConnectionInterface + +logger = logging.getLogger("httpcore.socks") + + +AUTH_METHODS = { + b"\x00": "NO AUTHENTICATION REQUIRED", + b"\x01": "GSSAPI", + b"\x02": "USERNAME/PASSWORD", + b"\xff": "NO ACCEPTABLE METHODS", +} + +REPLY_CODES = { + b"\x00": "Succeeded", + b"\x01": "General SOCKS server failure", + b"\x02": "Connection not allowed by ruleset", + b"\x03": "Network unreachable", + b"\x04": "Host unreachable", + b"\x05": "Connection refused", + b"\x06": "TTL expired", + b"\x07": "Command not supported", + b"\x08": "Address type not supported", +} + + +def _init_socks5_connection( + stream: NetworkStream, + *, + host: bytes, + port: int, + auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, +) -> None: + conn = socks5.SOCKS5Connection() + + # Auth method request + auth_method = ( + socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED + if auth is None + else socks5.SOCKS5AuthMethod.USERNAME_PASSWORD + ) + conn.send(socks5.SOCKS5AuthMethodsRequest([auth_method])) + outgoing_bytes = conn.data_to_send() + 
stream.write(outgoing_bytes) + + # Auth method response + incoming_bytes = stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socks5.SOCKS5AuthReply) + if response.method != auth_method: + requested = AUTH_METHODS.get(auth_method, "UNKNOWN") + responded = AUTH_METHODS.get(response.method, "UNKNOWN") + raise ProxyError( + f"Requested {requested} from proxy server, but got {responded}." + ) + + if response.method == socks5.SOCKS5AuthMethod.USERNAME_PASSWORD: + # Username/password request + assert auth is not None + username, password = auth + conn.send(socks5.SOCKS5UsernamePasswordRequest(username, password)) + outgoing_bytes = conn.data_to_send() + stream.write(outgoing_bytes) + + # Username/password response + incoming_bytes = stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socks5.SOCKS5UsernamePasswordReply) + if not response.success: + raise ProxyError("Invalid username/password") + + # Connect request + conn.send( + socks5.SOCKS5CommandRequest.from_address( + socks5.SOCKS5Command.CONNECT, (host, port) + ) + ) + outgoing_bytes = conn.data_to_send() + stream.write(outgoing_bytes) + + # Connect response + incoming_bytes = stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socks5.SOCKS5Reply) + if response.reply_code != socks5.SOCKS5ReplyCode.SUCCEEDED: + reply_code = REPLY_CODES.get(response.reply_code, "UNKOWN") + raise ProxyError(f"Proxy Server could not connect: {reply_code}.") + + +class SOCKSProxy(ConnectionPool): + """ + A connection pool that sends requests via an HTTP proxy. 
+ """ + + def __init__( + self, + proxy_url: typing.Union[URL, bytes, str], + proxy_auth: typing.Optional[ + typing.Tuple[typing.Union[bytes, str], typing.Union[bytes, str]] + ] = None, + ssl_context: typing.Optional[ssl.SSLContext] = None, + max_connections: typing.Optional[int] = 10, + max_keepalive_connections: typing.Optional[int] = None, + keepalive_expiry: typing.Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + network_backend: typing.Optional[NetworkBackend] = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + proxy_url: The URL to use when connecting to the proxy server. + For example `"http://127.0.0.1:8080/"`. + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish + a connection. + local_address: Local address to connect from. Can also be used to + connect using a particular address family. Using + `local_address="0.0.0.0"` will connect using an `AF_INET` address + (IPv4), while using `local_address="::"` will connect using an + `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. 
+ network_backend: A backend instance to use for handling network I/O. + """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + ) + self._ssl_context = ssl_context + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if proxy_auth is not None: + username, password = proxy_auth + username_bytes = enforce_bytes(username, name="proxy_auth") + password_bytes = enforce_bytes(password, name="proxy_auth") + self._proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = ( + username_bytes, + password_bytes, + ) + else: + self._proxy_auth = None + + def create_connection(self, origin: Origin) -> ConnectionInterface: + return Socks5Connection( + proxy_origin=self._proxy_url.origin, + remote_origin=origin, + proxy_auth=self._proxy_auth, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class Socks5Connection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, + ssl_context: typing.Optional[ssl.SSLContext] = None, + keepalive_expiry: typing.Optional[float] = None, + http1: bool = True, + http2: bool = False, + network_backend: typing.Optional[NetworkBackend] = None, + ) -> None: + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._proxy_auth = proxy_auth + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + + self._network_backend: NetworkBackend = ( + SyncBackend() if network_backend is None else network_backend + ) + self._connect_lock = Lock() + self._connection: typing.Optional[ConnectionInterface] = None + self._connect_failed = False + 
+ def handle_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + with self._connect_lock: + if self._connection is None: + try: + # Connect to the proxy + kwargs = { + "host": self._proxy_origin.host.decode("ascii"), + "port": self._proxy_origin.port, + "timeout": timeout, + } + with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + + # Connect to the remote host using socks5 + kwargs = { + "stream": stream, + "host": self._remote_origin.host.decode("ascii"), + "port": self._remote_origin.port, + "auth": self._proxy_auth, + } + with Trace( + "setup_socks5_connection", logger, request, kwargs + ) as trace: + _init_socks5_connection(**kwargs) + trace.return_value = stream + + # Upgrade the stream to SSL + if self._remote_origin.scheme == b"https": + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ( + ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ) + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or ( + self._http2 and not self._http1 + ): # pragma: nocover + from .http2 import HTTP2Connection + + self._connection = HTTP2Connection( + origin=self._remote_origin, + stream=stream, + 
keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = HTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except Exception as exc: + self._connect_failed = True + raise exc + elif not self._connection.is_available(): # pragma: nocover + raise ConnectionNotAvailable() + + return self._connection.handle_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + def close(self) -> None: + if self._connection is not None: + self._connection.close() + + def is_available(self) -> bool: + if self._connection is None: # pragma: nocover + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._remote_origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: # pragma: nocover + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/.venv/Lib/site-packages/httpcore/_synchronization.py b/.venv/Lib/site-packages/httpcore/_synchronization.py new file mode 100644 index 00000000..9619a398 --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_synchronization.py @@ -0,0 +1,317 @@ 
+import threading +from types import TracebackType +from typing import Optional, Type + +from ._exceptions import ExceptionMapping, PoolTimeout, map_exceptions + +# Our async synchronization primatives use either 'anyio' or 'trio' depending +# on if they're running under asyncio or trio. + +try: + import trio +except ImportError: # pragma: nocover + trio = None # type: ignore + +try: + import anyio +except ImportError: # pragma: nocover + anyio = None # type: ignore + + +def current_async_library() -> str: + # Determine if we're running under trio or asyncio. + # See https://sniffio.readthedocs.io/en/latest/ + try: + import sniffio + except ImportError: # pragma: nocover + environment = "asyncio" + else: + environment = sniffio.current_async_library() + + if environment not in ("asyncio", "trio"): # pragma: nocover + raise RuntimeError("Running under an unsupported async environment.") + + if environment == "asyncio" and anyio is None: # pragma: nocover + raise RuntimeError( + "Running with asyncio requires installation of 'httpcore[asyncio]'." + ) + + if environment == "trio" and trio is None: # pragma: nocover + raise RuntimeError( + "Running with trio requires installation of 'httpcore[trio]'." + ) + + return environment + + +class AsyncLock: + """ + This is a standard lock. + + In the sync case `Lock` provides thread locking. + In the async case `AsyncLock` provides async locking. + """ + + def __init__(self) -> None: + self._backend = "" + + def setup(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a lock with the correct implementation. 
+ """ + self._backend = current_async_library() + if self._backend == "trio": + self._trio_lock = trio.Lock() + elif self._backend == "asyncio": + self._anyio_lock = anyio.Lock() + + async def __aenter__(self) -> "AsyncLock": + if not self._backend: + self.setup() + + if self._backend == "trio": + await self._trio_lock.acquire() + elif self._backend == "asyncio": + await self._anyio_lock.acquire() + + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + if self._backend == "trio": + self._trio_lock.release() + elif self._backend == "asyncio": + self._anyio_lock.release() + + +class AsyncThreadLock: + """ + This is a threading-only lock for no-I/O contexts. + + In the sync case `ThreadLock` provides thread locking. + In the async case `AsyncThreadLock` is a no-op. + """ + + def __enter__(self) -> "AsyncThreadLock": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + pass + + +class AsyncEvent: + def __init__(self) -> None: + self._backend = "" + + def setup(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a lock with the correct implementation. 
+ """ + self._backend = current_async_library() + if self._backend == "trio": + self._trio_event = trio.Event() + elif self._backend == "asyncio": + self._anyio_event = anyio.Event() + + def set(self) -> None: + if not self._backend: + self.setup() + + if self._backend == "trio": + self._trio_event.set() + elif self._backend == "asyncio": + self._anyio_event.set() + + async def wait(self, timeout: Optional[float] = None) -> None: + if not self._backend: + self.setup() + + if self._backend == "trio": + trio_exc_map: ExceptionMapping = {trio.TooSlowError: PoolTimeout} + timeout_or_inf = float("inf") if timeout is None else timeout + with map_exceptions(trio_exc_map): + with trio.fail_after(timeout_or_inf): + await self._trio_event.wait() + elif self._backend == "asyncio": + anyio_exc_map: ExceptionMapping = {TimeoutError: PoolTimeout} + with map_exceptions(anyio_exc_map): + with anyio.fail_after(timeout): + await self._anyio_event.wait() + + +class AsyncSemaphore: + def __init__(self, bound: int) -> None: + self._bound = bound + self._backend = "" + + def setup(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a semaphore with the correct implementation. 
+ """ + self._backend = current_async_library() + if self._backend == "trio": + self._trio_semaphore = trio.Semaphore( + initial_value=self._bound, max_value=self._bound + ) + elif self._backend == "asyncio": + self._anyio_semaphore = anyio.Semaphore( + initial_value=self._bound, max_value=self._bound + ) + + async def acquire(self) -> None: + if not self._backend: + self.setup() + + if self._backend == "trio": + await self._trio_semaphore.acquire() + elif self._backend == "asyncio": + await self._anyio_semaphore.acquire() + + async def release(self) -> None: + if self._backend == "trio": + self._trio_semaphore.release() + elif self._backend == "asyncio": + self._anyio_semaphore.release() + + +class AsyncShieldCancellation: + # For certain portions of our codebase where we're dealing with + # closing connections during exception handling we want to shield + # the operation from being cancelled. + # + # with AsyncShieldCancellation(): + # ... # clean-up operations, shielded from cancellation. + + def __init__(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a shielded scope with the correct implementation. + """ + self._backend = current_async_library() + + if self._backend == "trio": + self._trio_shield = trio.CancelScope(shield=True) + elif self._backend == "asyncio": + self._anyio_shield = anyio.CancelScope(shield=True) + + def __enter__(self) -> "AsyncShieldCancellation": + if self._backend == "trio": + self._trio_shield.__enter__() + elif self._backend == "asyncio": + self._anyio_shield.__enter__() + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + if self._backend == "trio": + self._trio_shield.__exit__(exc_type, exc_value, traceback) + elif self._backend == "asyncio": + self._anyio_shield.__exit__(exc_type, exc_value, traceback) + + +# Our thread-based synchronization primitives... 
+ + +class Lock: + """ + This is a standard lock. + + In the sync case `Lock` provides thread locking. + In the async case `AsyncLock` provides async locking. + """ + + def __init__(self) -> None: + self._lock = threading.Lock() + + def __enter__(self) -> "Lock": + self._lock.acquire() + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + self._lock.release() + + +class ThreadLock: + """ + This is a threading-only lock for no-I/O contexts. + + In the sync case `ThreadLock` provides thread locking. + In the async case `AsyncThreadLock` is a no-op. + """ + + def __init__(self) -> None: + self._lock = threading.Lock() + + def __enter__(self) -> "ThreadLock": + self._lock.acquire() + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + self._lock.release() + + +class Event: + def __init__(self) -> None: + self._event = threading.Event() + + def set(self) -> None: + self._event.set() + + def wait(self, timeout: Optional[float] = None) -> None: + if timeout == float("inf"): # pragma: no cover + timeout = None + if not self._event.wait(timeout=timeout): + raise PoolTimeout() # pragma: nocover + + +class Semaphore: + def __init__(self, bound: int) -> None: + self._semaphore = threading.Semaphore(value=bound) + + def acquire(self) -> None: + self._semaphore.acquire() + + def release(self) -> None: + self._semaphore.release() + + +class ShieldCancellation: + # Thread-synchronous codebases don't support cancellation semantics. + # We have this class because we need to mirror the async and sync + # cases within our package, but it's just a no-op. 
+ def __enter__(self) -> "ShieldCancellation": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + pass diff --git a/.venv/Lib/site-packages/httpcore/_trace.py b/.venv/Lib/site-packages/httpcore/_trace.py new file mode 100644 index 00000000..b122a53e --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_trace.py @@ -0,0 +1,105 @@ +import inspect +import logging +from types import TracebackType +from typing import Any, Dict, Optional, Type + +from ._models import Request + + +class Trace: + def __init__( + self, + name: str, + logger: logging.Logger, + request: Optional[Request] = None, + kwargs: Optional[Dict[str, Any]] = None, + ) -> None: + self.name = name + self.logger = logger + self.trace_extension = ( + None if request is None else request.extensions.get("trace") + ) + self.debug = self.logger.isEnabledFor(logging.DEBUG) + self.kwargs = kwargs or {} + self.return_value: Any = None + self.should_trace = self.debug or self.trace_extension is not None + self.prefix = self.logger.name.split(".")[-1] + + def trace(self, name: str, info: Dict[str, Any]) -> None: + if self.trace_extension is not None: + prefix_and_name = f"{self.prefix}.{name}" + ret = self.trace_extension(prefix_and_name, info) + if inspect.iscoroutine(ret): # pragma: no cover + raise TypeError( + "If you are using a synchronous interface, " + "the callback of the `trace` extension should " + "be a normal function instead of an asynchronous function." 
+ ) + + if self.debug: + if not info or "return_value" in info and info["return_value"] is None: + message = name + else: + args = " ".join([f"{key}={value!r}" for key, value in info.items()]) + message = f"{name} {args}" + self.logger.debug(message) + + def __enter__(self) -> "Trace": + if self.should_trace: + info = self.kwargs + self.trace(f"{self.name}.started", info) + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + if self.should_trace: + if exc_value is None: + info = {"return_value": self.return_value} + self.trace(f"{self.name}.complete", info) + else: + info = {"exception": exc_value} + self.trace(f"{self.name}.failed", info) + + async def atrace(self, name: str, info: Dict[str, Any]) -> None: + if self.trace_extension is not None: + prefix_and_name = f"{self.prefix}.{name}" + coro = self.trace_extension(prefix_and_name, info) + if not inspect.iscoroutine(coro): # pragma: no cover + raise TypeError( + "If you're using an asynchronous interface, " + "the callback of the `trace` extension should " + "be an asynchronous function rather than a normal function." 
+ ) + await coro + + if self.debug: + if not info or "return_value" in info and info["return_value"] is None: + message = name + else: + args = " ".join([f"{key}={value!r}" for key, value in info.items()]) + message = f"{name} {args}" + self.logger.debug(message) + + async def __aenter__(self) -> "Trace": + if self.should_trace: + info = self.kwargs + await self.atrace(f"{self.name}.started", info) + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + if self.should_trace: + if exc_value is None: + info = {"return_value": self.return_value} + await self.atrace(f"{self.name}.complete", info) + else: + info = {"exception": exc_value} + await self.atrace(f"{self.name}.failed", info) diff --git a/.venv/Lib/site-packages/httpcore/_utils.py b/.venv/Lib/site-packages/httpcore/_utils.py new file mode 100644 index 00000000..df5dea8f --- /dev/null +++ b/.venv/Lib/site-packages/httpcore/_utils.py @@ -0,0 +1,36 @@ +import select +import socket +import sys +import typing + + +def is_socket_readable(sock: typing.Optional[socket.socket]) -> bool: + """ + Return whether a socket, as identifed by its file descriptor, is readable. + "A socket is readable" means that the read buffer isn't empty, i.e. that calling + .recv() on it would immediately return some data. + """ + # NOTE: we want check for readability without actually attempting to read, because + # we don't want to block forever if it's not readable. + + # In the case that the socket no longer exists, or cannot return a file + # descriptor, we treat it as being readable, as if it the next read operation + # on it is ready to return the terminating `b""`. 
+ sock_fd = None if sock is None else sock.fileno() + if sock_fd is None or sock_fd < 0: # pragma: nocover + return True + + # The implementation below was stolen from: + # https://github.com/python-trio/trio/blob/20ee2b1b7376db637435d80e266212a35837ddcc/trio/_socket.py#L471-L478 + # See also: https://github.com/encode/httpcore/pull/193#issuecomment-703129316 + + # Use select.select on Windows, and when poll is unavailable and select.poll + # everywhere else. (E.g. When eventlet is in use. See #327) + if ( + sys.platform == "win32" or getattr(select, "poll", None) is None + ): # pragma: nocover + rready, _, _ = select.select([sock_fd], [], [], 0) + return bool(rready) + p = select.poll() + p.register(sock_fd, select.POLLIN) + return bool(p.poll(0)) diff --git a/.venv/Lib/site-packages/httpcore/py.typed b/.venv/Lib/site-packages/httpcore/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/httpx-0.27.0.dist-info/INSTALLER b/.venv/Lib/site-packages/httpx-0.27.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/Lib/site-packages/httpx-0.27.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/Lib/site-packages/httpx-0.27.0.dist-info/METADATA b/.venv/Lib/site-packages/httpx-0.27.0.dist-info/METADATA new file mode 100644 index 00000000..b5ec37c7 --- /dev/null +++ b/.venv/Lib/site-packages/httpx-0.27.0.dist-info/METADATA @@ -0,0 +1,209 @@ +Metadata-Version: 2.1 +Name: httpx +Version: 0.27.0 +Summary: The next generation HTTP client. 
+Project-URL: Changelog, https://github.com/encode/httpx/blob/master/CHANGELOG.md +Project-URL: Documentation, https://www.python-httpx.org +Project-URL: Homepage, https://github.com/encode/httpx +Project-URL: Source, https://github.com/encode/httpx +Author-email: Tom Christie +License-Expression: BSD-3-Clause +License-File: LICENSE.md +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Web Environment +Classifier: Framework :: AsyncIO +Classifier: Framework :: Trio +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Internet :: WWW/HTTP +Requires-Python: >=3.8 +Requires-Dist: anyio +Requires-Dist: certifi +Requires-Dist: httpcore==1.* +Requires-Dist: idna +Requires-Dist: sniffio +Provides-Extra: brotli +Requires-Dist: brotli; (platform_python_implementation == 'CPython') and extra == 'brotli' +Requires-Dist: brotlicffi; (platform_python_implementation != 'CPython') and extra == 'brotli' +Provides-Extra: cli +Requires-Dist: click==8.*; extra == 'cli' +Requires-Dist: pygments==2.*; extra == 'cli' +Requires-Dist: rich<14,>=10; extra == 'cli' +Provides-Extra: http2 +Requires-Dist: h2<5,>=3; extra == 'http2' +Provides-Extra: socks +Requires-Dist: socksio==1.*; extra == 'socks' +Description-Content-Type: text/markdown + +

+ HTTPX +

+ +

HTTPX - A next-generation HTTP client for Python.

+ +

+ + Test Suite + + + Package version + +

+ +HTTPX is a fully featured HTTP client library for Python 3. It includes **an integrated +command line client**, has support for both **HTTP/1.1 and HTTP/2**, and provides both **sync +and async APIs**. + +--- + +Install HTTPX using pip: + +```shell +$ pip install httpx +``` + +Now, let's get started: + +```pycon +>>> import httpx +>>> r = httpx.get('https://www.example.org/') +>>> r + +>>> r.status_code +200 +>>> r.headers['content-type'] +'text/html; charset=UTF-8' +>>> r.text +'\n\n\nExample Domain...' +``` + +Or, using the command-line client. + +```shell +$ pip install 'httpx[cli]' # The command line client is an optional dependency. +``` + +Which now allows us to use HTTPX directly from the command-line... + +

+ httpx --help +

+ +Sending a request... + +

+ httpx http://httpbin.org/json +

+ +## Features + +HTTPX builds on the well-established usability of `requests`, and gives you: + +* A broadly [requests-compatible API](https://www.python-httpx.org/compatibility/). +* An integrated command-line client. +* HTTP/1.1 [and HTTP/2 support](https://www.python-httpx.org/http2/). +* Standard synchronous interface, but with [async support if you need it](https://www.python-httpx.org/async/). +* Ability to make requests directly to [WSGI applications](https://www.python-httpx.org/advanced/#calling-into-python-web-apps) or [ASGI applications](https://www.python-httpx.org/async/#calling-into-python-web-apps). +* Strict timeouts everywhere. +* Fully type annotated. +* 100% test coverage. + +Plus all the standard features of `requests`... + +* International Domains and URLs +* Keep-Alive & Connection Pooling +* Sessions with Cookie Persistence +* Browser-style SSL Verification +* Basic/Digest Authentication +* Elegant Key/Value Cookies +* Automatic Decompression +* Automatic Content Decoding +* Unicode Response Bodies +* Multipart File Uploads +* HTTP(S) Proxy Support +* Connection Timeouts +* Streaming Downloads +* .netrc Support +* Chunked Requests + +## Installation + +Install with pip: + +```shell +$ pip install httpx +``` + +Or, to include the optional HTTP/2 support, use: + +```shell +$ pip install httpx[http2] +``` + +HTTPX requires Python 3.8+. + +## Documentation + +Project documentation is available at [https://www.python-httpx.org/](https://www.python-httpx.org/). + +For a run-through of all the basics, head over to the [QuickStart](https://www.python-httpx.org/quickstart/). + +For more advanced topics, see the [Advanced Usage](https://www.python-httpx.org/advanced/) section, the [async support](https://www.python-httpx.org/async/) section, or the [HTTP/2](https://www.python-httpx.org/http2/) section. + +The [Developer Interface](https://www.python-httpx.org/api/) provides a comprehensive API reference. 
+ +To find out about tools that integrate with HTTPX, see [Third Party Packages](https://www.python-httpx.org/third_party_packages/). + +## Contribute + +If you want to contribute with HTTPX check out the [Contributing Guide](https://www.python-httpx.org/contributing/) to learn how to start. + +## Dependencies + +The HTTPX project relies on these excellent libraries: + +* `httpcore` - The underlying transport implementation for `httpx`. + * `h11` - HTTP/1.1 support. +* `certifi` - SSL certificates. +* `idna` - Internationalized domain name support. +* `sniffio` - Async library autodetection. + +As well as these optional installs: + +* `h2` - HTTP/2 support. *(Optional, with `httpx[http2]`)* +* `socksio` - SOCKS proxy support. *(Optional, with `httpx[socks]`)* +* `rich` - Rich terminal support. *(Optional, with `httpx[cli]`)* +* `click` - Command line client support. *(Optional, with `httpx[cli]`)* +* `brotli` or `brotlicffi` - Decoding for "brotli" compressed responses. *(Optional, with `httpx[brotli]`)* + +A huge amount of credit is due to `requests` for the API layout that +much of this work follows, as well as to `urllib3` for plenty of design +inspiration around the lower-level networking details. + +--- + +

HTTPX is BSD licensed code.
Designed & crafted with care.

— 🦋 —

+ +## Release Information + +### Deprecated + +* The `app=...` shortcut has been deprecated. Use the explicit style of `transport=httpx.WSGITransport()` or `transport=httpx.ASGITransport()` instead. + +### Fixed + +* Respect the `http1` argument while configuring proxy transports. (#3023) +* Fix RFC 2069 mode digest authentication. (#3045) + + +--- + +[Full changelog](https://github.com/encode/httpx/blob/master/CHANGELOG.md) diff --git a/.venv/Lib/site-packages/httpx-0.27.0.dist-info/RECORD b/.venv/Lib/site-packages/httpx-0.27.0.dist-info/RECORD new file mode 100644 index 00000000..f2d96805 --- /dev/null +++ b/.venv/Lib/site-packages/httpx-0.27.0.dist-info/RECORD @@ -0,0 +1,56 @@ +../../Scripts/httpx.exe,sha256=fHdwa71DlNSQbwcGWlE0c-Hy2BTC5OUkyJ4gBUGV6uE,108390 +httpx-0.27.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +httpx-0.27.0.dist-info/METADATA,sha256=IU6gbQ_C5MYQcso6ftA80CYv5PFomXiTV4telUS5oRk,7184 +httpx-0.27.0.dist-info/RECORD,, +httpx-0.27.0.dist-info/WHEEL,sha256=TJPnKdtrSue7xZ_AVGkp9YXcvDrobsjBds1du3Nx6dc,87 +httpx-0.27.0.dist-info/entry_points.txt,sha256=2lVkdQmxLA1pNMgSN2eV89o90HCZezhmNwsy6ryKDSA,37 +httpx-0.27.0.dist-info/licenses/LICENSE.md,sha256=TsWdVE8StfU5o6cW_TIaxYzNgDC0ZSIfLIgCAM3yjY0,1508 +httpx/__init__.py,sha256=oCxVAsePEy5DE9eLhGAAq9H3RBGZUDaUROtGEyzbBRo,3210 +httpx/__pycache__/__init__.cpython-311.pyc,, +httpx/__pycache__/__version__.cpython-311.pyc,, +httpx/__pycache__/_api.cpython-311.pyc,, +httpx/__pycache__/_auth.cpython-311.pyc,, +httpx/__pycache__/_client.cpython-311.pyc,, +httpx/__pycache__/_compat.cpython-311.pyc,, +httpx/__pycache__/_config.cpython-311.pyc,, +httpx/__pycache__/_content.cpython-311.pyc,, +httpx/__pycache__/_decoders.cpython-311.pyc,, +httpx/__pycache__/_exceptions.cpython-311.pyc,, +httpx/__pycache__/_main.cpython-311.pyc,, +httpx/__pycache__/_models.cpython-311.pyc,, +httpx/__pycache__/_multipart.cpython-311.pyc,, +httpx/__pycache__/_status_codes.cpython-311.pyc,, 
+httpx/__pycache__/_types.cpython-311.pyc,, +httpx/__pycache__/_urlparse.cpython-311.pyc,, +httpx/__pycache__/_urls.cpython-311.pyc,, +httpx/__pycache__/_utils.cpython-311.pyc,, +httpx/__version__.py,sha256=IAHwuJkw3XUFRtOMccAyFbrjpZ3C0udEyrIq2Yrtk5k,108 +httpx/_api.py,sha256=G0EqHYvmusoCiDYvn5i45lJOaQsorhKqLvhnLITCn0Y,12928 +httpx/_auth.py,sha256=G3ithlfScnw0qV2uLmcvQ9iYSpnlV1y72Jk5WXKl5ns,11830 +httpx/_client.py,sha256=YzAIIQkS0iK2mbEjHmMZZm7SOvnxXX4W8N8SpFveuYk,67490 +httpx/_compat.py,sha256=rJERfjHkRvvHFVfltbHyCVcAboNsfEeN6j_00Z2C4k8,1563 +httpx/_config.py,sha256=M5BHACPDt_ZzcKYdqMM1YiS6uHzuWhtbqpu1VL2rKsQ,12204 +httpx/_content.py,sha256=nuK0vo7FLdablYSjXg7kDN6Qh25mTgLxXu5JDoXvQ8U,8047 +httpx/_decoders.py,sha256=HyX51vac2bRb9XWvYpYxXVIQkQpYJuB6ZMAWJhjdNzg,9904 +httpx/_exceptions.py,sha256=vnB78_LL-JhjQKP9nbU6lmxmMyI3b4ImkZc3B_Ik3Ho,7922 +httpx/_main.py,sha256=LcRXtGghiTux7yj0pGXQXx7PNfr3EHE3VcxBcCY4RcE,15635 +httpx/_models.py,sha256=4X3uCAiCm1e7gwnBh777L3lWV6NdjdgfXSg1Nk47JsQ,42303 +httpx/_multipart.py,sha256=CkS8cH5Nau1YrvizDSCRhdK13fltMV2-GnvM3msGRzw,8885 +httpx/_status_codes.py,sha256=n-6km_2zd2bn175zxug8L8xy16Eyx2v0KowqGrhSNf4,5618 +httpx/_transports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +httpx/_transports/__pycache__/__init__.cpython-311.pyc,, +httpx/_transports/__pycache__/asgi.cpython-311.pyc,, +httpx/_transports/__pycache__/base.cpython-311.pyc,, +httpx/_transports/__pycache__/default.cpython-311.pyc,, +httpx/_transports/__pycache__/mock.cpython-311.pyc,, +httpx/_transports/__pycache__/wsgi.cpython-311.pyc,, +httpx/_transports/asgi.py,sha256=TplpV_7STqay5uvPN_Q1c-K0i9Loy1LX4W0ndBg9fXI,5490 +httpx/_transports/base.py,sha256=Iv2ioumj7e-UBd_wU2i01_3Y4vAJRKLy8t0V99OF3Vc,2472 +httpx/_transports/default.py,sha256=lt9V3TJbSVoWmh4CktSI9Q1eFllFtOxNp7CsP0ZWNzM,13290 +httpx/_transports/mock.py,sha256=I_re3UXInPkN11eA2zACJzBJDvXEEtj-4eral48I7zs,1202 +httpx/_transports/wsgi.py,sha256=HeYO7Th2vKbbE5O3c5Z6Q_si77SF9OLOCW9FVTGZdO0,4795 
+httpx/_types.py,sha256=lveH-nW6V3VLKeY-EffHVDkOxCe94Irg-ebo3LxQ13s,3391 +httpx/_urlparse.py,sha256=z1ZFA2PClbW0-TQEh9M_oyctpxuW9nc1ZXDcA9Sv6bs,17720 +httpx/_urls.py,sha256=yCaLvbmxI1j7gWSEYffKvYG2j749OmqI5sFCQrsB2Vk,21783 +httpx/_utils.py,sha256=lByQlK36pmXTJa7WxlWEfB48tcKb9fxI8xEO4ayi1JM,13858 +httpx/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/.venv/Lib/site-packages/httpx-0.27.0.dist-info/WHEEL b/.venv/Lib/site-packages/httpx-0.27.0.dist-info/WHEEL new file mode 100644 index 00000000..5998f3aa --- /dev/null +++ b/.venv/Lib/site-packages/httpx-0.27.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.21.1 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.venv/Lib/site-packages/httpx-0.27.0.dist-info/entry_points.txt b/.venv/Lib/site-packages/httpx-0.27.0.dist-info/entry_points.txt new file mode 100644 index 00000000..8ae96007 --- /dev/null +++ b/.venv/Lib/site-packages/httpx-0.27.0.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +httpx = httpx:main diff --git a/.venv/Lib/site-packages/httpx-0.27.0.dist-info/licenses/LICENSE.md b/.venv/Lib/site-packages/httpx-0.27.0.dist-info/licenses/LICENSE.md new file mode 100644 index 00000000..ab79d16a --- /dev/null +++ b/.venv/Lib/site-packages/httpx-0.27.0.dist-info/licenses/LICENSE.md @@ -0,0 +1,12 @@ +Copyright © 2019, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 
+ +* Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.venv/Lib/site-packages/httpx/__init__.py b/.venv/Lib/site-packages/httpx/__init__.py new file mode 100644 index 00000000..f61112f8 --- /dev/null +++ b/.venv/Lib/site-packages/httpx/__init__.py @@ -0,0 +1,138 @@ +from .__version__ import __description__, __title__, __version__ +from ._api import delete, get, head, options, patch, post, put, request, stream +from ._auth import Auth, BasicAuth, DigestAuth, NetRCAuth +from ._client import USE_CLIENT_DEFAULT, AsyncClient, Client +from ._config import Limits, Proxy, Timeout, create_ssl_context +from ._content import ByteStream +from ._exceptions import ( + CloseError, + ConnectError, + ConnectTimeout, + CookieConflict, + DecodingError, + HTTPError, + HTTPStatusError, + InvalidURL, + LocalProtocolError, + NetworkError, + PoolTimeout, + ProtocolError, + ProxyError, + ReadError, + ReadTimeout, + RemoteProtocolError, + RequestError, + RequestNotRead, + ResponseNotRead, + StreamClosed, + StreamConsumed, + StreamError, + TimeoutException, + TooManyRedirects, + TransportError, + 
UnsupportedProtocol, + WriteError, + WriteTimeout, +) +from ._models import Cookies, Headers, Request, Response +from ._status_codes import codes +from ._transports.asgi import ASGITransport +from ._transports.base import AsyncBaseTransport, BaseTransport +from ._transports.default import AsyncHTTPTransport, HTTPTransport +from ._transports.mock import MockTransport +from ._transports.wsgi import WSGITransport +from ._types import AsyncByteStream, SyncByteStream +from ._urls import URL, QueryParams + +try: + from ._main import main +except ImportError: # pragma: no cover + + def main() -> None: # type: ignore + import sys + + print( + "The httpx command line client could not run because the required " + "dependencies were not installed.\nMake sure you've installed " + "everything with: pip install 'httpx[cli]'" + ) + sys.exit(1) + + +__all__ = [ + "__description__", + "__title__", + "__version__", + "ASGITransport", + "AsyncBaseTransport", + "AsyncByteStream", + "AsyncClient", + "AsyncHTTPTransport", + "Auth", + "BaseTransport", + "BasicAuth", + "ByteStream", + "Client", + "CloseError", + "codes", + "ConnectError", + "ConnectTimeout", + "CookieConflict", + "Cookies", + "create_ssl_context", + "DecodingError", + "delete", + "DigestAuth", + "get", + "head", + "Headers", + "HTTPError", + "HTTPStatusError", + "HTTPTransport", + "InvalidURL", + "Limits", + "LocalProtocolError", + "main", + "MockTransport", + "NetRCAuth", + "NetworkError", + "options", + "patch", + "PoolTimeout", + "post", + "ProtocolError", + "Proxy", + "ProxyError", + "put", + "QueryParams", + "ReadError", + "ReadTimeout", + "RemoteProtocolError", + "request", + "Request", + "RequestError", + "RequestNotRead", + "Response", + "ResponseNotRead", + "stream", + "StreamClosed", + "StreamConsumed", + "StreamError", + "SyncByteStream", + "Timeout", + "TimeoutException", + "TooManyRedirects", + "TransportError", + "UnsupportedProtocol", + "URL", + "USE_CLIENT_DEFAULT", + "WriteError", + "WriteTimeout", + 
"WSGITransport", +] + + +__locals = locals() +for __name in __all__: + if not __name.startswith("__"): + setattr(__locals[__name], "__module__", "httpx") # noqa diff --git a/.venv/Lib/site-packages/httpx/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/httpx/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..60e9c4d7 Binary files /dev/null and b/.venv/Lib/site-packages/httpx/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/__pycache__/__version__.cpython-311.pyc b/.venv/Lib/site-packages/httpx/__pycache__/__version__.cpython-311.pyc new file mode 100644 index 00000000..5c978f9c Binary files /dev/null and b/.venv/Lib/site-packages/httpx/__pycache__/__version__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/__pycache__/_api.cpython-311.pyc b/.venv/Lib/site-packages/httpx/__pycache__/_api.cpython-311.pyc new file mode 100644 index 00000000..02409b4c Binary files /dev/null and b/.venv/Lib/site-packages/httpx/__pycache__/_api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/__pycache__/_auth.cpython-311.pyc b/.venv/Lib/site-packages/httpx/__pycache__/_auth.cpython-311.pyc new file mode 100644 index 00000000..8f958586 Binary files /dev/null and b/.venv/Lib/site-packages/httpx/__pycache__/_auth.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/__pycache__/_client.cpython-311.pyc b/.venv/Lib/site-packages/httpx/__pycache__/_client.cpython-311.pyc new file mode 100644 index 00000000..226b92a0 Binary files /dev/null and b/.venv/Lib/site-packages/httpx/__pycache__/_client.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/__pycache__/_compat.cpython-311.pyc b/.venv/Lib/site-packages/httpx/__pycache__/_compat.cpython-311.pyc new file mode 100644 index 00000000..4ec02c84 Binary files /dev/null and b/.venv/Lib/site-packages/httpx/__pycache__/_compat.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/httpx/__pycache__/_config.cpython-311.pyc b/.venv/Lib/site-packages/httpx/__pycache__/_config.cpython-311.pyc new file mode 100644 index 00000000..d0235b4e Binary files /dev/null and b/.venv/Lib/site-packages/httpx/__pycache__/_config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/__pycache__/_content.cpython-311.pyc b/.venv/Lib/site-packages/httpx/__pycache__/_content.cpython-311.pyc new file mode 100644 index 00000000..eb97b5d7 Binary files /dev/null and b/.venv/Lib/site-packages/httpx/__pycache__/_content.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/__pycache__/_decoders.cpython-311.pyc b/.venv/Lib/site-packages/httpx/__pycache__/_decoders.cpython-311.pyc new file mode 100644 index 00000000..bee1333c Binary files /dev/null and b/.venv/Lib/site-packages/httpx/__pycache__/_decoders.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/__pycache__/_exceptions.cpython-311.pyc b/.venv/Lib/site-packages/httpx/__pycache__/_exceptions.cpython-311.pyc new file mode 100644 index 00000000..c778b8fd Binary files /dev/null and b/.venv/Lib/site-packages/httpx/__pycache__/_exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/__pycache__/_main.cpython-311.pyc b/.venv/Lib/site-packages/httpx/__pycache__/_main.cpython-311.pyc new file mode 100644 index 00000000..aedf4bd6 Binary files /dev/null and b/.venv/Lib/site-packages/httpx/__pycache__/_main.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/__pycache__/_models.cpython-311.pyc b/.venv/Lib/site-packages/httpx/__pycache__/_models.cpython-311.pyc new file mode 100644 index 00000000..85f245f5 Binary files /dev/null and b/.venv/Lib/site-packages/httpx/__pycache__/_models.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/__pycache__/_multipart.cpython-311.pyc b/.venv/Lib/site-packages/httpx/__pycache__/_multipart.cpython-311.pyc new file mode 100644 index 00000000..8c2e07aa Binary files /dev/null and 
b/.venv/Lib/site-packages/httpx/__pycache__/_multipart.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/__pycache__/_status_codes.cpython-311.pyc b/.venv/Lib/site-packages/httpx/__pycache__/_status_codes.cpython-311.pyc new file mode 100644 index 00000000..30c5c876 Binary files /dev/null and b/.venv/Lib/site-packages/httpx/__pycache__/_status_codes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/__pycache__/_types.cpython-311.pyc b/.venv/Lib/site-packages/httpx/__pycache__/_types.cpython-311.pyc new file mode 100644 index 00000000..919ea1c7 Binary files /dev/null and b/.venv/Lib/site-packages/httpx/__pycache__/_types.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/__pycache__/_urlparse.cpython-311.pyc b/.venv/Lib/site-packages/httpx/__pycache__/_urlparse.cpython-311.pyc new file mode 100644 index 00000000..a6e550cb Binary files /dev/null and b/.venv/Lib/site-packages/httpx/__pycache__/_urlparse.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/__pycache__/_urls.cpython-311.pyc b/.venv/Lib/site-packages/httpx/__pycache__/_urls.cpython-311.pyc new file mode 100644 index 00000000..795c5ca4 Binary files /dev/null and b/.venv/Lib/site-packages/httpx/__pycache__/_urls.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/__pycache__/_utils.cpython-311.pyc b/.venv/Lib/site-packages/httpx/__pycache__/_utils.cpython-311.pyc new file mode 100644 index 00000000..e87e2d88 Binary files /dev/null and b/.venv/Lib/site-packages/httpx/__pycache__/_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/__version__.py b/.venv/Lib/site-packages/httpx/__version__.py new file mode 100644 index 00000000..c121a898 --- /dev/null +++ b/.venv/Lib/site-packages/httpx/__version__.py @@ -0,0 +1,3 @@ +__title__ = "httpx" +__description__ = "A next generation HTTP client, for Python 3." 
+__version__ = "0.27.0" diff --git a/.venv/Lib/site-packages/httpx/_api.py b/.venv/Lib/site-packages/httpx/_api.py new file mode 100644 index 00000000..b5821cc4 --- /dev/null +++ b/.venv/Lib/site-packages/httpx/_api.py @@ -0,0 +1,467 @@ +from __future__ import annotations + +import typing +from contextlib import contextmanager + +from ._client import Client +from ._config import DEFAULT_TIMEOUT_CONFIG +from ._models import Response +from ._types import ( + AuthTypes, + CertTypes, + CookieTypes, + HeaderTypes, + ProxiesTypes, + ProxyTypes, + QueryParamTypes, + RequestContent, + RequestData, + RequestFiles, + TimeoutTypes, + URLTypes, + VerifyTypes, +) + + +def request( + method: str, + url: URLTypes, + *, + params: QueryParamTypes | None = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + verify: VerifyTypes = True, + cert: CertTypes | None = None, + trust_env: bool = True, +) -> Response: + """ + Sends an HTTP request. + + **Parameters:** + + * **method** - HTTP method for the new `Request` object: `GET`, `OPTIONS`, + `HEAD`, `POST`, `PUT`, `PATCH`, or `DELETE`. + * **url** - URL for the new `Request` object. + * **params** - *(optional)* Query parameters to include in the URL, as a + string, dictionary, or sequence of two-tuples. + * **content** - *(optional)* Binary content to include in the body of the + request, as bytes or a byte iterator. + * **data** - *(optional)* Form data to include in the body of the request, + as a dictionary. + * **files** - *(optional)* A dictionary of upload files to include in the + body of the request. 
+ * **json** - *(optional)* A JSON serializable object to include in the body + of the request. + * **headers** - *(optional)* Dictionary of HTTP headers to include in the + request. + * **cookies** - *(optional)* Dictionary of Cookie items to include in the + request. + * **auth** - *(optional)* An authentication class to use when sending the + request. + * **proxy** - *(optional)* A proxy URL where all the traffic should be routed. + * **proxies** - *(optional)* A dictionary mapping proxy keys to proxy URLs. + * **timeout** - *(optional)* The timeout configuration to use when sending + the request. + * **follow_redirects** - *(optional)* Enables or disables HTTP redirects. + * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to + verify the identity of requested hosts. Either `True` (default CA bundle), + a path to an SSL certificate file, an `ssl.SSLContext`, or `False` + (which will disable verification). + * **cert** - *(optional)* An SSL certificate used by the requested host + to authenticate the client. Either a path to an SSL certificate file, or + two-tuple of (certificate file, key file), or a three-tuple of (certificate + file, key file, password). + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. 
+ + **Returns:** `Response` + + Usage: + + ``` + >>> import httpx + >>> response = httpx.request('GET', 'https://httpbin.org/get') + >>> response + + ``` + """ + with Client( + cookies=cookies, + proxy=proxy, + proxies=proxies, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) as client: + return client.request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + auth=auth, + follow_redirects=follow_redirects, + ) + + +@contextmanager +def stream( + method: str, + url: URLTypes, + *, + params: QueryParamTypes | None = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + verify: VerifyTypes = True, + cert: CertTypes | None = None, + trust_env: bool = True, +) -> typing.Iterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. 
+ + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + with Client( + cookies=cookies, + proxy=proxy, + proxies=proxies, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) as client: + with client.stream( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + auth=auth, + follow_redirects=follow_redirects, + ) as response: + yield response + + +def get( + url: URLTypes, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + follow_redirects: bool = False, + cert: CertTypes | None = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `GET` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `GET` requests should not include a request body. + """ + return request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def options( + url: URLTypes, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + follow_redirects: bool = False, + cert: CertTypes | None = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends an `OPTIONS` request. + + **Parameters**: See `httpx.request`. 
+ + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `OPTIONS` requests should not include a request body. + """ + return request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def head( + url: URLTypes, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + follow_redirects: bool = False, + cert: CertTypes | None = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `HEAD` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `HEAD` requests should not include a request body. + """ + return request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def post( + url: URLTypes, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + follow_redirects: bool = False, + cert: CertTypes | None = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `POST` request. 
+ + **Parameters**: See `httpx.request`. + """ + return request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def put( + url: URLTypes, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + follow_redirects: bool = False, + cert: CertTypes | None = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `PUT` request. + + **Parameters**: See `httpx.request`. + """ + return request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def patch( + url: URLTypes, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + follow_redirects: bool = False, + cert: CertTypes | None = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `PATCH` request. 
+ + **Parameters**: See `httpx.request`. + """ + return request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def delete( + url: URLTypes, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + follow_redirects: bool = False, + cert: CertTypes | None = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `DELETE` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `DELETE` requests should not include a request body. + """ + return request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) diff --git a/.venv/Lib/site-packages/httpx/_auth.py b/.venv/Lib/site-packages/httpx/_auth.py new file mode 100644 index 00000000..903e3996 --- /dev/null +++ b/.venv/Lib/site-packages/httpx/_auth.py @@ -0,0 +1,345 @@ +from __future__ import annotations + +import hashlib +import os +import re +import time +import typing +from base64 import b64encode +from urllib.request import parse_http_list + +from ._exceptions import ProtocolError +from ._models import Cookies, Request, Response +from ._utils import to_bytes, to_str, unquote + +if typing.TYPE_CHECKING: # pragma: no cover + from hashlib import _Hash + + +class Auth: + """ + Base class for all authentication schemes. 
+ + To implement a custom authentication scheme, subclass `Auth` and override + the `.auth_flow()` method. + + If the authentication scheme does I/O such as disk access or network calls, or uses + synchronization primitives such as locks, you should override `.sync_auth_flow()` + and/or `.async_auth_flow()` instead of `.auth_flow()` to provide specialized + implementations that will be used by `Client` and `AsyncClient` respectively. + """ + + requires_request_body = False + requires_response_body = False + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + """ + Execute the authentication flow. + + To dispatch a request, `yield` it: + + ``` + yield request + ``` + + The client will `.send()` the response back into the flow generator. You can + access it like so: + + ``` + response = yield request + ``` + + A `return` (or reaching the end of the generator) will result in the + client returning the last response obtained from the server. + + You can dispatch as many requests as is necessary. + """ + yield request + + def sync_auth_flow( + self, request: Request + ) -> typing.Generator[Request, Response, None]: + """ + Execute the authentication flow synchronously. + + By default, this defers to `.auth_flow()`. You should override this method + when the authentication scheme does I/O and/or uses concurrency primitives. + """ + if self.requires_request_body: + request.read() + + flow = self.auth_flow(request) + request = next(flow) + + while True: + response = yield request + if self.requires_response_body: + response.read() + + try: + request = flow.send(response) + except StopIteration: + break + + async def async_auth_flow( + self, request: Request + ) -> typing.AsyncGenerator[Request, Response]: + """ + Execute the authentication flow asynchronously. + + By default, this defers to `.auth_flow()`. You should override this method + when the authentication scheme does I/O and/or uses concurrency primitives. 
+ """ + if self.requires_request_body: + await request.aread() + + flow = self.auth_flow(request) + request = next(flow) + + while True: + response = yield request + if self.requires_response_body: + await response.aread() + + try: + request = flow.send(response) + except StopIteration: + break + + +class FunctionAuth(Auth): + """ + Allows the 'auth' argument to be passed as a simple callable function, + that takes the request, and returns a new, modified request. + """ + + def __init__(self, func: typing.Callable[[Request], Request]) -> None: + self._func = func + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + yield self._func(request) + + +class BasicAuth(Auth): + """ + Allows the 'auth' argument to be passed as a (username, password) pair, + and uses HTTP Basic authentication. + """ + + def __init__(self, username: str | bytes, password: str | bytes) -> None: + self._auth_header = self._build_auth_header(username, password) + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + request.headers["Authorization"] = self._auth_header + yield request + + def _build_auth_header(self, username: str | bytes, password: str | bytes) -> str: + userpass = b":".join((to_bytes(username), to_bytes(password))) + token = b64encode(userpass).decode() + return f"Basic {token}" + + +class NetRCAuth(Auth): + """ + Use a 'netrc' file to lookup basic auth credentials based on the url host. + """ + + def __init__(self, file: str | None = None) -> None: + # Lazily import 'netrc'. + # There's no need for us to load this module unless 'NetRCAuth' is being used. + import netrc + + self._netrc_info = netrc.netrc(file) + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + auth_info = self._netrc_info.authenticators(request.url.host) + if auth_info is None or not auth_info[2]: + # The netrc file did not have authentication credentials for this host. 
+ yield request + else: + # Build a basic auth header with credentials from the netrc file. + request.headers["Authorization"] = self._build_auth_header( + username=auth_info[0], password=auth_info[2] + ) + yield request + + def _build_auth_header(self, username: str | bytes, password: str | bytes) -> str: + userpass = b":".join((to_bytes(username), to_bytes(password))) + token = b64encode(userpass).decode() + return f"Basic {token}" + + +class DigestAuth(Auth): + _ALGORITHM_TO_HASH_FUNCTION: dict[str, typing.Callable[[bytes], _Hash]] = { + "MD5": hashlib.md5, + "MD5-SESS": hashlib.md5, + "SHA": hashlib.sha1, + "SHA-SESS": hashlib.sha1, + "SHA-256": hashlib.sha256, + "SHA-256-SESS": hashlib.sha256, + "SHA-512": hashlib.sha512, + "SHA-512-SESS": hashlib.sha512, + } + + def __init__(self, username: str | bytes, password: str | bytes) -> None: + self._username = to_bytes(username) + self._password = to_bytes(password) + self._last_challenge: _DigestAuthChallenge | None = None + self._nonce_count = 1 + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + if self._last_challenge: + request.headers["Authorization"] = self._build_auth_header( + request, self._last_challenge + ) + + response = yield request + + if response.status_code != 401 or "www-authenticate" not in response.headers: + # If the response is not a 401 then we don't + # need to build an authenticated request. + return + + for auth_header in response.headers.get_list("www-authenticate"): + if auth_header.lower().startswith("digest "): + break + else: + # If the response does not include a 'WWW-Authenticate: Digest ...' + # header, then we don't need to build an authenticated request. 
+ return + + self._last_challenge = self._parse_challenge(request, response, auth_header) + self._nonce_count = 1 + + request.headers["Authorization"] = self._build_auth_header( + request, self._last_challenge + ) + if response.cookies: + Cookies(response.cookies).set_cookie_header(request=request) + yield request + + def _parse_challenge( + self, request: Request, response: Response, auth_header: str + ) -> _DigestAuthChallenge: + """ + Returns a challenge from a Digest WWW-Authenticate header. + These take the form of: + `Digest realm="realm@host.com",qop="auth,auth-int",nonce="abc",opaque="xyz"` + """ + scheme, _, fields = auth_header.partition(" ") + + # This method should only ever have been called with a Digest auth header. + assert scheme.lower() == "digest" + + header_dict: dict[str, str] = {} + for field in parse_http_list(fields): + key, value = field.strip().split("=", 1) + header_dict[key] = unquote(value) + + try: + realm = header_dict["realm"].encode() + nonce = header_dict["nonce"].encode() + algorithm = header_dict.get("algorithm", "MD5") + opaque = header_dict["opaque"].encode() if "opaque" in header_dict else None + qop = header_dict["qop"].encode() if "qop" in header_dict else None + return _DigestAuthChallenge( + realm=realm, nonce=nonce, algorithm=algorithm, opaque=opaque, qop=qop + ) + except KeyError as exc: + message = "Malformed Digest WWW-Authenticate header" + raise ProtocolError(message, request=request) from exc + + def _build_auth_header( + self, request: Request, challenge: _DigestAuthChallenge + ) -> str: + hash_func = self._ALGORITHM_TO_HASH_FUNCTION[challenge.algorithm.upper()] + + def digest(data: bytes) -> bytes: + return hash_func(data).hexdigest().encode() + + A1 = b":".join((self._username, challenge.realm, self._password)) + + path = request.url.raw_path + A2 = b":".join((request.method.encode(), path)) + # TODO: implement auth-int + HA2 = digest(A2) + + nc_value = b"%08x" % self._nonce_count + cnonce = 
self._get_client_nonce(self._nonce_count, challenge.nonce) + self._nonce_count += 1 + + HA1 = digest(A1) + if challenge.algorithm.lower().endswith("-sess"): + HA1 = digest(b":".join((HA1, challenge.nonce, cnonce))) + + qop = self._resolve_qop(challenge.qop, request=request) + if qop is None: + # Following RFC 2069 + digest_data = [HA1, challenge.nonce, HA2] + else: + # Following RFC 2617/7616 + digest_data = [HA1, challenge.nonce, nc_value, cnonce, qop, HA2] + + format_args = { + "username": self._username, + "realm": challenge.realm, + "nonce": challenge.nonce, + "uri": path, + "response": digest(b":".join(digest_data)), + "algorithm": challenge.algorithm.encode(), + } + if challenge.opaque: + format_args["opaque"] = challenge.opaque + if qop: + format_args["qop"] = b"auth" + format_args["nc"] = nc_value + format_args["cnonce"] = cnonce + + return "Digest " + self._get_header_value(format_args) + + def _get_client_nonce(self, nonce_count: int, nonce: bytes) -> bytes: + s = str(nonce_count).encode() + s += nonce + s += time.ctime().encode() + s += os.urandom(8) + + return hashlib.sha1(s).hexdigest()[:16].encode() + + def _get_header_value(self, header_fields: dict[str, bytes]) -> str: + NON_QUOTED_FIELDS = ("algorithm", "qop", "nc") + QUOTED_TEMPLATE = '{}="{}"' + NON_QUOTED_TEMPLATE = "{}={}" + + header_value = "" + for i, (field, value) in enumerate(header_fields.items()): + if i > 0: + header_value += ", " + template = ( + QUOTED_TEMPLATE + if field not in NON_QUOTED_FIELDS + else NON_QUOTED_TEMPLATE + ) + header_value += template.format(field, to_str(value)) + + return header_value + + def _resolve_qop(self, qop: bytes | None, request: Request) -> bytes | None: + if qop is None: + return None + qops = re.split(b", ?", qop) + if b"auth" in qops: + return b"auth" + + if qops == [b"auth-int"]: + raise NotImplementedError("Digest auth-int support is not yet implemented") + + message = f'Unexpected qop value "{qop!r}" in digest auth' + raise ProtocolError(message, 
request=request) + + +class _DigestAuthChallenge(typing.NamedTuple): + realm: bytes + nonce: bytes + algorithm: str + opaque: bytes | None + qop: bytes | None diff --git a/.venv/Lib/site-packages/httpx/_client.py b/.venv/Lib/site-packages/httpx/_client.py new file mode 100644 index 00000000..e2c6702e --- /dev/null +++ b/.venv/Lib/site-packages/httpx/_client.py @@ -0,0 +1,2052 @@ +from __future__ import annotations + +import datetime +import enum +import logging +import typing +import warnings +from contextlib import asynccontextmanager, contextmanager +from types import TracebackType + +from .__version__ import __version__ +from ._auth import Auth, BasicAuth, FunctionAuth +from ._config import ( + DEFAULT_LIMITS, + DEFAULT_MAX_REDIRECTS, + DEFAULT_TIMEOUT_CONFIG, + Limits, + Proxy, + Timeout, +) +from ._decoders import SUPPORTED_DECODERS +from ._exceptions import ( + InvalidURL, + RemoteProtocolError, + TooManyRedirects, + request_context, +) +from ._models import Cookies, Headers, Request, Response +from ._status_codes import codes +from ._transports.asgi import ASGITransport +from ._transports.base import AsyncBaseTransport, BaseTransport +from ._transports.default import AsyncHTTPTransport, HTTPTransport +from ._transports.wsgi import WSGITransport +from ._types import ( + AsyncByteStream, + AuthTypes, + CertTypes, + CookieTypes, + HeaderTypes, + ProxiesTypes, + ProxyTypes, + QueryParamTypes, + RequestContent, + RequestData, + RequestExtensions, + RequestFiles, + SyncByteStream, + TimeoutTypes, + URLTypes, + VerifyTypes, +) +from ._urls import URL, QueryParams +from ._utils import ( + Timer, + URLPattern, + get_environment_proxies, + is_https_redirect, + same_origin, +) + +# The type annotation for @classmethod and context managers here follows PEP 484 +# https://www.python.org/dev/peps/pep-0484/#annotating-instance-and-class-methods +T = typing.TypeVar("T", bound="Client") +U = typing.TypeVar("U", bound="AsyncClient") + + +class UseClientDefault: + """ + For 
some parameters such as `auth=...` and `timeout=...` we need to be able + to indicate the default "unset" state, in a way that is distinctly different + to using `None`. + + The default "unset" state indicates that whatever default is set on the + client should be used. This is different to setting `None`, which + explicitly disables the parameter, possibly overriding a client default. + + For example we use `timeout=USE_CLIENT_DEFAULT` in the `request()` signature. + Omitting the `timeout` parameter will send a request using whatever default + timeout has been configured on the client. Including `timeout=None` will + ensure no timeout is used. + + Note that user code shouldn't need to use the `USE_CLIENT_DEFAULT` constant, + but it is used internally when a parameter is not included. + """ + + +USE_CLIENT_DEFAULT = UseClientDefault() + + +logger = logging.getLogger("httpx") + +USER_AGENT = f"python-httpx/{__version__}" +ACCEPT_ENCODING = ", ".join( + [key for key in SUPPORTED_DECODERS.keys() if key != "identity"] +) + + +class ClientState(enum.Enum): + # UNOPENED: + # The client has been instantiated, but has not been used to send a request, + # or been opened by entering the context of a `with` block. + UNOPENED = 1 + # OPENED: + # The client has either sent a request, or is within a `with` block. + OPENED = 2 + # CLOSED: + # The client has either exited the `with` block, or `close()` has + # been called explicitly. + CLOSED = 3 + + +class BoundSyncStream(SyncByteStream): + """ + A byte stream that is bound to a given response instance, and that + ensures the `response.elapsed` is set once the response is closed. 
+ """ + + def __init__( + self, stream: SyncByteStream, response: Response, timer: Timer + ) -> None: + self._stream = stream + self._response = response + self._timer = timer + + def __iter__(self) -> typing.Iterator[bytes]: + for chunk in self._stream: + yield chunk + + def close(self) -> None: + seconds = self._timer.sync_elapsed() + self._response.elapsed = datetime.timedelta(seconds=seconds) + self._stream.close() + + +class BoundAsyncStream(AsyncByteStream): + """ + An async byte stream that is bound to a given response instance, and that + ensures the `response.elapsed` is set once the response is closed. + """ + + def __init__( + self, stream: AsyncByteStream, response: Response, timer: Timer + ) -> None: + self._stream = stream + self._response = response + self._timer = timer + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + async for chunk in self._stream: + yield chunk + + async def aclose(self) -> None: + seconds = await self._timer.async_elapsed() + self._response.elapsed = datetime.timedelta(seconds=seconds) + await self._stream.aclose() + + +EventHook = typing.Callable[..., typing.Any] + + +class BaseClient: + def __init__( + self, + *, + auth: AuthTypes | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None, + base_url: URLTypes = "", + trust_env: bool = True, + default_encoding: str | typing.Callable[[bytes], str] = "utf-8", + ) -> None: + event_hooks = {} if event_hooks is None else event_hooks + + self._base_url = self._enforce_trailing_slash(URL(base_url)) + + self._auth = self._build_auth(auth) + self._params = QueryParams(params) + self.headers = Headers(headers) + self._cookies = Cookies(cookies) + self._timeout = Timeout(timeout) + self.follow_redirects = 
follow_redirects + self.max_redirects = max_redirects + self._event_hooks = { + "request": list(event_hooks.get("request", [])), + "response": list(event_hooks.get("response", [])), + } + self._trust_env = trust_env + self._default_encoding = default_encoding + self._state = ClientState.UNOPENED + + @property + def is_closed(self) -> bool: + """ + Check if the client being closed + """ + return self._state == ClientState.CLOSED + + @property + def trust_env(self) -> bool: + return self._trust_env + + def _enforce_trailing_slash(self, url: URL) -> URL: + if url.raw_path.endswith(b"/"): + return url + return url.copy_with(raw_path=url.raw_path + b"/") + + def _get_proxy_map( + self, proxies: ProxiesTypes | None, allow_env_proxies: bool + ) -> dict[str, Proxy | None]: + if proxies is None: + if allow_env_proxies: + return { + key: None if url is None else Proxy(url=url) + for key, url in get_environment_proxies().items() + } + return {} + if isinstance(proxies, dict): + new_proxies = {} + for key, value in proxies.items(): + proxy = Proxy(url=value) if isinstance(value, (str, URL)) else value + new_proxies[str(key)] = proxy + return new_proxies + else: + proxy = Proxy(url=proxies) if isinstance(proxies, (str, URL)) else proxies + return {"all://": proxy} + + @property + def timeout(self) -> Timeout: + return self._timeout + + @timeout.setter + def timeout(self, timeout: TimeoutTypes) -> None: + self._timeout = Timeout(timeout) + + @property + def event_hooks(self) -> dict[str, list[EventHook]]: + return self._event_hooks + + @event_hooks.setter + def event_hooks(self, event_hooks: dict[str, list[EventHook]]) -> None: + self._event_hooks = { + "request": list(event_hooks.get("request", [])), + "response": list(event_hooks.get("response", [])), + } + + @property + def auth(self) -> Auth | None: + """ + Authentication class used when none is passed at the request-level. + + See also [Authentication][0]. 
+ + [0]: /quickstart/#authentication + """ + return self._auth + + @auth.setter + def auth(self, auth: AuthTypes) -> None: + self._auth = self._build_auth(auth) + + @property + def base_url(self) -> URL: + """ + Base URL to use when sending requests with relative URLs. + """ + return self._base_url + + @base_url.setter + def base_url(self, url: URLTypes) -> None: + self._base_url = self._enforce_trailing_slash(URL(url)) + + @property + def headers(self) -> Headers: + """ + HTTP headers to include when sending requests. + """ + return self._headers + + @headers.setter + def headers(self, headers: HeaderTypes) -> None: + client_headers = Headers( + { + b"Accept": b"*/*", + b"Accept-Encoding": ACCEPT_ENCODING.encode("ascii"), + b"Connection": b"keep-alive", + b"User-Agent": USER_AGENT.encode("ascii"), + } + ) + client_headers.update(headers) + self._headers = client_headers + + @property + def cookies(self) -> Cookies: + """ + Cookie values to include when sending requests. + """ + return self._cookies + + @cookies.setter + def cookies(self, cookies: CookieTypes) -> None: + self._cookies = Cookies(cookies) + + @property + def params(self) -> QueryParams: + """ + Query parameters to include in the URL when sending requests. + """ + return self._params + + @params.setter + def params(self, params: QueryParamTypes) -> None: + self._params = QueryParams(params) + + def build_request( + self, + method: str, + url: URLTypes, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Request: + """ + Build and return a request instance. + + * The `params`, `headers` and `cookies` arguments + are merged with any values set on the client. 
+ * The `url` argument is merged with any `base_url` set on the client. + + See also: [Request instances][0] + + [0]: /advanced/#request-instances + """ + url = self._merge_url(url) + headers = self._merge_headers(headers) + cookies = self._merge_cookies(cookies) + params = self._merge_queryparams(params) + extensions = {} if extensions is None else extensions + if "timeout" not in extensions: + timeout = ( + self.timeout + if isinstance(timeout, UseClientDefault) + else Timeout(timeout) + ) + extensions = dict(**extensions, timeout=timeout.as_dict()) + return Request( + method, + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + extensions=extensions, + ) + + def _merge_url(self, url: URLTypes) -> URL: + """ + Merge a URL argument together with any 'base_url' on the client, + to create the URL used for the outgoing request. + """ + merge_url = URL(url) + if merge_url.is_relative_url: + # To merge URLs we always append to the base URL. To get this + # behaviour correct we always ensure the base URL ends in a '/' + # separator, and strip any leading '/' from the merge URL. + # + # So, eg... + # + # >>> client = Client(base_url="https://www.example.com/subpath") + # >>> client.base_url + # URL('https://www.example.com/subpath/') + # >>> client.build_request("GET", "/path").url + # URL('https://www.example.com/subpath/path') + merge_raw_path = self.base_url.raw_path + merge_url.raw_path.lstrip(b"/") + return self.base_url.copy_with(raw_path=merge_raw_path) + return merge_url + + def _merge_cookies(self, cookies: CookieTypes | None = None) -> CookieTypes | None: + """ + Merge a cookies argument together with any cookies on the client, + to create the cookies used for the outgoing request. 
+ """ + if cookies or self.cookies: + merged_cookies = Cookies(self.cookies) + merged_cookies.update(cookies) + return merged_cookies + return cookies + + def _merge_headers(self, headers: HeaderTypes | None = None) -> HeaderTypes | None: + """ + Merge a headers argument together with any headers on the client, + to create the headers used for the outgoing request. + """ + merged_headers = Headers(self.headers) + merged_headers.update(headers) + return merged_headers + + def _merge_queryparams( + self, params: QueryParamTypes | None = None + ) -> QueryParamTypes | None: + """ + Merge a queryparams argument together with any queryparams on the client, + to create the queryparams used for the outgoing request. + """ + if params or self.params: + merged_queryparams = QueryParams(self.params) + return merged_queryparams.merge(params) + return params + + def _build_auth(self, auth: AuthTypes | None) -> Auth | None: + if auth is None: + return None + elif isinstance(auth, tuple): + return BasicAuth(username=auth[0], password=auth[1]) + elif isinstance(auth, Auth): + return auth + elif callable(auth): + return FunctionAuth(func=auth) + else: + raise TypeError(f'Invalid "auth" argument: {auth!r}') + + def _build_request_auth( + self, + request: Request, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + ) -> Auth: + auth = ( + self._auth if isinstance(auth, UseClientDefault) else self._build_auth(auth) + ) + + if auth is not None: + return auth + + username, password = request.url.username, request.url.password + if username or password: + return BasicAuth(username=username, password=password) + + return Auth() + + def _build_redirect_request(self, request: Request, response: Response) -> Request: + """ + Given a request and a redirect response, return a new request that + should be used to effect the redirect. 
+ """ + method = self._redirect_method(request, response) + url = self._redirect_url(request, response) + headers = self._redirect_headers(request, url, method) + stream = self._redirect_stream(request, method) + cookies = Cookies(self.cookies) + return Request( + method=method, + url=url, + headers=headers, + cookies=cookies, + stream=stream, + extensions=request.extensions, + ) + + def _redirect_method(self, request: Request, response: Response) -> str: + """ + When being redirected we may want to change the method of the request + based on certain specs or browser behavior. + """ + method = request.method + + # https://tools.ietf.org/html/rfc7231#section-6.4.4 + if response.status_code == codes.SEE_OTHER and method != "HEAD": + method = "GET" + + # Do what the browsers do, despite standards... + # Turn 302s into GETs. + if response.status_code == codes.FOUND and method != "HEAD": + method = "GET" + + # If a POST is responded to with a 301, turn it into a GET. + # This bizarre behaviour is explained in 'requests' issue 1704. + if response.status_code == codes.MOVED_PERMANENTLY and method == "POST": + method = "GET" + + return method + + def _redirect_url(self, request: Request, response: Response) -> URL: + """ + Return the URL for the redirect to follow. + """ + location = response.headers["Location"] + + try: + url = URL(location) + except InvalidURL as exc: + raise RemoteProtocolError( + f"Invalid URL in location header: {exc}.", request=request + ) from None + + # Handle malformed 'Location' headers that are "absolute" form, have no host. + # See: https://github.com/encode/httpx/issues/771 + if url.scheme and not url.host: + url = url.copy_with(host=request.url.host) + + # Facilitate relative 'Location' headers, as allowed by RFC 7231. + # (e.g. 
'/path/to/resource' instead of 'http://domain.tld/path/to/resource') + if url.is_relative_url: + url = request.url.join(url) + + # Attach previous fragment if needed (RFC 7231 7.1.2) + if request.url.fragment and not url.fragment: + url = url.copy_with(fragment=request.url.fragment) + + return url + + def _redirect_headers(self, request: Request, url: URL, method: str) -> Headers: + """ + Return the headers that should be used for the redirect request. + """ + headers = Headers(request.headers) + + if not same_origin(url, request.url): + if not is_https_redirect(request.url, url): + # Strip Authorization headers when responses are redirected + # away from the origin. (Except for direct HTTP to HTTPS redirects.) + headers.pop("Authorization", None) + + # Update the Host header. + headers["Host"] = url.netloc.decode("ascii") + + if method != request.method and method == "GET": + # If we've switch to a 'GET' request, then strip any headers which + # are only relevant to the request body. + headers.pop("Content-Length", None) + headers.pop("Transfer-Encoding", None) + + # We should use the client cookie store to determine any cookie header, + # rather than whatever was on the original outgoing request. + headers.pop("Cookie", None) + + return headers + + def _redirect_stream( + self, request: Request, method: str + ) -> SyncByteStream | AsyncByteStream | None: + """ + Return the body that should be used for the redirect request. + """ + if method != request.method and method == "GET": + return None + + return request.stream + + +class Client(BaseClient): + """ + An HTTP client, with connection pooling, HTTP/2, redirects, cookie persistence, etc. + + It can be shared between threads. + + Usage: + + ```python + >>> client = httpx.Client() + >>> response = client.get('https://example.org') + ``` + + **Parameters:** + + * **auth** - *(optional)* An authentication class to use when sending + requests. 
+ * **params** - *(optional)* Query parameters to include in request URLs, as + a string, dictionary, or sequence of two-tuples. + * **headers** - *(optional)* Dictionary of HTTP headers to include when + sending requests. + * **cookies** - *(optional)* Dictionary of Cookie items to include when + sending requests. + * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to + verify the identity of requested hosts. Either `True` (default CA bundle), + a path to an SSL certificate file, an `ssl.SSLContext`, or `False` + (which will disable verification). + * **cert** - *(optional)* An SSL certificate used by the requested host + to authenticate the client. Either a path to an SSL certificate file, or + two-tuple of (certificate file, key file), or a three-tuple of (certificate + file, key file, password). + * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be + enabled. Defaults to `False`. + * **proxy** - *(optional)* A proxy URL where all the traffic should be routed. + * **proxies** - *(optional)* A dictionary mapping proxy keys to proxy + URLs. + * **timeout** - *(optional)* The timeout configuration to use when sending + requests. + * **limits** - *(optional)* The limits configuration to use. + * **max_redirects** - *(optional)* The maximum number of redirect responses + that should be followed. + * **base_url** - *(optional)* A URL to use as the base when building + request URLs. + * **transport** - *(optional)* A transport class to use for sending requests + over the network. + * **app** - *(optional)* An WSGI application to send requests to, + rather than sending actual network requests. + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. + * **default_encoding** - *(optional)* The default encoding to use for decoding + response text, if no charset information is included in a response Content-Type + header. Set to a callable for automatic character set detection. 
Default: "utf-8". + """ + + def __init__( + self, + *, + auth: AuthTypes | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + verify: VerifyTypes = True, + cert: CertTypes | None = None, + http1: bool = True, + http2: bool = False, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + mounts: None | (typing.Mapping[str, BaseTransport | None]) = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + limits: Limits = DEFAULT_LIMITS, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None, + base_url: URLTypes = "", + transport: BaseTransport | None = None, + app: typing.Callable[..., typing.Any] | None = None, + trust_env: bool = True, + default_encoding: str | typing.Callable[[bytes], str] = "utf-8", + ) -> None: + super().__init__( + auth=auth, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + follow_redirects=follow_redirects, + max_redirects=max_redirects, + event_hooks=event_hooks, + base_url=base_url, + trust_env=trust_env, + default_encoding=default_encoding, + ) + + if http2: + try: + import h2 # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using http2=True, but the 'h2' package is not installed. " + "Make sure to install httpx using `pip install httpx[http2]`." + ) from None + + if proxies: + message = ( + "The 'proxies' argument is now deprecated." + " Use 'proxy' or 'mounts' instead." + ) + warnings.warn(message, DeprecationWarning) + if proxy: + raise RuntimeError("Use either `proxy` or 'proxies', not both.") + + if app: + message = ( + "The 'app' shortcut is now deprecated." + " Use the explicit style 'transport=WSGITransport(app=...)' instead." 
+ ) + warnings.warn(message, DeprecationWarning) + + allow_env_proxies = trust_env and app is None and transport is None + proxy_map = self._get_proxy_map(proxies or proxy, allow_env_proxies) + + self._transport = self._init_transport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + transport=transport, + app=app, + trust_env=trust_env, + ) + self._mounts: dict[URLPattern, BaseTransport | None] = { + URLPattern(key): None + if proxy is None + else self._init_proxy_transport( + proxy, + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + for key, proxy in proxy_map.items() + } + if mounts is not None: + self._mounts.update( + {URLPattern(key): transport for key, transport in mounts.items()} + ) + + self._mounts = dict(sorted(self._mounts.items())) + + def _init_transport( + self, + verify: VerifyTypes = True, + cert: CertTypes | None = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + transport: BaseTransport | None = None, + app: typing.Callable[..., typing.Any] | None = None, + trust_env: bool = True, + ) -> BaseTransport: + if transport is not None: + return transport + + if app is not None: + return WSGITransport(app=app) + + return HTTPTransport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + + def _init_proxy_transport( + self, + proxy: Proxy, + verify: VerifyTypes = True, + cert: CertTypes | None = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + ) -> BaseTransport: + return HTTPTransport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + proxy=proxy, + ) + + def _transport_for_url(self, url: URL) -> BaseTransport: + """ + Returns the transport instance that should be used for a given URL. + This will either be the standard connection pool, or a proxy. 
+ """ + for pattern, transport in self._mounts.items(): + if pattern.matches(url): + return self._transport if transport is None else transport + + return self._transport + + def request( + self, + method: str, + url: URLTypes, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Build and send a request. + + Equivalent to: + + ```python + request = client.build_request(...) + response = client.send(request, ...) + ``` + + See `Client.build_request()`, `Client.send()` and + [Merging of configuration][0] for how the various parameters + are merged with client-level configuration. + + [0]: /advanced/#merging-of-configuration + """ + if cookies is not None: + message = ( + "Setting per-request cookies=<...> is being deprecated, because " + "the expected behaviour on cookie persistence is ambiguous. Set " + "cookies directly on the client instance instead." 
+ ) + warnings.warn(message, DeprecationWarning) + + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + return self.send(request, auth=auth, follow_redirects=follow_redirects) + + @contextmanager + def stream( + self, + method: str, + url: URLTypes, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> typing.Iterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. + + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + response = self.send( + request=request, + auth=auth, + follow_redirects=follow_redirects, + stream=True, + ) + try: + yield response + finally: + response.close() + + def send( + self, + request: Request, + *, + stream: bool = False, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a request. + + The request is sent as-is, unmodified. 
+ + Typically you'll want to build one with `Client.build_request()` + so that any client-level configuration is merged into the request, + but passing an explicit `httpx.Request()` is supported as well. + + See also: [Request instances][0] + + [0]: /advanced/#request-instances + """ + if self._state == ClientState.CLOSED: + raise RuntimeError("Cannot send a request, as the client has been closed.") + + self._state = ClientState.OPENED + follow_redirects = ( + self.follow_redirects + if isinstance(follow_redirects, UseClientDefault) + else follow_redirects + ) + + auth = self._build_request_auth(request, auth) + + response = self._send_handling_auth( + request, + auth=auth, + follow_redirects=follow_redirects, + history=[], + ) + try: + if not stream: + response.read() + + return response + + except BaseException as exc: + response.close() + raise exc + + def _send_handling_auth( + self, + request: Request, + auth: Auth, + follow_redirects: bool, + history: list[Response], + ) -> Response: + auth_flow = auth.sync_auth_flow(request) + try: + request = next(auth_flow) + + while True: + response = self._send_handling_redirects( + request, + follow_redirects=follow_redirects, + history=history, + ) + try: + try: + next_request = auth_flow.send(response) + except StopIteration: + return response + + response.history = list(history) + response.read() + request = next_request + history.append(response) + + except BaseException as exc: + response.close() + raise exc + finally: + auth_flow.close() + + def _send_handling_redirects( + self, + request: Request, + follow_redirects: bool, + history: list[Response], + ) -> Response: + while True: + if len(history) > self.max_redirects: + raise TooManyRedirects( + "Exceeded maximum allowed redirects.", request=request + ) + + for hook in self._event_hooks["request"]: + hook(request) + + response = self._send_single_request(request) + try: + for hook in self._event_hooks["response"]: + hook(response) + response.history = 
list(history) + + if not response.has_redirect_location: + return response + + request = self._build_redirect_request(request, response) + history = history + [response] + + if follow_redirects: + response.read() + else: + response.next_request = request + return response + + except BaseException as exc: + response.close() + raise exc + + def _send_single_request(self, request: Request) -> Response: + """ + Sends a single request, without handling any redirections. + """ + transport = self._transport_for_url(request.url) + timer = Timer() + timer.sync_start() + + if not isinstance(request.stream, SyncByteStream): + raise RuntimeError( + "Attempted to send an async request with a sync Client instance." + ) + + with request_context(request=request): + response = transport.handle_request(request) + + assert isinstance(response.stream, SyncByteStream) + + response.request = request + response.stream = BoundSyncStream( + response.stream, response=response, timer=timer + ) + self.cookies.extract_cookies(response) + response.default_encoding = self._default_encoding + + logger.info( + 'HTTP Request: %s %s "%s %d %s"', + request.method, + request.url, + response.http_version, + response.status_code, + response.reason_phrase, + ) + + return response + + def get( + self, + url: URLTypes, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `GET` request. + + **Parameters**: See `httpx.request`. 
+ """ + return self.request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def options( + self, + url: URLTypes, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send an `OPTIONS` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def head( + self, + url: URLTypes, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `HEAD` request. + + **Parameters**: See `httpx.request`. 
+ """ + return self.request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def post( + self, + url: URLTypes, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `POST` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def put( + self, + url: URLTypes, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `PUT` request. + + **Parameters**: See `httpx.request`. 
+ """ + return self.request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def patch( + self, + url: URLTypes, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `PATCH` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def delete( + self, + url: URLTypes, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `DELETE` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def close(self) -> None: + """ + Close transport and proxies. 
+ """ + if self._state != ClientState.CLOSED: + self._state = ClientState.CLOSED + + self._transport.close() + for transport in self._mounts.values(): + if transport is not None: + transport.close() + + def __enter__(self: T) -> T: + if self._state != ClientState.UNOPENED: + msg = { + ClientState.OPENED: "Cannot open a client instance more than once.", + ClientState.CLOSED: ( + "Cannot reopen a client instance, once it has been closed." + ), + }[self._state] + raise RuntimeError(msg) + + self._state = ClientState.OPENED + + self._transport.__enter__() + for transport in self._mounts.values(): + if transport is not None: + transport.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + self._state = ClientState.CLOSED + + self._transport.__exit__(exc_type, exc_value, traceback) + for transport in self._mounts.values(): + if transport is not None: + transport.__exit__(exc_type, exc_value, traceback) + + +class AsyncClient(BaseClient): + """ + An asynchronous HTTP client, with connection pooling, HTTP/2, redirects, + cookie persistence, etc. + + It can be shared between tasks. + + Usage: + + ```python + >>> async with httpx.AsyncClient() as client: + >>> response = await client.get('https://example.org') + ``` + + **Parameters:** + + * **auth** - *(optional)* An authentication class to use when sending + requests. + * **params** - *(optional)* Query parameters to include in request URLs, as + a string, dictionary, or sequence of two-tuples. + * **headers** - *(optional)* Dictionary of HTTP headers to include when + sending requests. + * **cookies** - *(optional)* Dictionary of Cookie items to include when + sending requests. + * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to + verify the identity of requested hosts. 
Either `True` (default CA bundle), + a path to an SSL certificate file, an `ssl.SSLContext`, or `False` + (which will disable verification). + * **cert** - *(optional)* An SSL certificate used by the requested host + to authenticate the client. Either a path to an SSL certificate file, or + two-tuple of (certificate file, key file), or a three-tuple of (certificate + file, key file, password). + * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be + enabled. Defaults to `False`. + * **proxy** - *(optional)* A proxy URL where all the traffic should be routed. + * **proxies** - *(optional)* A dictionary mapping HTTP protocols to proxy + URLs. + * **timeout** - *(optional)* The timeout configuration to use when sending + requests. + * **limits** - *(optional)* The limits configuration to use. + * **max_redirects** - *(optional)* The maximum number of redirect responses + that should be followed. + * **base_url** - *(optional)* A URL to use as the base when building + request URLs. + * **transport** - *(optional)* A transport class to use for sending requests + over the network. + * **app** - *(optional)* An ASGI application to send requests to, + rather than sending actual network requests. + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. + * **default_encoding** - *(optional)* The default encoding to use for decoding + response text, if no charset information is included in a response Content-Type + header. Set to a callable for automatic character set detection. Default: "utf-8". 
+ """ + + def __init__( + self, + *, + auth: AuthTypes | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + verify: VerifyTypes = True, + cert: CertTypes | None = None, + http1: bool = True, + http2: bool = False, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + mounts: None | (typing.Mapping[str, AsyncBaseTransport | None]) = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + limits: Limits = DEFAULT_LIMITS, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: None + | (typing.Mapping[str, list[typing.Callable[..., typing.Any]]]) = None, + base_url: URLTypes = "", + transport: AsyncBaseTransport | None = None, + app: typing.Callable[..., typing.Any] | None = None, + trust_env: bool = True, + default_encoding: str | typing.Callable[[bytes], str] = "utf-8", + ) -> None: + super().__init__( + auth=auth, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + follow_redirects=follow_redirects, + max_redirects=max_redirects, + event_hooks=event_hooks, + base_url=base_url, + trust_env=trust_env, + default_encoding=default_encoding, + ) + + if http2: + try: + import h2 # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using http2=True, but the 'h2' package is not installed. " + "Make sure to install httpx using `pip install httpx[http2]`." + ) from None + + if proxies: + message = ( + "The 'proxies' argument is now deprecated." + " Use 'proxy' or 'mounts' instead." + ) + warnings.warn(message, DeprecationWarning) + if proxy: + raise RuntimeError("Use either `proxy` or 'proxies', not both.") + + if app: + message = ( + "The 'app' shortcut is now deprecated." + " Use the explicit style 'transport=ASGITransport(app=...)' instead." 
+ ) + warnings.warn(message, DeprecationWarning) + + allow_env_proxies = trust_env and transport is None + proxy_map = self._get_proxy_map(proxies or proxy, allow_env_proxies) + + self._transport = self._init_transport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + transport=transport, + app=app, + trust_env=trust_env, + ) + + self._mounts: dict[URLPattern, AsyncBaseTransport | None] = { + URLPattern(key): None + if proxy is None + else self._init_proxy_transport( + proxy, + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + for key, proxy in proxy_map.items() + } + if mounts is not None: + self._mounts.update( + {URLPattern(key): transport for key, transport in mounts.items()} + ) + self._mounts = dict(sorted(self._mounts.items())) + + def _init_transport( + self, + verify: VerifyTypes = True, + cert: CertTypes | None = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + transport: AsyncBaseTransport | None = None, + app: typing.Callable[..., typing.Any] | None = None, + trust_env: bool = True, + ) -> AsyncBaseTransport: + if transport is not None: + return transport + + if app is not None: + return ASGITransport(app=app) + + return AsyncHTTPTransport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + + def _init_proxy_transport( + self, + proxy: Proxy, + verify: VerifyTypes = True, + cert: CertTypes | None = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + ) -> AsyncBaseTransport: + return AsyncHTTPTransport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + proxy=proxy, + ) + + def _transport_for_url(self, url: URL) -> AsyncBaseTransport: + """ + Returns the transport instance that should be used for a given URL. 
+ This will either be the standard connection pool, or a proxy. + """ + for pattern, transport in self._mounts.items(): + if pattern.matches(url): + return self._transport if transport is None else transport + + return self._transport + + async def request( + self, + method: str, + url: URLTypes, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Build and send a request. + + Equivalent to: + + ```python + request = client.build_request(...) + response = await client.send(request, ...) + ``` + + See `AsyncClient.build_request()`, `AsyncClient.send()` + and [Merging of configuration][0] for how the various parameters + are merged with client-level configuration. + + [0]: /advanced/#merging-of-configuration + """ + + if cookies is not None: # pragma: no cover + message = ( + "Setting per-request cookies=<...> is being deprecated, because " + "the expected behaviour on cookie persistence is ambiguous. Set " + "cookies directly on the client instance instead." 
+ ) + warnings.warn(message, DeprecationWarning) + + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + return await self.send(request, auth=auth, follow_redirects=follow_redirects) + + @asynccontextmanager + async def stream( + self, + method: str, + url: URLTypes, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> typing.AsyncIterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. + + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + response = await self.send( + request=request, + auth=auth, + follow_redirects=follow_redirects, + stream=True, + ) + try: + yield response + finally: + await response.aclose() + + async def send( + self, + request: Request, + *, + stream: bool = False, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a request. + + The request is sent as-is, unmodified. 
+ + Typically you'll want to build one with `AsyncClient.build_request()` + so that any client-level configuration is merged into the request, + but passing an explicit `httpx.Request()` is supported as well. + + See also: [Request instances][0] + + [0]: /advanced/#request-instances + """ + if self._state == ClientState.CLOSED: + raise RuntimeError("Cannot send a request, as the client has been closed.") + + self._state = ClientState.OPENED + follow_redirects = ( + self.follow_redirects + if isinstance(follow_redirects, UseClientDefault) + else follow_redirects + ) + + auth = self._build_request_auth(request, auth) + + response = await self._send_handling_auth( + request, + auth=auth, + follow_redirects=follow_redirects, + history=[], + ) + try: + if not stream: + await response.aread() + + return response + + except BaseException as exc: + await response.aclose() + raise exc + + async def _send_handling_auth( + self, + request: Request, + auth: Auth, + follow_redirects: bool, + history: list[Response], + ) -> Response: + auth_flow = auth.async_auth_flow(request) + try: + request = await auth_flow.__anext__() + + while True: + response = await self._send_handling_redirects( + request, + follow_redirects=follow_redirects, + history=history, + ) + try: + try: + next_request = await auth_flow.asend(response) + except StopAsyncIteration: + return response + + response.history = list(history) + await response.aread() + request = next_request + history.append(response) + + except BaseException as exc: + await response.aclose() + raise exc + finally: + await auth_flow.aclose() + + async def _send_handling_redirects( + self, + request: Request, + follow_redirects: bool, + history: list[Response], + ) -> Response: + while True: + if len(history) > self.max_redirects: + raise TooManyRedirects( + "Exceeded maximum allowed redirects.", request=request + ) + + for hook in self._event_hooks["request"]: + await hook(request) + + response = await self._send_single_request(request) 
+ try: + for hook in self._event_hooks["response"]: + await hook(response) + + response.history = list(history) + + if not response.has_redirect_location: + return response + + request = self._build_redirect_request(request, response) + history = history + [response] + + if follow_redirects: + await response.aread() + else: + response.next_request = request + return response + + except BaseException as exc: + await response.aclose() + raise exc + + async def _send_single_request(self, request: Request) -> Response: + """ + Sends a single request, without handling any redirections. + """ + transport = self._transport_for_url(request.url) + timer = Timer() + await timer.async_start() + + if not isinstance(request.stream, AsyncByteStream): + raise RuntimeError( + "Attempted to send an sync request with an AsyncClient instance." + ) + + with request_context(request=request): + response = await transport.handle_async_request(request) + + assert isinstance(response.stream, AsyncByteStream) + response.request = request + response.stream = BoundAsyncStream( + response.stream, response=response, timer=timer + ) + self.cookies.extract_cookies(response) + response.default_encoding = self._default_encoding + + logger.info( + 'HTTP Request: %s %s "%s %d %s"', + request.method, + request.url, + response.http_version, + response.status_code, + response.reason_phrase, + ) + + return response + + async def get( + self, + url: URLTypes, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `GET` request. + + **Parameters**: See `httpx.request`. 
+ """ + return await self.request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def options( + self, + url: URLTypes, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send an `OPTIONS` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def head( + self, + url: URLTypes, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `HEAD` request. + + **Parameters**: See `httpx.request`. 
+ """ + return await self.request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def post( + self, + url: URLTypes, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `POST` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def put( + self, + url: URLTypes, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `PUT` request. + + **Parameters**: See `httpx.request`. 
+ """ + return await self.request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def patch( + self, + url: URLTypes, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `PATCH` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def delete( + self, + url: URLTypes, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `DELETE` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def aclose(self) -> None: + """ + Close transport and proxies. 
+ """ + if self._state != ClientState.CLOSED: + self._state = ClientState.CLOSED + + await self._transport.aclose() + for proxy in self._mounts.values(): + if proxy is not None: + await proxy.aclose() + + async def __aenter__(self: U) -> U: + if self._state != ClientState.UNOPENED: + msg = { + ClientState.OPENED: "Cannot open a client instance more than once.", + ClientState.CLOSED: ( + "Cannot reopen a client instance, once it has been closed." + ), + }[self._state] + raise RuntimeError(msg) + + self._state = ClientState.OPENED + + await self._transport.__aenter__() + for proxy in self._mounts.values(): + if proxy is not None: + await proxy.__aenter__() + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + self._state = ClientState.CLOSED + + await self._transport.__aexit__(exc_type, exc_value, traceback) + for proxy in self._mounts.values(): + if proxy is not None: + await proxy.__aexit__(exc_type, exc_value, traceback) diff --git a/.venv/Lib/site-packages/httpx/_compat.py b/.venv/Lib/site-packages/httpx/_compat.py new file mode 100644 index 00000000..493e6210 --- /dev/null +++ b/.venv/Lib/site-packages/httpx/_compat.py @@ -0,0 +1,41 @@ +""" +The _compat module is used for code which requires branching between different +Python environments. It is excluded from the code coverage checks. +""" +import ssl +import sys + +# Brotli support is optional +# The C bindings in `brotli` are recommended for CPython. +# The CFFI bindings in `brotlicffi` are recommended for PyPy and everything else. 
+try: + import brotlicffi as brotli +except ImportError: # pragma: no cover + try: + import brotli + except ImportError: + brotli = None + +if sys.version_info >= (3, 10) or ssl.OPENSSL_VERSION_INFO >= (1, 1, 0, 7): + + def set_minimum_tls_version_1_2(context: ssl.SSLContext) -> None: + # The OP_NO_SSL* and OP_NO_TLS* become deprecated in favor of + # 'SSLContext.minimum_version' from Python 3.7 onwards, however + # this attribute is not available unless the ssl module is compiled + # with OpenSSL 1.1.0g or newer. + # https://docs.python.org/3.10/library/ssl.html#ssl.SSLContext.minimum_version + # https://docs.python.org/3.7/library/ssl.html#ssl.SSLContext.minimum_version + context.minimum_version = ssl.TLSVersion.TLSv1_2 + +else: + + def set_minimum_tls_version_1_2(context: ssl.SSLContext) -> None: + # If 'minimum_version' isn't available, we configure these options with + # the older deprecated variants. + context.options |= ssl.OP_NO_SSLv2 + context.options |= ssl.OP_NO_SSLv3 + context.options |= ssl.OP_NO_TLSv1 + context.options |= ssl.OP_NO_TLSv1_1 + + +__all__ = ["brotli", "set_minimum_tls_version_1_2"] diff --git a/.venv/Lib/site-packages/httpx/_config.py b/.venv/Lib/site-packages/httpx/_config.py new file mode 100644 index 00000000..7636a5dc --- /dev/null +++ b/.venv/Lib/site-packages/httpx/_config.py @@ -0,0 +1,370 @@ +from __future__ import annotations + +import logging +import os +import ssl +import typing +from pathlib import Path + +import certifi + +from ._compat import set_minimum_tls_version_1_2 +from ._models import Headers +from ._types import CertTypes, HeaderTypes, TimeoutTypes, URLTypes, VerifyTypes +from ._urls import URL +from ._utils import get_ca_bundle_from_env + +DEFAULT_CIPHERS = ":".join( + [ + "ECDHE+AESGCM", + "ECDHE+CHACHA20", + "DHE+AESGCM", + "DHE+CHACHA20", + "ECDH+AESGCM", + "DH+AESGCM", + "ECDH+AES", + "DH+AES", + "RSA+AESGCM", + "RSA+AES", + "!aNULL", + "!eNULL", + "!MD5", + "!DSS", + ] +) + + +logger = 
logging.getLogger("httpx") + + +class UnsetType: + pass # pragma: no cover + + +UNSET = UnsetType() + + +def create_ssl_context( + cert: CertTypes | None = None, + verify: VerifyTypes = True, + trust_env: bool = True, + http2: bool = False, +) -> ssl.SSLContext: + return SSLConfig( + cert=cert, verify=verify, trust_env=trust_env, http2=http2 + ).ssl_context + + +class SSLConfig: + """ + SSL Configuration. + """ + + DEFAULT_CA_BUNDLE_PATH = Path(certifi.where()) + + def __init__( + self, + *, + cert: CertTypes | None = None, + verify: VerifyTypes = True, + trust_env: bool = True, + http2: bool = False, + ) -> None: + self.cert = cert + self.verify = verify + self.trust_env = trust_env + self.http2 = http2 + self.ssl_context = self.load_ssl_context() + + def load_ssl_context(self) -> ssl.SSLContext: + logger.debug( + "load_ssl_context verify=%r cert=%r trust_env=%r http2=%r", + self.verify, + self.cert, + self.trust_env, + self.http2, + ) + + if self.verify: + return self.load_ssl_context_verify() + return self.load_ssl_context_no_verify() + + def load_ssl_context_no_verify(self) -> ssl.SSLContext: + """ + Return an SSL context for unverified connections. + """ + context = self._create_default_ssl_context() + context.check_hostname = False + context.verify_mode = ssl.CERT_NONE + self._load_client_certs(context) + return context + + def load_ssl_context_verify(self) -> ssl.SSLContext: + """ + Return an SSL context for verified connections. + """ + if self.trust_env and self.verify is True: + ca_bundle = get_ca_bundle_from_env() + if ca_bundle is not None: + self.verify = ca_bundle + + if isinstance(self.verify, ssl.SSLContext): + # Allow passing in our own SSLContext object that's pre-configured. 
+ context = self.verify + self._load_client_certs(context) + return context + elif isinstance(self.verify, bool): + ca_bundle_path = self.DEFAULT_CA_BUNDLE_PATH + elif Path(self.verify).exists(): + ca_bundle_path = Path(self.verify) + else: + raise IOError( + "Could not find a suitable TLS CA certificate bundle, " + "invalid path: {}".format(self.verify) + ) + + context = self._create_default_ssl_context() + context.verify_mode = ssl.CERT_REQUIRED + context.check_hostname = True + + # Signal to server support for PHA in TLS 1.3. Raises an + # AttributeError if only read-only access is implemented. + try: + context.post_handshake_auth = True + except AttributeError: # pragma: no cover + pass + + # Disable using 'commonName' for SSLContext.check_hostname + # when the 'subjectAltName' extension isn't available. + try: + context.hostname_checks_common_name = False + except AttributeError: # pragma: no cover + pass + + if ca_bundle_path.is_file(): + cafile = str(ca_bundle_path) + logger.debug("load_verify_locations cafile=%r", cafile) + context.load_verify_locations(cafile=cafile) + elif ca_bundle_path.is_dir(): + capath = str(ca_bundle_path) + logger.debug("load_verify_locations capath=%r", capath) + context.load_verify_locations(capath=capath) + + self._load_client_certs(context) + + return context + + def _create_default_ssl_context(self) -> ssl.SSLContext: + """ + Creates the default SSLContext object that's used for both verified + and unverified connections. 
+ """ + context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + set_minimum_tls_version_1_2(context) + context.options |= ssl.OP_NO_COMPRESSION + context.set_ciphers(DEFAULT_CIPHERS) + + if ssl.HAS_ALPN: + alpn_idents = ["http/1.1", "h2"] if self.http2 else ["http/1.1"] + context.set_alpn_protocols(alpn_idents) + + keylogfile = os.environ.get("SSLKEYLOGFILE") + if keylogfile and self.trust_env: + context.keylog_filename = keylogfile + + return context + + def _load_client_certs(self, ssl_context: ssl.SSLContext) -> None: + """ + Loads client certificates into our SSLContext object + """ + if self.cert is not None: + if isinstance(self.cert, str): + ssl_context.load_cert_chain(certfile=self.cert) + elif isinstance(self.cert, tuple) and len(self.cert) == 2: + ssl_context.load_cert_chain(certfile=self.cert[0], keyfile=self.cert[1]) + elif isinstance(self.cert, tuple) and len(self.cert) == 3: + ssl_context.load_cert_chain( + certfile=self.cert[0], + keyfile=self.cert[1], + password=self.cert[2], + ) + + +class Timeout: + """ + Timeout configuration. + + **Usage**: + + Timeout(None) # No timeouts. + Timeout(5.0) # 5s timeout on all operations. + Timeout(None, connect=5.0) # 5s timeout on connect, no other timeouts. + Timeout(5.0, connect=10.0) # 10s timeout on connect. 5s timeout elsewhere. + Timeout(5.0, pool=None) # No timeout on acquiring connection from pool. + # 5s timeout elsewhere. + """ + + def __init__( + self, + timeout: TimeoutTypes | UnsetType = UNSET, + *, + connect: None | float | UnsetType = UNSET, + read: None | float | UnsetType = UNSET, + write: None | float | UnsetType = UNSET, + pool: None | float | UnsetType = UNSET, + ) -> None: + if isinstance(timeout, Timeout): + # Passed as a single explicit Timeout. 
+ assert connect is UNSET + assert read is UNSET + assert write is UNSET + assert pool is UNSET + self.connect = timeout.connect # type: typing.Optional[float] + self.read = timeout.read # type: typing.Optional[float] + self.write = timeout.write # type: typing.Optional[float] + self.pool = timeout.pool # type: typing.Optional[float] + elif isinstance(timeout, tuple): + # Passed as a tuple. + self.connect = timeout[0] + self.read = timeout[1] + self.write = None if len(timeout) < 3 else timeout[2] + self.pool = None if len(timeout) < 4 else timeout[3] + elif not ( + isinstance(connect, UnsetType) + or isinstance(read, UnsetType) + or isinstance(write, UnsetType) + or isinstance(pool, UnsetType) + ): + self.connect = connect + self.read = read + self.write = write + self.pool = pool + else: + if isinstance(timeout, UnsetType): + raise ValueError( + "httpx.Timeout must either include a default, or set all " + "four parameters explicitly." + ) + self.connect = timeout if isinstance(connect, UnsetType) else connect + self.read = timeout if isinstance(read, UnsetType) else read + self.write = timeout if isinstance(write, UnsetType) else write + self.pool = timeout if isinstance(pool, UnsetType) else pool + + def as_dict(self) -> dict[str, float | None]: + return { + "connect": self.connect, + "read": self.read, + "write": self.write, + "pool": self.pool, + } + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, self.__class__) + and self.connect == other.connect + and self.read == other.read + and self.write == other.write + and self.pool == other.pool + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + if len({self.connect, self.read, self.write, self.pool}) == 1: + return f"{class_name}(timeout={self.connect})" + return ( + f"{class_name}(connect={self.connect}, " + f"read={self.read}, write={self.write}, pool={self.pool})" + ) + + +class Limits: + """ + Configuration for limits to various client behaviors. 
+ + **Parameters:** + + * **max_connections** - The maximum number of concurrent connections that may be + established. + * **max_keepalive_connections** - Allow the connection pool to maintain + keep-alive connections below this point. Should be less than or equal + to `max_connections`. + * **keepalive_expiry** - Time limit on idle keep-alive connections in seconds. + """ + + def __init__( + self, + *, + max_connections: int | None = None, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = 5.0, + ) -> None: + self.max_connections = max_connections + self.max_keepalive_connections = max_keepalive_connections + self.keepalive_expiry = keepalive_expiry + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, self.__class__) + and self.max_connections == other.max_connections + and self.max_keepalive_connections == other.max_keepalive_connections + and self.keepalive_expiry == other.keepalive_expiry + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + return ( + f"{class_name}(max_connections={self.max_connections}, " + f"max_keepalive_connections={self.max_keepalive_connections}, " + f"keepalive_expiry={self.keepalive_expiry})" + ) + + +class Proxy: + def __init__( + self, + url: URLTypes, + *, + ssl_context: ssl.SSLContext | None = None, + auth: tuple[str, str] | None = None, + headers: HeaderTypes | None = None, + ) -> None: + url = URL(url) + headers = Headers(headers) + + if url.scheme not in ("http", "https", "socks5"): + raise ValueError(f"Unknown scheme for proxy URL {url!r}") + + if url.username or url.password: + # Remove any auth credentials from the URL. + auth = (url.username, url.password) + url = url.copy_with(username=None, password=None) + + self.url = url + self.auth = auth + self.headers = headers + self.ssl_context = ssl_context + + @property + def raw_auth(self) -> tuple[bytes, bytes] | None: + # The proxy authentication as raw bytes. 
+ return ( + None + if self.auth is None + else (self.auth[0].encode("utf-8"), self.auth[1].encode("utf-8")) + ) + + def __repr__(self) -> str: + # The authentication is represented with the password component masked. + auth = (self.auth[0], "********") if self.auth else None + + # Build a nice concise representation. + url_str = f"{str(self.url)!r}" + auth_str = f", auth={auth!r}" if auth else "" + headers_str = f", headers={dict(self.headers)!r}" if self.headers else "" + return f"Proxy({url_str}{auth_str}{headers_str})" + + +DEFAULT_TIMEOUT_CONFIG = Timeout(timeout=5.0) +DEFAULT_LIMITS = Limits(max_connections=100, max_keepalive_connections=20) +DEFAULT_MAX_REDIRECTS = 20 diff --git a/.venv/Lib/site-packages/httpx/_content.py b/.venv/Lib/site-packages/httpx/_content.py new file mode 100644 index 00000000..10b574bb --- /dev/null +++ b/.venv/Lib/site-packages/httpx/_content.py @@ -0,0 +1,236 @@ +from __future__ import annotations + +import inspect +import warnings +from json import dumps as json_dumps +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Iterable, + Iterator, + Mapping, +) +from urllib.parse import urlencode + +from ._exceptions import StreamClosed, StreamConsumed +from ._multipart import MultipartStream +from ._types import ( + AsyncByteStream, + RequestContent, + RequestData, + RequestFiles, + ResponseContent, + SyncByteStream, +) +from ._utils import peek_filelike_length, primitive_value_to_str + + +class ByteStream(AsyncByteStream, SyncByteStream): + def __init__(self, stream: bytes) -> None: + self._stream = stream + + def __iter__(self) -> Iterator[bytes]: + yield self._stream + + async def __aiter__(self) -> AsyncIterator[bytes]: + yield self._stream + + +class IteratorByteStream(SyncByteStream): + CHUNK_SIZE = 65_536 + + def __init__(self, stream: Iterable[bytes]) -> None: + self._stream = stream + self._is_stream_consumed = False + self._is_generator = inspect.isgenerator(stream) + + def __iter__(self) -> Iterator[bytes]: + if 
self._is_stream_consumed and self._is_generator: + raise StreamConsumed() + + self._is_stream_consumed = True + if hasattr(self._stream, "read"): + # File-like interfaces should use 'read' directly. + chunk = self._stream.read(self.CHUNK_SIZE) + while chunk: + yield chunk + chunk = self._stream.read(self.CHUNK_SIZE) + else: + # Otherwise iterate. + for part in self._stream: + yield part + + +class AsyncIteratorByteStream(AsyncByteStream): + CHUNK_SIZE = 65_536 + + def __init__(self, stream: AsyncIterable[bytes]) -> None: + self._stream = stream + self._is_stream_consumed = False + self._is_generator = inspect.isasyncgen(stream) + + async def __aiter__(self) -> AsyncIterator[bytes]: + if self._is_stream_consumed and self._is_generator: + raise StreamConsumed() + + self._is_stream_consumed = True + if hasattr(self._stream, "aread"): + # File-like interfaces should use 'aread' directly. + chunk = await self._stream.aread(self.CHUNK_SIZE) + while chunk: + yield chunk + chunk = await self._stream.aread(self.CHUNK_SIZE) + else: + # Otherwise iterate. + async for part in self._stream: + yield part + + +class UnattachedStream(AsyncByteStream, SyncByteStream): + """ + If a request or response is serialized using pickle, then it is no longer + attached to a stream for I/O purposes. Any stream operations should result + in `httpx.StreamClosed`. 
+ """ + + def __iter__(self) -> Iterator[bytes]: + raise StreamClosed() + + async def __aiter__(self) -> AsyncIterator[bytes]: + raise StreamClosed() + yield b"" # pragma: no cover + + +def encode_content( + content: str | bytes | Iterable[bytes] | AsyncIterable[bytes], +) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]: + if isinstance(content, (bytes, str)): + body = content.encode("utf-8") if isinstance(content, str) else content + content_length = len(body) + headers = {"Content-Length": str(content_length)} if body else {} + return headers, ByteStream(body) + + elif isinstance(content, Iterable) and not isinstance(content, dict): + # `not isinstance(content, dict)` is a bit oddly specific, but it + # catches a case that's easy for users to make in error, and would + # otherwise pass through here, like any other bytes-iterable, + # because `dict` happens to be iterable. See issue #2491. + content_length_or_none = peek_filelike_length(content) + + if content_length_or_none is None: + headers = {"Transfer-Encoding": "chunked"} + else: + headers = {"Content-Length": str(content_length_or_none)} + return headers, IteratorByteStream(content) # type: ignore + + elif isinstance(content, AsyncIterable): + headers = {"Transfer-Encoding": "chunked"} + return headers, AsyncIteratorByteStream(content) + + raise TypeError(f"Unexpected type for 'content', {type(content)!r}") + + +def encode_urlencoded_data( + data: RequestData, +) -> tuple[dict[str, str], ByteStream]: + plain_data = [] + for key, value in data.items(): + if isinstance(value, (list, tuple)): + plain_data.extend([(key, primitive_value_to_str(item)) for item in value]) + else: + plain_data.append((key, primitive_value_to_str(value))) + body = urlencode(plain_data, doseq=True).encode("utf-8") + content_length = str(len(body)) + content_type = "application/x-www-form-urlencoded" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def 
encode_multipart_data( + data: RequestData, files: RequestFiles, boundary: bytes | None +) -> tuple[dict[str, str], MultipartStream]: + multipart = MultipartStream(data=data, files=files, boundary=boundary) + headers = multipart.get_headers() + return headers, multipart + + +def encode_text(text: str) -> tuple[dict[str, str], ByteStream]: + body = text.encode("utf-8") + content_length = str(len(body)) + content_type = "text/plain; charset=utf-8" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_html(html: str) -> tuple[dict[str, str], ByteStream]: + body = html.encode("utf-8") + content_length = str(len(body)) + content_type = "text/html; charset=utf-8" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_json(json: Any) -> tuple[dict[str, str], ByteStream]: + body = json_dumps(json).encode("utf-8") + content_length = str(len(body)) + content_type = "application/json" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_request( + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: Any | None = None, + boundary: bytes | None = None, +) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]: + """ + Handles encoding the given `content`, `data`, `files`, and `json`, + returning a two-tuple of (, ). + """ + if data is not None and not isinstance(data, Mapping): + # We prefer to separate `content=` + # for raw request content, and `data=
` for url encoded or + # multipart form content. + # + # However for compat with requests, we *do* still support + # `data=` usages. We deal with that case here, treating it + # as if `content=<...>` had been supplied instead. + message = "Use 'content=<...>' to upload raw bytes/text content." + warnings.warn(message, DeprecationWarning) + return encode_content(data) + + if content is not None: + return encode_content(content) + elif files: + return encode_multipart_data(data or {}, files, boundary) + elif data: + return encode_urlencoded_data(data) + elif json is not None: + return encode_json(json) + + return {}, ByteStream(b"") + + +def encode_response( + content: ResponseContent | None = None, + text: str | None = None, + html: str | None = None, + json: Any | None = None, +) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]: + """ + Handles encoding the given `content`, returning a two-tuple of + (, ). + """ + if content is not None: + return encode_content(content) + elif text is not None: + return encode_text(text) + elif html is not None: + return encode_html(html) + elif json is not None: + return encode_json(json) + + return {}, ByteStream(b"") diff --git a/.venv/Lib/site-packages/httpx/_decoders.py b/.venv/Lib/site-packages/httpx/_decoders.py new file mode 100644 index 00000000..31c72c7f --- /dev/null +++ b/.venv/Lib/site-packages/httpx/_decoders.py @@ -0,0 +1,329 @@ +""" +Handlers for Content-Encoding. + +See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding +""" +from __future__ import annotations + +import codecs +import io +import typing +import zlib + +from ._compat import brotli +from ._exceptions import DecodingError + + +class ContentDecoder: + def decode(self, data: bytes) -> bytes: + raise NotImplementedError() # pragma: no cover + + def flush(self) -> bytes: + raise NotImplementedError() # pragma: no cover + + +class IdentityDecoder(ContentDecoder): + """ + Handle unencoded data. 
+ """ + + def decode(self, data: bytes) -> bytes: + return data + + def flush(self) -> bytes: + return b"" + + +class DeflateDecoder(ContentDecoder): + """ + Handle 'deflate' decoding. + + See: https://stackoverflow.com/questions/1838699 + """ + + def __init__(self) -> None: + self.first_attempt = True + self.decompressor = zlib.decompressobj() + + def decode(self, data: bytes) -> bytes: + was_first_attempt = self.first_attempt + self.first_attempt = False + try: + return self.decompressor.decompress(data) + except zlib.error as exc: + if was_first_attempt: + self.decompressor = zlib.decompressobj(-zlib.MAX_WBITS) + return self.decode(data) + raise DecodingError(str(exc)) from exc + + def flush(self) -> bytes: + try: + return self.decompressor.flush() + except zlib.error as exc: # pragma: no cover + raise DecodingError(str(exc)) from exc + + +class GZipDecoder(ContentDecoder): + """ + Handle 'gzip' decoding. + + See: https://stackoverflow.com/questions/1838699 + """ + + def __init__(self) -> None: + self.decompressor = zlib.decompressobj(zlib.MAX_WBITS | 16) + + def decode(self, data: bytes) -> bytes: + try: + return self.decompressor.decompress(data) + except zlib.error as exc: + raise DecodingError(str(exc)) from exc + + def flush(self) -> bytes: + try: + return self.decompressor.flush() + except zlib.error as exc: # pragma: no cover + raise DecodingError(str(exc)) from exc + + +class BrotliDecoder(ContentDecoder): + """ + Handle 'brotli' decoding. + + Requires `pip install brotlipy`. See: https://brotlipy.readthedocs.io/ + or `pip install brotli`. See https://github.com/google/brotli + Supports both 'brotlipy' and 'Brotli' packages since they share an import + name. The top branches are for 'brotlipy' and bottom branches for 'Brotli' + """ + + def __init__(self) -> None: + if brotli is None: # pragma: no cover + raise ImportError( + "Using 'BrotliDecoder', but neither of the 'brotlicffi' or 'brotli' " + "packages have been installed. 
" + "Make sure to install httpx using `pip install httpx[brotli]`." + ) from None + + self.decompressor = brotli.Decompressor() + self.seen_data = False + self._decompress: typing.Callable[[bytes], bytes] + if hasattr(self.decompressor, "decompress"): + # The 'brotlicffi' package. + self._decompress = self.decompressor.decompress # pragma: no cover + else: + # The 'brotli' package. + self._decompress = self.decompressor.process # pragma: no cover + + def decode(self, data: bytes) -> bytes: + if not data: + return b"" + self.seen_data = True + try: + return self._decompress(data) + except brotli.error as exc: + raise DecodingError(str(exc)) from exc + + def flush(self) -> bytes: + if not self.seen_data: + return b"" + try: + if hasattr(self.decompressor, "finish"): + # Only available in the 'brotlicffi' package. + + # As the decompressor decompresses eagerly, this + # will never actually emit any data. However, it will potentially throw + # errors if a truncated or damaged data stream has been used. + self.decompressor.finish() # pragma: no cover + return b"" + except brotli.error as exc: # pragma: no cover + raise DecodingError(str(exc)) from exc + + +class MultiDecoder(ContentDecoder): + """ + Handle the case where multiple encodings have been applied. + """ + + def __init__(self, children: typing.Sequence[ContentDecoder]) -> None: + """ + 'children' should be a sequence of decoders in the order in which + each was applied. + """ + # Note that we reverse the order for decoding. + self.children = list(reversed(children)) + + def decode(self, data: bytes) -> bytes: + for child in self.children: + data = child.decode(data) + return data + + def flush(self) -> bytes: + data = b"" + for child in self.children: + data = child.decode(data) + child.flush() + return data + + +class ByteChunker: + """ + Handles returning byte content in fixed-size chunks. 
+ """ + + def __init__(self, chunk_size: int | None = None) -> None: + self._buffer = io.BytesIO() + self._chunk_size = chunk_size + + def decode(self, content: bytes) -> list[bytes]: + if self._chunk_size is None: + return [content] if content else [] + + self._buffer.write(content) + if self._buffer.tell() >= self._chunk_size: + value = self._buffer.getvalue() + chunks = [ + value[i : i + self._chunk_size] + for i in range(0, len(value), self._chunk_size) + ] + if len(chunks[-1]) == self._chunk_size: + self._buffer.seek(0) + self._buffer.truncate() + return chunks + else: + self._buffer.seek(0) + self._buffer.write(chunks[-1]) + self._buffer.truncate() + return chunks[:-1] + else: + return [] + + def flush(self) -> list[bytes]: + value = self._buffer.getvalue() + self._buffer.seek(0) + self._buffer.truncate() + return [value] if value else [] + + +class TextChunker: + """ + Handles returning text content in fixed-size chunks. + """ + + def __init__(self, chunk_size: int | None = None) -> None: + self._buffer = io.StringIO() + self._chunk_size = chunk_size + + def decode(self, content: str) -> list[str]: + if self._chunk_size is None: + return [content] if content else [] + + self._buffer.write(content) + if self._buffer.tell() >= self._chunk_size: + value = self._buffer.getvalue() + chunks = [ + value[i : i + self._chunk_size] + for i in range(0, len(value), self._chunk_size) + ] + if len(chunks[-1]) == self._chunk_size: + self._buffer.seek(0) + self._buffer.truncate() + return chunks + else: + self._buffer.seek(0) + self._buffer.write(chunks[-1]) + self._buffer.truncate() + return chunks[:-1] + else: + return [] + + def flush(self) -> list[str]: + value = self._buffer.getvalue() + self._buffer.seek(0) + self._buffer.truncate() + return [value] if value else [] + + +class TextDecoder: + """ + Handles incrementally decoding bytes into text + """ + + def __init__(self, encoding: str = "utf-8") -> None: + self.decoder = 
codecs.getincrementaldecoder(encoding)(errors="replace") + + def decode(self, data: bytes) -> str: + return self.decoder.decode(data) + + def flush(self) -> str: + return self.decoder.decode(b"", True) + + +class LineDecoder: + """ + Handles incrementally reading lines from text. + + Has the same behaviour as the stdllib splitlines, + but handling the input iteratively. + """ + + def __init__(self) -> None: + self.buffer: list[str] = [] + self.trailing_cr: bool = False + + def decode(self, text: str) -> list[str]: + # See https://docs.python.org/3/library/stdtypes.html#str.splitlines + NEWLINE_CHARS = "\n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029" + + # We always push a trailing `\r` into the next decode iteration. + if self.trailing_cr: + text = "\r" + text + self.trailing_cr = False + if text.endswith("\r"): + self.trailing_cr = True + text = text[:-1] + + if not text: + # NOTE: the edge case input of empty text doesn't occur in practice, + # because other httpx internals filter out this value + return [] # pragma: no cover + + trailing_newline = text[-1] in NEWLINE_CHARS + lines = text.splitlines() + + if len(lines) == 1 and not trailing_newline: + # No new lines, buffer the input and continue. + self.buffer.append(lines[0]) + return [] + + if self.buffer: + # Include any existing buffer in the first portion of the + # splitlines result. + lines = ["".join(self.buffer) + lines[0]] + lines[1:] + self.buffer = [] + + if not trailing_newline: + # If the last segment of splitlines is not newline terminated, + # then drop it from our output and start a new buffer. 
+ self.buffer = [lines.pop()] + + return lines + + def flush(self) -> list[str]: + if not self.buffer and not self.trailing_cr: + return [] + + lines = ["".join(self.buffer)] + self.buffer = [] + self.trailing_cr = False + return lines + + +SUPPORTED_DECODERS = { + "identity": IdentityDecoder, + "gzip": GZipDecoder, + "deflate": DeflateDecoder, + "br": BrotliDecoder, +} + + +if brotli is None: + SUPPORTED_DECODERS.pop("br") # pragma: no cover diff --git a/.venv/Lib/site-packages/httpx/_exceptions.py b/.venv/Lib/site-packages/httpx/_exceptions.py new file mode 100644 index 00000000..11424621 --- /dev/null +++ b/.venv/Lib/site-packages/httpx/_exceptions.py @@ -0,0 +1,347 @@ +""" +Our exception hierarchy: + +* HTTPError + x RequestError + + TransportError + - TimeoutException + · ConnectTimeout + · ReadTimeout + · WriteTimeout + · PoolTimeout + - NetworkError + · ConnectError + · ReadError + · WriteError + · CloseError + - ProtocolError + · LocalProtocolError + · RemoteProtocolError + - ProxyError + - UnsupportedProtocol + + DecodingError + + TooManyRedirects + x HTTPStatusError +* InvalidURL +* CookieConflict +* StreamError + x StreamConsumed + x StreamClosed + x ResponseNotRead + x RequestNotRead +""" +from __future__ import annotations + +import contextlib +import typing + +if typing.TYPE_CHECKING: + from ._models import Request, Response # pragma: no cover + + +class HTTPError(Exception): + """ + Base class for `RequestError` and `HTTPStatusError`. + + Useful for `try...except` blocks when issuing a request, + and then calling `.raise_for_status()`. 
+ + For example: + + ``` + try: + response = httpx.get("https://www.example.com") + response.raise_for_status() + except httpx.HTTPError as exc: + print(f"HTTP Exception for {exc.request.url} - {exc}") + ``` + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + self._request: Request | None = None + + @property + def request(self) -> Request: + if self._request is None: + raise RuntimeError("The .request property has not been set.") + return self._request + + @request.setter + def request(self, request: Request) -> None: + self._request = request + + +class RequestError(HTTPError): + """ + Base class for all exceptions that may occur when issuing a `.request()`. + """ + + def __init__(self, message: str, *, request: Request | None = None) -> None: + super().__init__(message) + # At the point an exception is raised we won't typically have a request + # instance to associate it with. + # + # The 'request_context' context manager is used within the Client and + # Response methods in order to ensure that any raised exceptions + # have a `.request` property set on them. + self._request = request + + +class TransportError(RequestError): + """ + Base class for all exceptions that occur at the level of the Transport API. + """ + + +# Timeout exceptions... + + +class TimeoutException(TransportError): + """ + The base class for timeout errors. + + An operation has timed out. + """ + + +class ConnectTimeout(TimeoutException): + """ + Timed out while connecting to the host. + """ + + +class ReadTimeout(TimeoutException): + """ + Timed out while receiving data from the host. + """ + + +class WriteTimeout(TimeoutException): + """ + Timed out while sending data to the host. + """ + + +class PoolTimeout(TimeoutException): + """ + Timed out waiting to acquire a connection from the pool. + """ + + +# Core networking exceptions... + + +class NetworkError(TransportError): + """ + The base class for network-related errors. 
+ + An error occurred while interacting with the network. + """ + + +class ReadError(NetworkError): + """ + Failed to receive data from the network. + """ + + +class WriteError(NetworkError): + """ + Failed to send data through the network. + """ + + +class ConnectError(NetworkError): + """ + Failed to establish a connection. + """ + + +class CloseError(NetworkError): + """ + Failed to close a connection. + """ + + +# Other transport exceptions... + + +class ProxyError(TransportError): + """ + An error occurred while establishing a proxy connection. + """ + + +class UnsupportedProtocol(TransportError): + """ + Attempted to make a request to an unsupported protocol. + + For example issuing a request to `ftp://www.example.com`. + """ + + +class ProtocolError(TransportError): + """ + The protocol was violated. + """ + + +class LocalProtocolError(ProtocolError): + """ + A protocol was violated by the client. + + For example if the user instantiated a `Request` instance explicitly, + failed to include the mandatory `Host:` header, and then issued it directly + using `client.send()`. + """ + + +class RemoteProtocolError(ProtocolError): + """ + The protocol was violated by the server. + + For example, returning malformed HTTP. + """ + + +# Other request exceptions... + + +class DecodingError(RequestError): + """ + Decoding of the response failed, due to a malformed encoding. + """ + + +class TooManyRedirects(RequestError): + """ + Too many redirects. + """ + + +# Client errors + + +class HTTPStatusError(HTTPError): + """ + The response had an error HTTP status of 4xx or 5xx. + + May be raised when calling `response.raise_for_status()` + """ + + def __init__(self, message: str, *, request: Request, response: Response) -> None: + super().__init__(message) + self.request = request + self.response = response + + +class InvalidURL(Exception): + """ + URL is improperly formed or cannot be parsed. 
+ """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +class CookieConflict(Exception): + """ + Attempted to lookup a cookie by name, but multiple cookies existed. + + Can occur when calling `response.cookies.get(...)`. + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +# Stream exceptions... + +# These may occur as the result of a programming error, by accessing +# the request/response stream in an invalid manner. + + +class StreamError(RuntimeError): + """ + The base class for stream exceptions. + + The developer made an error in accessing the request stream in + an invalid way. + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +class StreamConsumed(StreamError): + """ + Attempted to read or stream content, but the content has already + been streamed. + """ + + def __init__(self) -> None: + message = ( + "Attempted to read or stream some content, but the content has " + "already been streamed. For requests, this could be due to passing " + "a generator as request content, and then receiving a redirect " + "response or a secondary request as part of an authentication flow." + "For responses, this could be due to attempting to stream the response " + "content more than once." + ) + super().__init__(message) + + +class StreamClosed(StreamError): + """ + Attempted to read or stream response content, but the request has been + closed. + """ + + def __init__(self) -> None: + message = ( + "Attempted to read or stream content, but the stream has " "been closed." + ) + super().__init__(message) + + +class ResponseNotRead(StreamError): + """ + Attempted to access streaming response content, without having called `read()`. + """ + + def __init__(self) -> None: + message = ( + "Attempted to access streaming response content," + " without having called `read()`." 
+ ) + super().__init__(message) + + +class RequestNotRead(StreamError): + """ + Attempted to access streaming request content, without having called `read()`. + """ + + def __init__(self) -> None: + message = ( + "Attempted to access streaming request content," + " without having called `read()`." + ) + super().__init__(message) + + +@contextlib.contextmanager +def request_context( + request: Request | None = None, +) -> typing.Iterator[None]: + """ + A context manager that can be used to attach the given request context + to any `RequestError` exceptions that are raised within the block. + """ + try: + yield + except RequestError as exc: + if request is not None: + exc.request = request + raise exc diff --git a/.venv/Lib/site-packages/httpx/_main.py b/.venv/Lib/site-packages/httpx/_main.py new file mode 100644 index 00000000..72657f8c --- /dev/null +++ b/.venv/Lib/site-packages/httpx/_main.py @@ -0,0 +1,509 @@ +from __future__ import annotations + +import functools +import json +import sys +import typing + +import click +import httpcore +import pygments.lexers +import pygments.util +import rich.console +import rich.markup +import rich.progress +import rich.syntax +import rich.table + +from ._client import Client +from ._exceptions import RequestError +from ._models import Response +from ._status_codes import codes + + +def print_help() -> None: + console = rich.console.Console() + + console.print("[bold]HTTPX :butterfly:", justify="center") + console.print() + console.print("A next generation HTTP client.", justify="center") + console.print() + console.print( + "Usage: [bold]httpx[/bold] [cyan] [OPTIONS][/cyan] ", justify="left" + ) + console.print() + + table = rich.table.Table.grid(padding=1, pad_edge=True) + table.add_column("Parameter", no_wrap=True, justify="left", style="bold") + table.add_column("Description") + table.add_row( + "-m, --method [cyan]METHOD", + "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD.\n" + "[Default: GET, or POST 
if a request body is included]", + ) + table.add_row( + "-p, --params [cyan] ...", + "Query parameters to include in the request URL.", + ) + table.add_row( + "-c, --content [cyan]TEXT", "Byte content to include in the request body." + ) + table.add_row( + "-d, --data [cyan] ...", "Form data to include in the request body." + ) + table.add_row( + "-f, --files [cyan] ...", + "Form files to include in the request body.", + ) + table.add_row("-j, --json [cyan]TEXT", "JSON data to include in the request body.") + table.add_row( + "-h, --headers [cyan] ...", + "Include additional HTTP headers in the request.", + ) + table.add_row( + "--cookies [cyan] ...", "Cookies to include in the request." + ) + table.add_row( + "--auth [cyan]", + "Username and password to include in the request. Specify '-' for the password" + " to use a password prompt. Note that using --verbose/-v will expose" + " the Authorization header, including the password encoding" + " in a trivially reversible format.", + ) + + table.add_row( + "--proxy [cyan]URL", + "Send the request via a proxy. Should be the URL giving the proxy address.", + ) + + table.add_row( + "--timeout [cyan]FLOAT", + "Timeout value to use for network operations, such as establishing the" + " connection, reading some data, etc... [Default: 5.0]", + ) + + table.add_row("--follow-redirects", "Automatically follow redirects.") + table.add_row("--no-verify", "Disable SSL verification.") + table.add_row( + "--http2", "Send the request using HTTP/2, if the remote server supports it." + ) + + table.add_row( + "--download [cyan]FILE", + "Save the response content as a file, rather than displaying it.", + ) + + table.add_row("-v, --verbose", "Verbose output. 
Show request as well as response.") + table.add_row("--help", "Show this message and exit.") + console.print(table) + + +def get_lexer_for_response(response: Response) -> str: + content_type = response.headers.get("Content-Type") + if content_type is not None: + mime_type, _, _ = content_type.partition(";") + try: + return typing.cast( + str, pygments.lexers.get_lexer_for_mimetype(mime_type.strip()).name + ) + except pygments.util.ClassNotFound: # pragma: no cover + pass + return "" # pragma: no cover + + +def format_request_headers(request: httpcore.Request, http2: bool = False) -> str: + version = "HTTP/2" if http2 else "HTTP/1.1" + headers = [ + (name.lower() if http2 else name, value) for name, value in request.headers + ] + method = request.method.decode("ascii") + target = request.url.target.decode("ascii") + lines = [f"{method} {target} {version}"] + [ + f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers + ] + return "\n".join(lines) + + +def format_response_headers( + http_version: bytes, + status: int, + reason_phrase: bytes | None, + headers: list[tuple[bytes, bytes]], +) -> str: + version = http_version.decode("ascii") + reason = ( + codes.get_reason_phrase(status) + if reason_phrase is None + else reason_phrase.decode("ascii") + ) + lines = [f"{version} {status} {reason}"] + [ + f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers + ] + return "\n".join(lines) + + +def print_request_headers(request: httpcore.Request, http2: bool = False) -> None: + console = rich.console.Console() + http_text = format_request_headers(request, http2=http2) + syntax = rich.syntax.Syntax(http_text, "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + syntax = rich.syntax.Syntax("", "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + + +def print_response_headers( + http_version: bytes, + status: int, + reason_phrase: bytes | None, + headers: list[tuple[bytes, bytes]], +) -> None: + console 
= rich.console.Console() + http_text = format_response_headers(http_version, status, reason_phrase, headers) + syntax = rich.syntax.Syntax(http_text, "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + syntax = rich.syntax.Syntax("", "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + + +def print_response(response: Response) -> None: + console = rich.console.Console() + lexer_name = get_lexer_for_response(response) + if lexer_name: + if lexer_name.lower() == "json": + try: + data = response.json() + text = json.dumps(data, indent=4) + except ValueError: # pragma: no cover + text = response.text + else: + text = response.text + + syntax = rich.syntax.Syntax(text, lexer_name, theme="ansi_dark", word_wrap=True) + console.print(syntax) + else: + console.print(f"<{len(response.content)} bytes of binary data>") + + +_PCTRTT = typing.Tuple[typing.Tuple[str, str], ...] +_PCTRTTT = typing.Tuple[_PCTRTT, ...] +_PeerCertRetDictType = typing.Dict[str, typing.Union[str, _PCTRTTT, _PCTRTT]] + + +def format_certificate(cert: _PeerCertRetDictType) -> str: # pragma: no cover + lines = [] + for key, value in cert.items(): + if isinstance(value, (list, tuple)): + lines.append(f"* {key}:") + for item in value: + if key in ("subject", "issuer"): + for sub_item in item: + lines.append(f"* {sub_item[0]}: {sub_item[1]!r}") + elif isinstance(item, tuple) and len(item) == 2: + lines.append(f"* {item[0]}: {item[1]!r}") + else: + lines.append(f"* {item!r}") + else: + lines.append(f"* {key}: {value!r}") + return "\n".join(lines) + + +def trace( + name: str, info: typing.Mapping[str, typing.Any], verbose: bool = False +) -> None: + console = rich.console.Console() + if name == "connection.connect_tcp.started" and verbose: + host = info["host"] + console.print(f"* Connecting to {host!r}") + elif name == "connection.connect_tcp.complete" and verbose: + stream = info["return_value"] + server_addr = stream.get_extra_info("server_addr") + console.print(f"* 
Connected to {server_addr[0]!r} on port {server_addr[1]}") + elif name == "connection.start_tls.complete" and verbose: # pragma: no cover + stream = info["return_value"] + ssl_object = stream.get_extra_info("ssl_object") + version = ssl_object.version() + cipher = ssl_object.cipher() + server_cert = ssl_object.getpeercert() + alpn = ssl_object.selected_alpn_protocol() + console.print(f"* SSL established using {version!r} / {cipher[0]!r}") + console.print(f"* Selected ALPN protocol: {alpn!r}") + if server_cert: + console.print("* Server certificate:") + console.print(format_certificate(server_cert)) + elif name == "http11.send_request_headers.started" and verbose: + request = info["request"] + print_request_headers(request, http2=False) + elif name == "http2.send_request_headers.started" and verbose: # pragma: no cover + request = info["request"] + print_request_headers(request, http2=True) + elif name == "http11.receive_response_headers.complete": + http_version, status, reason_phrase, headers = info["return_value"] + print_response_headers(http_version, status, reason_phrase, headers) + elif name == "http2.receive_response_headers.complete": # pragma: no cover + status, headers = info["return_value"] + http_version = b"HTTP/2" + reason_phrase = None + print_response_headers(http_version, status, reason_phrase, headers) + + +def download_response(response: Response, download: typing.BinaryIO) -> None: + console = rich.console.Console() + console.print() + content_length = response.headers.get("Content-Length") + with rich.progress.Progress( + "[progress.description]{task.description}", + "[progress.percentage]{task.percentage:>3.0f}%", + rich.progress.BarColumn(bar_width=None), + rich.progress.DownloadColumn(), + rich.progress.TransferSpeedColumn(), + ) as progress: + description = f"Downloading [bold]{rich.markup.escape(download.name)}" + download_task = progress.add_task( + description, + total=int(content_length or 0), + start=content_length is not None, + ) + 
for chunk in response.iter_bytes(): + download.write(chunk) + progress.update(download_task, completed=response.num_bytes_downloaded) + + +def validate_json( + ctx: click.Context, + param: click.Option | click.Parameter, + value: typing.Any, +) -> typing.Any: + if value is None: + return None + + try: + return json.loads(value) + except json.JSONDecodeError: # pragma: no cover + raise click.BadParameter("Not valid JSON") + + +def validate_auth( + ctx: click.Context, + param: click.Option | click.Parameter, + value: typing.Any, +) -> typing.Any: + if value == (None, None): + return None + + username, password = value + if password == "-": # pragma: no cover + password = click.prompt("Password", hide_input=True) + return (username, password) + + +def handle_help( + ctx: click.Context, + param: click.Option | click.Parameter, + value: typing.Any, +) -> None: + if not value or ctx.resilient_parsing: + return + + print_help() + ctx.exit() + + +@click.command(add_help_option=False) +@click.argument("url", type=str) +@click.option( + "--method", + "-m", + "method", + type=str, + help=( + "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD. 
" + "[Default: GET, or POST if a request body is included]" + ), +) +@click.option( + "--params", + "-p", + "params", + type=(str, str), + multiple=True, + help="Query parameters to include in the request URL.", +) +@click.option( + "--content", + "-c", + "content", + type=str, + help="Byte content to include in the request body.", +) +@click.option( + "--data", + "-d", + "data", + type=(str, str), + multiple=True, + help="Form data to include in the request body.", +) +@click.option( + "--files", + "-f", + "files", + type=(str, click.File(mode="rb")), + multiple=True, + help="Form files to include in the request body.", +) +@click.option( + "--json", + "-j", + "json", + type=str, + callback=validate_json, + help="JSON data to include in the request body.", +) +@click.option( + "--headers", + "-h", + "headers", + type=(str, str), + multiple=True, + help="Include additional HTTP headers in the request.", +) +@click.option( + "--cookies", + "cookies", + type=(str, str), + multiple=True, + help="Cookies to include in the request.", +) +@click.option( + "--auth", + "auth", + type=(str, str), + default=(None, None), + callback=validate_auth, + help=( + "Username and password to include in the request. " + "Specify '-' for the password to use a password prompt. " + "Note that using --verbose/-v will expose the Authorization header, " + "including the password encoding in a trivially reversible format." + ), +) +@click.option( + "--proxy", + "proxy", + type=str, + default=None, + help="Send the request via a proxy. Should be the URL giving the proxy address.", +) +@click.option( + "--timeout", + "timeout", + type=float, + default=5.0, + help=( + "Timeout value to use for network operations, such as establishing the " + "connection, reading some data, etc... 
[Default: 5.0]" + ), +) +@click.option( + "--follow-redirects", + "follow_redirects", + is_flag=True, + default=False, + help="Automatically follow redirects.", +) +@click.option( + "--no-verify", + "verify", + is_flag=True, + default=True, + help="Disable SSL verification.", +) +@click.option( + "--http2", + "http2", + type=bool, + is_flag=True, + default=False, + help="Send the request using HTTP/2, if the remote server supports it.", +) +@click.option( + "--download", + type=click.File("wb"), + help="Save the response content as a file, rather than displaying it.", +) +@click.option( + "--verbose", + "-v", + type=bool, + is_flag=True, + default=False, + help="Verbose. Show request as well as response.", +) +@click.option( + "--help", + is_flag=True, + is_eager=True, + expose_value=False, + callback=handle_help, + help="Show this message and exit.", +) +def main( + url: str, + method: str, + params: list[tuple[str, str]], + content: str, + data: list[tuple[str, str]], + files: list[tuple[str, click.File]], + json: str, + headers: list[tuple[str, str]], + cookies: list[tuple[str, str]], + auth: tuple[str, str] | None, + proxy: str, + timeout: float, + follow_redirects: bool, + verify: bool, + http2: bool, + download: typing.BinaryIO | None, + verbose: bool, +) -> None: + """ + An HTTP command line client. + Sends a request and displays the response. 
+ """ + if not method: + method = "POST" if content or data or files or json else "GET" + + try: + with Client( + proxy=proxy, + timeout=timeout, + verify=verify, + http2=http2, + ) as client: + with client.stream( + method, + url, + params=list(params), + content=content, + data=dict(data), + files=files, # type: ignore + json=json, + headers=headers, + cookies=dict(cookies), + auth=auth, + follow_redirects=follow_redirects, + extensions={"trace": functools.partial(trace, verbose=verbose)}, + ) as response: + if download is not None: + download_response(response, download) + else: + response.read() + if response.content: + print_response(response) + + except RequestError as exc: + console = rich.console.Console() + console.print(f"[red]{type(exc).__name__}[/red]: {exc}") + sys.exit(1) + + sys.exit(0 if response.is_success else 1) diff --git a/.venv/Lib/site-packages/httpx/_models.py b/.venv/Lib/site-packages/httpx/_models.py new file mode 100644 index 00000000..cd76705f --- /dev/null +++ b/.venv/Lib/site-packages/httpx/_models.py @@ -0,0 +1,1209 @@ +from __future__ import annotations + +import datetime +import email.message +import json as jsonlib +import typing +import urllib.request +from collections.abc import Mapping +from http.cookiejar import Cookie, CookieJar + +from ._content import ByteStream, UnattachedStream, encode_request, encode_response +from ._decoders import ( + SUPPORTED_DECODERS, + ByteChunker, + ContentDecoder, + IdentityDecoder, + LineDecoder, + MultiDecoder, + TextChunker, + TextDecoder, +) +from ._exceptions import ( + CookieConflict, + HTTPStatusError, + RequestNotRead, + ResponseNotRead, + StreamClosed, + StreamConsumed, + request_context, +) +from ._multipart import get_multipart_boundary_from_content_type +from ._status_codes import codes +from ._types import ( + AsyncByteStream, + CookieTypes, + HeaderTypes, + QueryParamTypes, + RequestContent, + RequestData, + RequestExtensions, + RequestFiles, + ResponseContent, + ResponseExtensions, 
+ SyncByteStream, +) +from ._urls import URL +from ._utils import ( + is_known_encoding, + normalize_header_key, + normalize_header_value, + obfuscate_sensitive_headers, + parse_content_type_charset, + parse_header_links, +) + + +class Headers(typing.MutableMapping[str, str]): + """ + HTTP headers, as a case-insensitive multi-dict. + """ + + def __init__( + self, + headers: HeaderTypes | None = None, + encoding: str | None = None, + ) -> None: + if headers is None: + self._list = [] # type: typing.List[typing.Tuple[bytes, bytes, bytes]] + elif isinstance(headers, Headers): + self._list = list(headers._list) + elif isinstance(headers, Mapping): + self._list = [ + ( + normalize_header_key(k, lower=False, encoding=encoding), + normalize_header_key(k, lower=True, encoding=encoding), + normalize_header_value(v, encoding), + ) + for k, v in headers.items() + ] + else: + self._list = [ + ( + normalize_header_key(k, lower=False, encoding=encoding), + normalize_header_key(k, lower=True, encoding=encoding), + normalize_header_value(v, encoding), + ) + for k, v in headers + ] + + self._encoding = encoding + + @property + def encoding(self) -> str: + """ + Header encoding is mandated as ascii, but we allow fallbacks to utf-8 + or iso-8859-1. + """ + if self._encoding is None: + for encoding in ["ascii", "utf-8"]: + for key, value in self.raw: + try: + key.decode(encoding) + value.decode(encoding) + except UnicodeDecodeError: + break + else: + # The else block runs if 'break' did not occur, meaning + # all values fitted the encoding. + self._encoding = encoding + break + else: + # The ISO-8859-1 encoding covers all 256 code points in a byte, + # so will never raise decode errors. + self._encoding = "iso-8859-1" + return self._encoding + + @encoding.setter + def encoding(self, value: str) -> None: + self._encoding = value + + @property + def raw(self) -> list[tuple[bytes, bytes]]: + """ + Returns a list of the raw header items, as byte pairs. 
+ """ + return [(raw_key, value) for raw_key, _, value in self._list] + + def keys(self) -> typing.KeysView[str]: + return {key.decode(self.encoding): None for _, key, value in self._list}.keys() + + def values(self) -> typing.ValuesView[str]: + values_dict: dict[str, str] = {} + for _, key, value in self._list: + str_key = key.decode(self.encoding) + str_value = value.decode(self.encoding) + if str_key in values_dict: + values_dict[str_key] += f", {str_value}" + else: + values_dict[str_key] = str_value + return values_dict.values() + + def items(self) -> typing.ItemsView[str, str]: + """ + Return `(key, value)` items of headers. Concatenate headers + into a single comma separated value when a key occurs multiple times. + """ + values_dict: dict[str, str] = {} + for _, key, value in self._list: + str_key = key.decode(self.encoding) + str_value = value.decode(self.encoding) + if str_key in values_dict: + values_dict[str_key] += f", {str_value}" + else: + values_dict[str_key] = str_value + return values_dict.items() + + def multi_items(self) -> list[tuple[str, str]]: + """ + Return a list of `(key, value)` pairs of headers. Allow multiple + occurrences of the same key without concatenating into a single + comma separated value. + """ + return [ + (key.decode(self.encoding), value.decode(self.encoding)) + for _, key, value in self._list + ] + + def get(self, key: str, default: typing.Any = None) -> typing.Any: + """ + Return a header value. If multiple occurrences of the header occur + then concatenate them together with commas. + """ + try: + return self[key] + except KeyError: + return default + + def get_list(self, key: str, split_commas: bool = False) -> list[str]: + """ + Return a list of all header values for a given key. + If `split_commas=True` is passed, then any comma separated header + values are split into multiple return strings. 
+ """ + get_header_key = key.lower().encode(self.encoding) + + values = [ + item_value.decode(self.encoding) + for _, item_key, item_value in self._list + if item_key.lower() == get_header_key + ] + + if not split_commas: + return values + + split_values = [] + for value in values: + split_values.extend([item.strip() for item in value.split(",")]) + return split_values + + def update(self, headers: HeaderTypes | None = None) -> None: # type: ignore + headers = Headers(headers) + for key in headers.keys(): + if key in self: + self.pop(key) + self._list.extend(headers._list) + + def copy(self) -> Headers: + return Headers(self, encoding=self.encoding) + + def __getitem__(self, key: str) -> str: + """ + Return a single header value. + + If there are multiple headers with the same key, then we concatenate + them with commas. See: https://tools.ietf.org/html/rfc7230#section-3.2.2 + """ + normalized_key = key.lower().encode(self.encoding) + + items = [ + header_value.decode(self.encoding) + for _, header_key, header_value in self._list + if header_key == normalized_key + ] + + if items: + return ", ".join(items) + + raise KeyError(key) + + def __setitem__(self, key: str, value: str) -> None: + """ + Set the header `key` to `value`, removing any duplicate entries. + Retains insertion order. + """ + set_key = key.encode(self._encoding or "utf-8") + set_value = value.encode(self._encoding or "utf-8") + lookup_key = set_key.lower() + + found_indexes = [ + idx + for idx, (_, item_key, _) in enumerate(self._list) + if item_key == lookup_key + ] + + for idx in reversed(found_indexes[1:]): + del self._list[idx] + + if found_indexes: + idx = found_indexes[0] + self._list[idx] = (set_key, lookup_key, set_value) + else: + self._list.append((set_key, lookup_key, set_value)) + + def __delitem__(self, key: str) -> None: + """ + Remove the header `key`. 
+ """ + del_key = key.lower().encode(self.encoding) + + pop_indexes = [ + idx + for idx, (_, item_key, _) in enumerate(self._list) + if item_key.lower() == del_key + ] + + if not pop_indexes: + raise KeyError(key) + + for idx in reversed(pop_indexes): + del self._list[idx] + + def __contains__(self, key: typing.Any) -> bool: + header_key = key.lower().encode(self.encoding) + return header_key in [key for _, key, _ in self._list] + + def __iter__(self) -> typing.Iterator[typing.Any]: + return iter(self.keys()) + + def __len__(self) -> int: + return len(self._list) + + def __eq__(self, other: typing.Any) -> bool: + try: + other_headers = Headers(other) + except ValueError: + return False + + self_list = [(key, value) for _, key, value in self._list] + other_list = [(key, value) for _, key, value in other_headers._list] + return sorted(self_list) == sorted(other_list) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + + encoding_str = "" + if self.encoding != "ascii": + encoding_str = f", encoding={self.encoding!r}" + + as_list = list(obfuscate_sensitive_headers(self.multi_items())) + as_dict = dict(as_list) + + no_duplicate_keys = len(as_dict) == len(as_list) + if no_duplicate_keys: + return f"{class_name}({as_dict!r}{encoding_str})" + return f"{class_name}({as_list!r}{encoding_str})" + + +class Request: + def __init__( + self, + method: str | bytes, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + stream: SyncByteStream | AsyncByteStream | None = None, + extensions: RequestExtensions | None = None, + ) -> None: + self.method = ( + method.decode("ascii").upper() + if isinstance(method, bytes) + else method.upper() + ) + self.url = URL(url) + if params is not None: + self.url = 
self.url.copy_merge_params(params=params) + self.headers = Headers(headers) + self.extensions = {} if extensions is None else extensions + + if cookies: + Cookies(cookies).set_cookie_header(self) + + if stream is None: + content_type: str | None = self.headers.get("content-type") + headers, stream = encode_request( + content=content, + data=data, + files=files, + json=json, + boundary=get_multipart_boundary_from_content_type( + content_type=content_type.encode(self.headers.encoding) + if content_type + else None + ), + ) + self._prepare(headers) + self.stream = stream + # Load the request body, except for streaming content. + if isinstance(stream, ByteStream): + self.read() + else: + # There's an important distinction between `Request(content=...)`, + # and `Request(stream=...)`. + # + # Using `content=...` implies automatically populated `Host` and content + # headers, of either `Content-Length: ...` or `Transfer-Encoding: chunked`. + # + # Using `stream=...` will not automatically include *any* + # auto-populated headers. + # + # As an end-user you don't really need `stream=...`. It's only + # useful when: + # + # * Preserving the request stream when copying requests, eg for redirects. + # * Creating request instances on the *server-side* of the transport API. + self.stream = stream + + def _prepare(self, default_headers: dict[str, str]) -> None: + for key, value in default_headers.items(): + # Ignore Transfer-Encoding if the Content-Length has been set explicitly. 
+ if key.lower() == "transfer-encoding" and "Content-Length" in self.headers: + continue + self.headers.setdefault(key, value) + + auto_headers: list[tuple[bytes, bytes]] = [] + + has_host = "Host" in self.headers + has_content_length = ( + "Content-Length" in self.headers or "Transfer-Encoding" in self.headers + ) + + if not has_host and self.url.host: + auto_headers.append((b"Host", self.url.netloc)) + if not has_content_length and self.method in ("POST", "PUT", "PATCH"): + auto_headers.append((b"Content-Length", b"0")) + + self.headers = Headers(auto_headers + self.headers.raw) + + @property + def content(self) -> bytes: + if not hasattr(self, "_content"): + raise RequestNotRead() + return self._content + + def read(self) -> bytes: + """ + Read and return the request content. + """ + if not hasattr(self, "_content"): + assert isinstance(self.stream, typing.Iterable) + self._content = b"".join(self.stream) + if not isinstance(self.stream, ByteStream): + # If a streaming request has been read entirely into memory, then + # we can replace the stream with a raw bytes implementation, + # to ensure that any non-replayable streams can still be used. + self.stream = ByteStream(self._content) + return self._content + + async def aread(self) -> bytes: + """ + Read and return the request content. + """ + if not hasattr(self, "_content"): + assert isinstance(self.stream, typing.AsyncIterable) + self._content = b"".join([part async for part in self.stream]) + if not isinstance(self.stream, ByteStream): + # If a streaming request has been read entirely into memory, then + # we can replace the stream with a raw bytes implementation, + # to ensure that any non-replayable streams can still be used. 
+ self.stream = ByteStream(self._content) + return self._content + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + url = str(self.url) + return f"<{class_name}({self.method!r}, {url!r})>" + + def __getstate__(self) -> dict[str, typing.Any]: + return { + name: value + for name, value in self.__dict__.items() + if name not in ["extensions", "stream"] + } + + def __setstate__(self, state: dict[str, typing.Any]) -> None: + for name, value in state.items(): + setattr(self, name, value) + self.extensions = {} + self.stream = UnattachedStream() + + +class Response: + def __init__( + self, + status_code: int, + *, + headers: HeaderTypes | None = None, + content: ResponseContent | None = None, + text: str | None = None, + html: str | None = None, + json: typing.Any = None, + stream: SyncByteStream | AsyncByteStream | None = None, + request: Request | None = None, + extensions: ResponseExtensions | None = None, + history: list[Response] | None = None, + default_encoding: str | typing.Callable[[bytes], str] = "utf-8", + ) -> None: + self.status_code = status_code + self.headers = Headers(headers) + + self._request: Request | None = request + + # When follow_redirects=False and a redirect is received, + # the client will set `response.next_request`. + self.next_request: Request | None = None + + self.extensions: ResponseExtensions = {} if extensions is None else extensions + self.history = [] if history is None else list(history) + + self.is_closed = False + self.is_stream_consumed = False + + self.default_encoding = default_encoding + + if stream is None: + headers, stream = encode_response(content, text, html, json) + self._prepare(headers) + self.stream = stream + if isinstance(stream, ByteStream): + # Load the response body, except for streaming content. + self.read() + else: + # There's an important distinction between `Response(content=...)`, + # and `Response(stream=...)`. 
+ # + # Using `content=...` implies automatically populated content headers, + # of either `Content-Length: ...` or `Transfer-Encoding: chunked`. + # + # Using `stream=...` will not automatically include any content headers. + # + # As an end-user you don't really need `stream=...`. It's only + # useful when creating response instances having received a stream + # from the transport API. + self.stream = stream + + self._num_bytes_downloaded = 0 + + def _prepare(self, default_headers: dict[str, str]) -> None: + for key, value in default_headers.items(): + # Ignore Transfer-Encoding if the Content-Length has been set explicitly. + if key.lower() == "transfer-encoding" and "content-length" in self.headers: + continue + self.headers.setdefault(key, value) + + @property + def elapsed(self) -> datetime.timedelta: + """ + Returns the time taken for the complete request/response + cycle to complete. + """ + if not hasattr(self, "_elapsed"): + raise RuntimeError( + "'.elapsed' may only be accessed after the response " + "has been read or closed." + ) + return self._elapsed + + @elapsed.setter + def elapsed(self, elapsed: datetime.timedelta) -> None: + self._elapsed = elapsed + + @property + def request(self) -> Request: + """ + Returns the request instance associated to the current response. + """ + if self._request is None: + raise RuntimeError( + "The request instance has not been set on this response." 
+ ) + return self._request + + @request.setter + def request(self, value: Request) -> None: + self._request = value + + @property + def http_version(self) -> str: + try: + http_version: bytes = self.extensions["http_version"] + except KeyError: + return "HTTP/1.1" + else: + return http_version.decode("ascii", errors="ignore") + + @property + def reason_phrase(self) -> str: + try: + reason_phrase: bytes = self.extensions["reason_phrase"] + except KeyError: + return codes.get_reason_phrase(self.status_code) + else: + return reason_phrase.decode("ascii", errors="ignore") + + @property + def url(self) -> URL: + """ + Returns the URL for which the request was made. + """ + return self.request.url + + @property + def content(self) -> bytes: + if not hasattr(self, "_content"): + raise ResponseNotRead() + return self._content + + @property + def text(self) -> str: + if not hasattr(self, "_text"): + content = self.content + if not content: + self._text = "" + else: + decoder = TextDecoder(encoding=self.encoding or "utf-8") + self._text = "".join([decoder.decode(self.content), decoder.flush()]) + return self._text + + @property + def encoding(self) -> str | None: + """ + Return an encoding to use for decoding the byte content into text. + The priority for determining this is given by... + + * `.encoding = <>` has been set explicitly. + * The encoding as specified by the charset parameter in the Content-Type header. + * The encoding as determined by `default_encoding`, which may either be + a string like "utf-8" indicating the encoding to use, or may be a callable + which enables charset autodetection. 
+ """ + if not hasattr(self, "_encoding"): + encoding = self.charset_encoding + if encoding is None or not is_known_encoding(encoding): + if isinstance(self.default_encoding, str): + encoding = self.default_encoding + elif hasattr(self, "_content"): + encoding = self.default_encoding(self._content) + self._encoding = encoding or "utf-8" + return self._encoding + + @encoding.setter + def encoding(self, value: str) -> None: + """ + Set the encoding to use for decoding the byte content into text. + + If the `text` attribute has been accessed, attempting to set the + encoding will throw a ValueError. + """ + if hasattr(self, "_text"): + raise ValueError( + "Setting encoding after `text` has been accessed is not allowed." + ) + self._encoding = value + + @property + def charset_encoding(self) -> str | None: + """ + Return the encoding, as specified by the Content-Type header. + """ + content_type = self.headers.get("Content-Type") + if content_type is None: + return None + + return parse_content_type_charset(content_type) + + def _get_content_decoder(self) -> ContentDecoder: + """ + Returns a decoder instance which can be used to decode the raw byte + content, depending on the Content-Encoding used in the response. + """ + if not hasattr(self, "_decoder"): + decoders: list[ContentDecoder] = [] + values = self.headers.get_list("content-encoding", split_commas=True) + for value in values: + value = value.strip().lower() + try: + decoder_cls = SUPPORTED_DECODERS[value] + decoders.append(decoder_cls()) + except KeyError: + continue + + if len(decoders) == 1: + self._decoder = decoders[0] + elif len(decoders) > 1: + self._decoder = MultiDecoder(children=decoders) + else: + self._decoder = IdentityDecoder() + + return self._decoder + + @property + def is_informational(self) -> bool: + """ + A property which is `True` for 1xx status codes, `False` otherwise. 
+ """ + return codes.is_informational(self.status_code) + + @property + def is_success(self) -> bool: + """ + A property which is `True` for 2xx status codes, `False` otherwise. + """ + return codes.is_success(self.status_code) + + @property + def is_redirect(self) -> bool: + """ + A property which is `True` for 3xx status codes, `False` otherwise. + + Note that not all responses with a 3xx status code indicate a URL redirect. + + Use `response.has_redirect_location` to determine responses with a properly + formed URL redirection. + """ + return codes.is_redirect(self.status_code) + + @property + def is_client_error(self) -> bool: + """ + A property which is `True` for 4xx status codes, `False` otherwise. + """ + return codes.is_client_error(self.status_code) + + @property + def is_server_error(self) -> bool: + """ + A property which is `True` for 5xx status codes, `False` otherwise. + """ + return codes.is_server_error(self.status_code) + + @property + def is_error(self) -> bool: + """ + A property which is `True` for 4xx and 5xx status codes, `False` otherwise. + """ + return codes.is_error(self.status_code) + + @property + def has_redirect_location(self) -> bool: + """ + Returns True for 3xx responses with a properly formed URL redirection, + `False` otherwise. + """ + return ( + self.status_code + in ( + # 301 (Cacheable redirect. Method may change to GET.) + codes.MOVED_PERMANENTLY, + # 302 (Uncacheable redirect. Method may change to GET.) + codes.FOUND, + # 303 (Client should make a GET or HEAD request.) + codes.SEE_OTHER, + # 307 (Equiv. 302, but retain method) + codes.TEMPORARY_REDIRECT, + # 308 (Equiv. 301, but retain method) + codes.PERMANENT_REDIRECT, + ) + and "Location" in self.headers + ) + + def raise_for_status(self) -> Response: + """ + Raise the `HTTPStatusError` if one occurred. 
+ """ + request = self._request + if request is None: + raise RuntimeError( + "Cannot call `raise_for_status` as the request " + "instance has not been set on this response." + ) + + if self.is_success: + return self + + if self.has_redirect_location: + message = ( + "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n" + "Redirect location: '{0.headers[location]}'\n" + "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}" + ) + else: + message = ( + "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n" + "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}" + ) + + status_class = self.status_code // 100 + error_types = { + 1: "Informational response", + 3: "Redirect response", + 4: "Client error", + 5: "Server error", + } + error_type = error_types.get(status_class, "Invalid status code") + message = message.format(self, error_type=error_type) + raise HTTPStatusError(message, request=request, response=self) + + def json(self, **kwargs: typing.Any) -> typing.Any: + return jsonlib.loads(self.content, **kwargs) + + @property + def cookies(self) -> Cookies: + if not hasattr(self, "_cookies"): + self._cookies = Cookies() + self._cookies.extract_cookies(self) + return self._cookies + + @property + def links(self) -> dict[str | None, dict[str, str]]: + """ + Returns the parsed header links of the response, if any + """ + header = self.headers.get("link") + if header is None: + return {} + + return { + (link.get("rel") or link.get("url")): link + for link in parse_header_links(header) + } + + @property + def num_bytes_downloaded(self) -> int: + return self._num_bytes_downloaded + + def __repr__(self) -> str: + return f"" + + def __getstate__(self) -> dict[str, typing.Any]: + return { + name: value + for name, value in self.__dict__.items() + if name not in ["extensions", "stream", "is_closed", "_decoder"] + } + + def __setstate__(self, state: 
dict[str, typing.Any]) -> None: + for name, value in state.items(): + setattr(self, name, value) + self.is_closed = True + self.extensions = {} + self.stream = UnattachedStream() + + def read(self) -> bytes: + """ + Read and return the response content. + """ + if not hasattr(self, "_content"): + self._content = b"".join(self.iter_bytes()) + return self._content + + def iter_bytes(self, chunk_size: int | None = None) -> typing.Iterator[bytes]: + """ + A byte-iterator over the decoded response content. + This allows us to handle gzip, deflate, and brotli encoded responses. + """ + if hasattr(self, "_content"): + chunk_size = len(self._content) if chunk_size is None else chunk_size + for i in range(0, len(self._content), max(chunk_size, 1)): + yield self._content[i : i + chunk_size] + else: + decoder = self._get_content_decoder() + chunker = ByteChunker(chunk_size=chunk_size) + with request_context(request=self._request): + for raw_bytes in self.iter_raw(): + decoded = decoder.decode(raw_bytes) + for chunk in chunker.decode(decoded): + yield chunk + decoded = decoder.flush() + for chunk in chunker.decode(decoded): + yield chunk # pragma: no cover + for chunk in chunker.flush(): + yield chunk + + def iter_text(self, chunk_size: int | None = None) -> typing.Iterator[str]: + """ + A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. 
+ """ + decoder = TextDecoder(encoding=self.encoding or "utf-8") + chunker = TextChunker(chunk_size=chunk_size) + with request_context(request=self._request): + for byte_content in self.iter_bytes(): + text_content = decoder.decode(byte_content) + for chunk in chunker.decode(text_content): + yield chunk + text_content = decoder.flush() + for chunk in chunker.decode(text_content): + yield chunk # pragma: no cover + for chunk in chunker.flush(): + yield chunk + + def iter_lines(self) -> typing.Iterator[str]: + decoder = LineDecoder() + with request_context(request=self._request): + for text in self.iter_text(): + for line in decoder.decode(text): + yield line + for line in decoder.flush(): + yield line + + def iter_raw(self, chunk_size: int | None = None) -> typing.Iterator[bytes]: + """ + A byte-iterator over the raw response content. + """ + if self.is_stream_consumed: + raise StreamConsumed() + if self.is_closed: + raise StreamClosed() + if not isinstance(self.stream, SyncByteStream): + raise RuntimeError("Attempted to call a sync iterator on an async stream.") + + self.is_stream_consumed = True + self._num_bytes_downloaded = 0 + chunker = ByteChunker(chunk_size=chunk_size) + + with request_context(request=self._request): + for raw_stream_bytes in self.stream: + self._num_bytes_downloaded += len(raw_stream_bytes) + for chunk in chunker.decode(raw_stream_bytes): + yield chunk + + for chunk in chunker.flush(): + yield chunk + + self.close() + + def close(self) -> None: + """ + Close the response and release the connection. + Automatically called if the response body is read to completion. + """ + if not isinstance(self.stream, SyncByteStream): + raise RuntimeError("Attempted to call an sync close on an async stream.") + + if not self.is_closed: + self.is_closed = True + with request_context(request=self._request): + self.stream.close() + + async def aread(self) -> bytes: + """ + Read and return the response content. 
+ """ + if not hasattr(self, "_content"): + self._content = b"".join([part async for part in self.aiter_bytes()]) + return self._content + + async def aiter_bytes( + self, chunk_size: int | None = None + ) -> typing.AsyncIterator[bytes]: + """ + A byte-iterator over the decoded response content. + This allows us to handle gzip, deflate, and brotli encoded responses. + """ + if hasattr(self, "_content"): + chunk_size = len(self._content) if chunk_size is None else chunk_size + for i in range(0, len(self._content), max(chunk_size, 1)): + yield self._content[i : i + chunk_size] + else: + decoder = self._get_content_decoder() + chunker = ByteChunker(chunk_size=chunk_size) + with request_context(request=self._request): + async for raw_bytes in self.aiter_raw(): + decoded = decoder.decode(raw_bytes) + for chunk in chunker.decode(decoded): + yield chunk + decoded = decoder.flush() + for chunk in chunker.decode(decoded): + yield chunk # pragma: no cover + for chunk in chunker.flush(): + yield chunk + + async def aiter_text( + self, chunk_size: int | None = None + ) -> typing.AsyncIterator[str]: + """ + A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. 
+ """ + decoder = TextDecoder(encoding=self.encoding or "utf-8") + chunker = TextChunker(chunk_size=chunk_size) + with request_context(request=self._request): + async for byte_content in self.aiter_bytes(): + text_content = decoder.decode(byte_content) + for chunk in chunker.decode(text_content): + yield chunk + text_content = decoder.flush() + for chunk in chunker.decode(text_content): + yield chunk # pragma: no cover + for chunk in chunker.flush(): + yield chunk + + async def aiter_lines(self) -> typing.AsyncIterator[str]: + decoder = LineDecoder() + with request_context(request=self._request): + async for text in self.aiter_text(): + for line in decoder.decode(text): + yield line + for line in decoder.flush(): + yield line + + async def aiter_raw( + self, chunk_size: int | None = None + ) -> typing.AsyncIterator[bytes]: + """ + A byte-iterator over the raw response content. + """ + if self.is_stream_consumed: + raise StreamConsumed() + if self.is_closed: + raise StreamClosed() + if not isinstance(self.stream, AsyncByteStream): + raise RuntimeError("Attempted to call an async iterator on an sync stream.") + + self.is_stream_consumed = True + self._num_bytes_downloaded = 0 + chunker = ByteChunker(chunk_size=chunk_size) + + with request_context(request=self._request): + async for raw_stream_bytes in self.stream: + self._num_bytes_downloaded += len(raw_stream_bytes) + for chunk in chunker.decode(raw_stream_bytes): + yield chunk + + for chunk in chunker.flush(): + yield chunk + + await self.aclose() + + async def aclose(self) -> None: + """ + Close the response and release the connection. + Automatically called if the response body is read to completion. 
+ """ + if not isinstance(self.stream, AsyncByteStream): + raise RuntimeError("Attempted to call an async close on an sync stream.") + + if not self.is_closed: + self.is_closed = True + with request_context(request=self._request): + await self.stream.aclose() + + +class Cookies(typing.MutableMapping[str, str]): + """ + HTTP Cookies, as a mutable mapping. + """ + + def __init__(self, cookies: CookieTypes | None = None) -> None: + if cookies is None or isinstance(cookies, dict): + self.jar = CookieJar() + if isinstance(cookies, dict): + for key, value in cookies.items(): + self.set(key, value) + elif isinstance(cookies, list): + self.jar = CookieJar() + for key, value in cookies: + self.set(key, value) + elif isinstance(cookies, Cookies): + self.jar = CookieJar() + for cookie in cookies.jar: + self.jar.set_cookie(cookie) + else: + self.jar = cookies + + def extract_cookies(self, response: Response) -> None: + """ + Loads any cookies based on the response `Set-Cookie` headers. + """ + urllib_response = self._CookieCompatResponse(response) + urllib_request = self._CookieCompatRequest(response.request) + + self.jar.extract_cookies(urllib_response, urllib_request) # type: ignore + + def set_cookie_header(self, request: Request) -> None: + """ + Sets an appropriate 'Cookie:' HTTP header on the `Request`. + """ + urllib_request = self._CookieCompatRequest(request) + self.jar.add_cookie_header(urllib_request) + + def set(self, name: str, value: str, domain: str = "", path: str = "/") -> None: + """ + Set a cookie value by name. May optionally include domain and path. 
+ """ + kwargs = { + "version": 0, + "name": name, + "value": value, + "port": None, + "port_specified": False, + "domain": domain, + "domain_specified": bool(domain), + "domain_initial_dot": domain.startswith("."), + "path": path, + "path_specified": bool(path), + "secure": False, + "expires": None, + "discard": True, + "comment": None, + "comment_url": None, + "rest": {"HttpOnly": None}, + "rfc2109": False, + } + cookie = Cookie(**kwargs) # type: ignore + self.jar.set_cookie(cookie) + + def get( # type: ignore + self, + name: str, + default: str | None = None, + domain: str | None = None, + path: str | None = None, + ) -> str | None: + """ + Get a cookie by name. May optionally include domain and path + in order to specify exactly which cookie to retrieve. + """ + value = None + for cookie in self.jar: + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + if value is not None: + message = f"Multiple cookies exist with name={name}" + raise CookieConflict(message) + value = cookie.value + + if value is None: + return default + return value + + def delete( + self, + name: str, + domain: str | None = None, + path: str | None = None, + ) -> None: + """ + Delete a cookie by name. May optionally include domain and path + in order to specify exactly which cookie to delete. + """ + if domain is not None and path is not None: + return self.jar.clear(domain, path, name) + + remove = [ + cookie + for cookie in self.jar + if cookie.name == name + and (domain is None or cookie.domain == domain) + and (path is None or cookie.path == path) + ] + + for cookie in remove: + self.jar.clear(cookie.domain, cookie.path, cookie.name) + + def clear(self, domain: str | None = None, path: str | None = None) -> None: + """ + Delete all cookies. Optionally include a domain and path in + order to only delete a subset of all the cookies. 
+ """ + args = [] + if domain is not None: + args.append(domain) + if path is not None: + assert domain is not None + args.append(path) + self.jar.clear(*args) + + def update(self, cookies: CookieTypes | None = None) -> None: # type: ignore + cookies = Cookies(cookies) + for cookie in cookies.jar: + self.jar.set_cookie(cookie) + + def __setitem__(self, name: str, value: str) -> None: + return self.set(name, value) + + def __getitem__(self, name: str) -> str: + value = self.get(name) + if value is None: + raise KeyError(name) + return value + + def __delitem__(self, name: str) -> None: + return self.delete(name) + + def __len__(self) -> int: + return len(self.jar) + + def __iter__(self) -> typing.Iterator[str]: + return (cookie.name for cookie in self.jar) + + def __bool__(self) -> bool: + for _ in self.jar: + return True + return False + + def __repr__(self) -> str: + cookies_repr = ", ".join( + [ + f"" + for cookie in self.jar + ] + ) + + return f"" + + class _CookieCompatRequest(urllib.request.Request): + """ + Wraps a `Request` instance up in a compatibility interface suitable + for use with `CookieJar` operations. + """ + + def __init__(self, request: Request) -> None: + super().__init__( + url=str(request.url), + headers=dict(request.headers), + method=request.method, + ) + self.request = request + + def add_unredirected_header(self, key: str, value: str) -> None: + super().add_unredirected_header(key, value) + self.request.headers[key] = value + + class _CookieCompatResponse: + """ + Wraps a `Request` instance up in a compatibility interface suitable + for use with `CookieJar` operations. + """ + + def __init__(self, response: Response) -> None: + self.response = response + + def info(self) -> email.message.Message: + info = email.message.Message() + for key, value in self.response.headers.multi_items(): + # Note that setting `info[key]` here is an "append" operation, + # not a "replace" operation. 
+ # https://docs.python.org/3/library/email.compat32-message.html#email.message.Message.__setitem__ + info[key] = value + return info diff --git a/.venv/Lib/site-packages/httpx/_multipart.py b/.venv/Lib/site-packages/httpx/_multipart.py new file mode 100644 index 00000000..8edb6227 --- /dev/null +++ b/.venv/Lib/site-packages/httpx/_multipart.py @@ -0,0 +1,269 @@ +from __future__ import annotations + +import io +import os +import typing +from pathlib import Path + +from ._types import ( + AsyncByteStream, + FileContent, + FileTypes, + RequestData, + RequestFiles, + SyncByteStream, +) +from ._utils import ( + format_form_param, + guess_content_type, + peek_filelike_length, + primitive_value_to_str, + to_bytes, +) + + +def get_multipart_boundary_from_content_type( + content_type: bytes | None, +) -> bytes | None: + if not content_type or not content_type.startswith(b"multipart/form-data"): + return None + # parse boundary according to + # https://www.rfc-editor.org/rfc/rfc2046#section-5.1.1 + if b";" in content_type: + for section in content_type.split(b";"): + if section.strip().lower().startswith(b"boundary="): + return section.strip()[len(b"boundary=") :].strip(b'"') + return None + + +class DataField: + """ + A single form field item, within a multipart form field. + """ + + def __init__(self, name: str, value: str | bytes | int | float | None) -> None: + if not isinstance(name, str): + raise TypeError( + f"Invalid type for name. Expected str, got {type(name)}: {name!r}" + ) + if value is not None and not isinstance(value, (str, bytes, int, float)): + raise TypeError( + "Invalid type for value. 
Expected primitive type," + f" got {type(value)}: {value!r}" + ) + self.name = name + self.value: str | bytes = ( + value if isinstance(value, bytes) else primitive_value_to_str(value) + ) + + def render_headers(self) -> bytes: + if not hasattr(self, "_headers"): + name = format_form_param("name", self.name) + self._headers = b"".join( + [b"Content-Disposition: form-data; ", name, b"\r\n\r\n"] + ) + + return self._headers + + def render_data(self) -> bytes: + if not hasattr(self, "_data"): + self._data = to_bytes(self.value) + + return self._data + + def get_length(self) -> int: + headers = self.render_headers() + data = self.render_data() + return len(headers) + len(data) + + def render(self) -> typing.Iterator[bytes]: + yield self.render_headers() + yield self.render_data() + + +class FileField: + """ + A single file field item, within a multipart form field. + """ + + CHUNK_SIZE = 64 * 1024 + + def __init__(self, name: str, value: FileTypes) -> None: + self.name = name + + fileobj: FileContent + + headers: dict[str, str] = {} + content_type: str | None = None + + # This large tuple based API largely mirror's requests' API + # It would be good to think of better APIs for this that we could + # include in httpx 2.0 since variable length tuples(especially of 4 elements) + # are quite unwieldly + if isinstance(value, tuple): + if len(value) == 2: + # neither the 3rd parameter (content_type) nor the 4th (headers) + # was included + filename, fileobj = value + elif len(value) == 3: + filename, fileobj, content_type = value + else: + # all 4 parameters included + filename, fileobj, content_type, headers = value # type: ignore + else: + filename = Path(str(getattr(value, "name", "upload"))).name + fileobj = value + + if content_type is None: + content_type = guess_content_type(filename) + + has_content_type_header = any("content-type" in key.lower() for key in headers) + if content_type is not None and not has_content_type_header: + # note that unlike requests, we 
ignore the content_type provided in the 3rd + # tuple element if it is also included in the headers requests does + # the opposite (it overwrites the headerwith the 3rd tuple element) + headers["Content-Type"] = content_type + + if isinstance(fileobj, io.StringIO): + raise TypeError( + "Multipart file uploads require 'io.BytesIO', not 'io.StringIO'." + ) + if isinstance(fileobj, io.TextIOBase): + raise TypeError( + "Multipart file uploads must be opened in binary mode, not text mode." + ) + + self.filename = filename + self.file = fileobj + self.headers = headers + + def get_length(self) -> int | None: + headers = self.render_headers() + + if isinstance(self.file, (str, bytes)): + return len(headers) + len(to_bytes(self.file)) + + file_length = peek_filelike_length(self.file) + + # If we can't determine the filesize without reading it into memory, + # then return `None` here, to indicate an unknown file length. + if file_length is None: + return None + + return len(headers) + file_length + + def render_headers(self) -> bytes: + if not hasattr(self, "_headers"): + parts = [ + b"Content-Disposition: form-data; ", + format_form_param("name", self.name), + ] + if self.filename: + filename = format_form_param("filename", self.filename) + parts.extend([b"; ", filename]) + for header_name, header_value in self.headers.items(): + key, val = f"\r\n{header_name}: ".encode(), header_value.encode() + parts.extend([key, val]) + parts.append(b"\r\n\r\n") + self._headers = b"".join(parts) + + return self._headers + + def render_data(self) -> typing.Iterator[bytes]: + if isinstance(self.file, (str, bytes)): + yield to_bytes(self.file) + return + + if hasattr(self.file, "seek"): + try: + self.file.seek(0) + except io.UnsupportedOperation: + pass + + chunk = self.file.read(self.CHUNK_SIZE) + while chunk: + yield to_bytes(chunk) + chunk = self.file.read(self.CHUNK_SIZE) + + def render(self) -> typing.Iterator[bytes]: + yield self.render_headers() + yield from self.render_data() + + 
+class MultipartStream(SyncByteStream, AsyncByteStream): + """ + Request content as streaming multipart encoded form data. + """ + + def __init__( + self, + data: RequestData, + files: RequestFiles, + boundary: bytes | None = None, + ) -> None: + if boundary is None: + boundary = os.urandom(16).hex().encode("ascii") + + self.boundary = boundary + self.content_type = "multipart/form-data; boundary=%s" % boundary.decode( + "ascii" + ) + self.fields = list(self._iter_fields(data, files)) + + def _iter_fields( + self, data: RequestData, files: RequestFiles + ) -> typing.Iterator[FileField | DataField]: + for name, value in data.items(): + if isinstance(value, (tuple, list)): + for item in value: + yield DataField(name=name, value=item) + else: + yield DataField(name=name, value=value) + + file_items = files.items() if isinstance(files, typing.Mapping) else files + for name, value in file_items: + yield FileField(name=name, value=value) + + def iter_chunks(self) -> typing.Iterator[bytes]: + for field in self.fields: + yield b"--%s\r\n" % self.boundary + yield from field.render() + yield b"\r\n" + yield b"--%s--\r\n" % self.boundary + + def get_content_length(self) -> int | None: + """ + Return the length of the multipart encoded content, or `None` if + any of the files have a length that cannot be determined upfront. + """ + boundary_length = len(self.boundary) + length = 0 + + for field in self.fields: + field_length = field.get_length() + if field_length is None: + return None + + length += 2 + boundary_length + 2 # b"--{boundary}\r\n" + length += field_length + length += 2 # b"\r\n" + + length += 2 + boundary_length + 4 # b"--{boundary}--\r\n" + return length + + # Content stream interface. 
+ + def get_headers(self) -> dict[str, str]: + content_length = self.get_content_length() + content_type = self.content_type + if content_length is None: + return {"Transfer-Encoding": "chunked", "Content-Type": content_type} + return {"Content-Length": str(content_length), "Content-Type": content_type} + + def __iter__(self) -> typing.Iterator[bytes]: + for chunk in self.iter_chunks(): + yield chunk + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + for chunk in self.iter_chunks(): + yield chunk diff --git a/.venv/Lib/site-packages/httpx/_status_codes.py b/.venv/Lib/site-packages/httpx/_status_codes.py new file mode 100644 index 00000000..4cde4e68 --- /dev/null +++ b/.venv/Lib/site-packages/httpx/_status_codes.py @@ -0,0 +1,160 @@ +from __future__ import annotations + +from enum import IntEnum + + +class codes(IntEnum): + """HTTP status codes and reason phrases + + Status codes from the following RFCs are all observed: + + * RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616 + * RFC 6585: Additional HTTP Status Codes + * RFC 3229: Delta encoding in HTTP + * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518 + * RFC 5842: Binding Extensions to WebDAV + * RFC 7238: Permanent Redirect + * RFC 2295: Transparent Content Negotiation in HTTP + * RFC 2774: An HTTP Extension Framework + * RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2) + * RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0) + * RFC 7725: An HTTP Status Code to Report Legal Obstacles + * RFC 8297: An HTTP Status Code for Indicating Hints + * RFC 8470: Using Early Data in HTTP + """ + + def __new__(cls, value: int, phrase: str = "") -> codes: + obj = int.__new__(cls, value) + obj._value_ = value + + obj.phrase = phrase # type: ignore[attr-defined] + return obj + + def __str__(self) -> str: + return str(self.value) + + @classmethod + def get_reason_phrase(cls, value: int) -> str: + try: + return codes(value).phrase # type: ignore + except ValueError: + return "" 
+ + @classmethod + def is_informational(cls, value: int) -> bool: + """ + Returns `True` for 1xx status codes, `False` otherwise. + """ + return 100 <= value <= 199 + + @classmethod + def is_success(cls, value: int) -> bool: + """ + Returns `True` for 2xx status codes, `False` otherwise. + """ + return 200 <= value <= 299 + + @classmethod + def is_redirect(cls, value: int) -> bool: + """ + Returns `True` for 3xx status codes, `False` otherwise. + """ + return 300 <= value <= 399 + + @classmethod + def is_client_error(cls, value: int) -> bool: + """ + Returns `True` for 4xx status codes, `False` otherwise. + """ + return 400 <= value <= 499 + + @classmethod + def is_server_error(cls, value: int) -> bool: + """ + Returns `True` for 5xx status codes, `False` otherwise. + """ + return 500 <= value <= 599 + + @classmethod + def is_error(cls, value: int) -> bool: + """ + Returns `True` for 4xx or 5xx status codes, `False` otherwise. + """ + return 400 <= value <= 599 + + # informational + CONTINUE = 100, "Continue" + SWITCHING_PROTOCOLS = 101, "Switching Protocols" + PROCESSING = 102, "Processing" + EARLY_HINTS = 103, "Early Hints" + + # success + OK = 200, "OK" + CREATED = 201, "Created" + ACCEPTED = 202, "Accepted" + NON_AUTHORITATIVE_INFORMATION = 203, "Non-Authoritative Information" + NO_CONTENT = 204, "No Content" + RESET_CONTENT = 205, "Reset Content" + PARTIAL_CONTENT = 206, "Partial Content" + MULTI_STATUS = 207, "Multi-Status" + ALREADY_REPORTED = 208, "Already Reported" + IM_USED = 226, "IM Used" + + # redirection + MULTIPLE_CHOICES = 300, "Multiple Choices" + MOVED_PERMANENTLY = 301, "Moved Permanently" + FOUND = 302, "Found" + SEE_OTHER = 303, "See Other" + NOT_MODIFIED = 304, "Not Modified" + USE_PROXY = 305, "Use Proxy" + TEMPORARY_REDIRECT = 307, "Temporary Redirect" + PERMANENT_REDIRECT = 308, "Permanent Redirect" + + # client error + BAD_REQUEST = 400, "Bad Request" + UNAUTHORIZED = 401, "Unauthorized" + PAYMENT_REQUIRED = 402, "Payment Required" + 
FORBIDDEN = 403, "Forbidden" + NOT_FOUND = 404, "Not Found" + METHOD_NOT_ALLOWED = 405, "Method Not Allowed" + NOT_ACCEPTABLE = 406, "Not Acceptable" + PROXY_AUTHENTICATION_REQUIRED = 407, "Proxy Authentication Required" + REQUEST_TIMEOUT = 408, "Request Timeout" + CONFLICT = 409, "Conflict" + GONE = 410, "Gone" + LENGTH_REQUIRED = 411, "Length Required" + PRECONDITION_FAILED = 412, "Precondition Failed" + REQUEST_ENTITY_TOO_LARGE = 413, "Request Entity Too Large" + REQUEST_URI_TOO_LONG = 414, "Request-URI Too Long" + UNSUPPORTED_MEDIA_TYPE = 415, "Unsupported Media Type" + REQUESTED_RANGE_NOT_SATISFIABLE = 416, "Requested Range Not Satisfiable" + EXPECTATION_FAILED = 417, "Expectation Failed" + IM_A_TEAPOT = 418, "I'm a teapot" + MISDIRECTED_REQUEST = 421, "Misdirected Request" + UNPROCESSABLE_ENTITY = 422, "Unprocessable Entity" + LOCKED = 423, "Locked" + FAILED_DEPENDENCY = 424, "Failed Dependency" + TOO_EARLY = 425, "Too Early" + UPGRADE_REQUIRED = 426, "Upgrade Required" + PRECONDITION_REQUIRED = 428, "Precondition Required" + TOO_MANY_REQUESTS = 429, "Too Many Requests" + REQUEST_HEADER_FIELDS_TOO_LARGE = 431, "Request Header Fields Too Large" + UNAVAILABLE_FOR_LEGAL_REASONS = 451, "Unavailable For Legal Reasons" + + # server errors + INTERNAL_SERVER_ERROR = 500, "Internal Server Error" + NOT_IMPLEMENTED = 501, "Not Implemented" + BAD_GATEWAY = 502, "Bad Gateway" + SERVICE_UNAVAILABLE = 503, "Service Unavailable" + GATEWAY_TIMEOUT = 504, "Gateway Timeout" + HTTP_VERSION_NOT_SUPPORTED = 505, "HTTP Version Not Supported" + VARIANT_ALSO_NEGOTIATES = 506, "Variant Also Negotiates" + INSUFFICIENT_STORAGE = 507, "Insufficient Storage" + LOOP_DETECTED = 508, "Loop Detected" + NOT_EXTENDED = 510, "Not Extended" + NETWORK_AUTHENTICATION_REQUIRED = 511, "Network Authentication Required" + + +# Include lower-case styles for `requests` compatibility. 
+for code in codes: + setattr(codes, code._name_.lower(), int(code)) diff --git a/.venv/Lib/site-packages/httpx/_transports/__init__.py b/.venv/Lib/site-packages/httpx/_transports/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/httpx/_transports/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/httpx/_transports/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..b7aa8c8e Binary files /dev/null and b/.venv/Lib/site-packages/httpx/_transports/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/_transports/__pycache__/asgi.cpython-311.pyc b/.venv/Lib/site-packages/httpx/_transports/__pycache__/asgi.cpython-311.pyc new file mode 100644 index 00000000..6ffbb6f7 Binary files /dev/null and b/.venv/Lib/site-packages/httpx/_transports/__pycache__/asgi.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/_transports/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/httpx/_transports/__pycache__/base.cpython-311.pyc new file mode 100644 index 00000000..21fb5d91 Binary files /dev/null and b/.venv/Lib/site-packages/httpx/_transports/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/_transports/__pycache__/default.cpython-311.pyc b/.venv/Lib/site-packages/httpx/_transports/__pycache__/default.cpython-311.pyc new file mode 100644 index 00000000..a4ac2133 Binary files /dev/null and b/.venv/Lib/site-packages/httpx/_transports/__pycache__/default.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/_transports/__pycache__/mock.cpython-311.pyc b/.venv/Lib/site-packages/httpx/_transports/__pycache__/mock.cpython-311.pyc new file mode 100644 index 00000000..2b28e0cf Binary files /dev/null and b/.venv/Lib/site-packages/httpx/_transports/__pycache__/mock.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/_transports/__pycache__/wsgi.cpython-311.pyc 
b/.venv/Lib/site-packages/httpx/_transports/__pycache__/wsgi.cpython-311.pyc new file mode 100644 index 00000000..bdaa302a Binary files /dev/null and b/.venv/Lib/site-packages/httpx/_transports/__pycache__/wsgi.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/httpx/_transports/asgi.py b/.venv/Lib/site-packages/httpx/_transports/asgi.py new file mode 100644 index 00000000..9543a128 --- /dev/null +++ b/.venv/Lib/site-packages/httpx/_transports/asgi.py @@ -0,0 +1,181 @@ +from __future__ import annotations + +import typing + +import sniffio + +from .._models import Request, Response +from .._types import AsyncByteStream +from .base import AsyncBaseTransport + +if typing.TYPE_CHECKING: # pragma: no cover + import asyncio + + import trio + + Event = typing.Union[asyncio.Event, trio.Event] + + +_Message = typing.Dict[str, typing.Any] +_Receive = typing.Callable[[], typing.Awaitable[_Message]] +_Send = typing.Callable[ + [typing.Dict[str, typing.Any]], typing.Coroutine[None, None, None] +] +_ASGIApp = typing.Callable[ + [typing.Dict[str, typing.Any], _Receive, _Send], typing.Coroutine[None, None, None] +] + + +def create_event() -> Event: + if sniffio.current_async_library() == "trio": + import trio + + return trio.Event() + else: + import asyncio + + return asyncio.Event() + + +class ASGIResponseStream(AsyncByteStream): + def __init__(self, body: list[bytes]) -> None: + self._body = body + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + yield b"".join(self._body) + + +class ASGITransport(AsyncBaseTransport): + """ + A custom AsyncTransport that handles sending requests directly to an ASGI app. + The simplest way to use this functionality is to use the `app` argument. + + ``` + client = httpx.AsyncClient(app=app) + ``` + + Alternatively, you can setup the transport instance explicitly. 
+ This allows you to include any additional configuration arguments specific + to the ASGITransport class: + + ``` + transport = httpx.ASGITransport( + app=app, + root_path="/submount", + client=("1.2.3.4", 123) + ) + client = httpx.AsyncClient(transport=transport) + ``` + + Arguments: + + * `app` - The ASGI application. + * `raise_app_exceptions` - Boolean indicating if exceptions in the application + should be raised. Default to `True`. Can be set to `False` for use cases + such as testing the content of a client 500 response. + * `root_path` - The root path on which the ASGI application should be mounted. + * `client` - A two-tuple indicating the client IP and port of incoming requests. + ``` + """ + + def __init__( + self, + app: _ASGIApp, + raise_app_exceptions: bool = True, + root_path: str = "", + client: tuple[str, int] = ("127.0.0.1", 123), + ) -> None: + self.app = app + self.raise_app_exceptions = raise_app_exceptions + self.root_path = root_path + self.client = client + + async def handle_async_request( + self, + request: Request, + ) -> Response: + assert isinstance(request.stream, AsyncByteStream) + + # ASGI scope. + scope = { + "type": "http", + "asgi": {"version": "3.0"}, + "http_version": "1.1", + "method": request.method, + "headers": [(k.lower(), v) for (k, v) in request.headers.raw], + "scheme": request.url.scheme, + "path": request.url.path, + "raw_path": request.url.raw_path.split(b"?")[0], + "query_string": request.url.query, + "server": (request.url.host, request.url.port), + "client": self.client, + "root_path": self.root_path, + } + + # Request. + request_body_chunks = request.stream.__aiter__() + request_complete = False + + # Response. + status_code = None + response_headers = None + body_parts = [] + response_started = False + response_complete = create_event() + + # ASGI callables. 
+ + async def receive() -> dict[str, typing.Any]: + nonlocal request_complete + + if request_complete: + await response_complete.wait() + return {"type": "http.disconnect"} + + try: + body = await request_body_chunks.__anext__() + except StopAsyncIteration: + request_complete = True + return {"type": "http.request", "body": b"", "more_body": False} + return {"type": "http.request", "body": body, "more_body": True} + + async def send(message: dict[str, typing.Any]) -> None: + nonlocal status_code, response_headers, response_started + + if message["type"] == "http.response.start": + assert not response_started + + status_code = message["status"] + response_headers = message.get("headers", []) + response_started = True + + elif message["type"] == "http.response.body": + assert not response_complete.is_set() + body = message.get("body", b"") + more_body = message.get("more_body", False) + + if body and request.method != "HEAD": + body_parts.append(body) + + if not more_body: + response_complete.set() + + try: + await self.app(scope, receive, send) + except Exception: # noqa: PIE-786 + if self.raise_app_exceptions: + raise + + response_complete.set() + if status_code is None: + status_code = 500 + if response_headers is None: + response_headers = {} + + assert response_complete.is_set() + assert status_code is not None + assert response_headers is not None + + stream = ASGIResponseStream(body_parts) + + return Response(status_code, headers=response_headers, stream=stream) diff --git a/.venv/Lib/site-packages/httpx/_transports/base.py b/.venv/Lib/site-packages/httpx/_transports/base.py new file mode 100644 index 00000000..8b6dc3c2 --- /dev/null +++ b/.venv/Lib/site-packages/httpx/_transports/base.py @@ -0,0 +1,84 @@ +from __future__ import annotations + +import typing +from types import TracebackType + +from .._models import Request, Response + +T = typing.TypeVar("T", bound="BaseTransport") +A = typing.TypeVar("A", bound="AsyncBaseTransport") + + +class BaseTransport: + 
def __enter__(self: T) -> T: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + self.close() + + def handle_request(self, request: Request) -> Response: + """ + Send a single HTTP request and return a response. + + Developers shouldn't typically ever need to call into this API directly, + since the Client class provides all the higher level user-facing API + niceties. + + In order to properly release any network resources, the response + stream should *either* be consumed immediately, with a call to + `response.stream.read()`, or else the `handle_request` call should + be followed with a try/finally block to ensuring the stream is + always closed. + + Example usage: + + with httpx.HTTPTransport() as transport: + req = httpx.Request( + method=b"GET", + url=(b"https", b"www.example.com", 443, b"/"), + headers=[(b"Host", b"www.example.com")], + ) + resp = transport.handle_request(req) + body = resp.stream.read() + print(resp.status_code, resp.headers, body) + + + Takes a `Request` instance as the only argument. + + Returns a `Response` instance. + """ + raise NotImplementedError( + "The 'handle_request' method must be implemented." + ) # pragma: no cover + + def close(self) -> None: + pass + + +class AsyncBaseTransport: + async def __aenter__(self: A) -> A: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + await self.aclose() + + async def handle_async_request( + self, + request: Request, + ) -> Response: + raise NotImplementedError( + "The 'handle_async_request' method must be implemented." 
+ ) # pragma: no cover + + async def aclose(self) -> None: + pass diff --git a/.venv/Lib/site-packages/httpx/_transports/default.py b/.venv/Lib/site-packages/httpx/_transports/default.py new file mode 100644 index 00000000..14476a3c --- /dev/null +++ b/.venv/Lib/site-packages/httpx/_transports/default.py @@ -0,0 +1,385 @@ +""" +Custom transports, with nicely configured defaults. + +The following additional keyword arguments are currently supported by httpcore... + +* uds: str +* local_address: str +* retries: int + +Example usages... + +# Disable HTTP/2 on a single specific domain. +mounts = { + "all://": httpx.HTTPTransport(http2=True), + "all://*example.org": httpx.HTTPTransport() +} + +# Using advanced httpcore configuration, with connection retries. +transport = httpx.HTTPTransport(retries=1) +client = httpx.Client(transport=transport) + +# Using advanced httpcore configuration, with unix domain sockets. +transport = httpx.HTTPTransport(uds="socket.uds") +client = httpx.Client(transport=transport) +""" +from __future__ import annotations + +import contextlib +import typing +from types import TracebackType + +import httpcore + +from .._config import DEFAULT_LIMITS, Limits, Proxy, create_ssl_context +from .._exceptions import ( + ConnectError, + ConnectTimeout, + LocalProtocolError, + NetworkError, + PoolTimeout, + ProtocolError, + ProxyError, + ReadError, + ReadTimeout, + RemoteProtocolError, + TimeoutException, + UnsupportedProtocol, + WriteError, + WriteTimeout, +) +from .._models import Request, Response +from .._types import AsyncByteStream, CertTypes, ProxyTypes, SyncByteStream, VerifyTypes +from .._urls import URL +from .base import AsyncBaseTransport, BaseTransport + +T = typing.TypeVar("T", bound="HTTPTransport") +A = typing.TypeVar("A", bound="AsyncHTTPTransport") + +SOCKET_OPTION = typing.Union[ + typing.Tuple[int, int, int], + typing.Tuple[int, int, typing.Union[bytes, bytearray]], + typing.Tuple[int, int, None, int], +] + + 
+@contextlib.contextmanager +def map_httpcore_exceptions() -> typing.Iterator[None]: + try: + yield + except Exception as exc: + mapped_exc = None + + for from_exc, to_exc in HTTPCORE_EXC_MAP.items(): + if not isinstance(exc, from_exc): + continue + # We want to map to the most specific exception we can find. + # Eg if `exc` is an `httpcore.ReadTimeout`, we want to map to + # `httpx.ReadTimeout`, not just `httpx.TimeoutException`. + if mapped_exc is None or issubclass(to_exc, mapped_exc): + mapped_exc = to_exc + + if mapped_exc is None: # pragma: no cover + raise + + message = str(exc) + raise mapped_exc(message) from exc + + +HTTPCORE_EXC_MAP = { + httpcore.TimeoutException: TimeoutException, + httpcore.ConnectTimeout: ConnectTimeout, + httpcore.ReadTimeout: ReadTimeout, + httpcore.WriteTimeout: WriteTimeout, + httpcore.PoolTimeout: PoolTimeout, + httpcore.NetworkError: NetworkError, + httpcore.ConnectError: ConnectError, + httpcore.ReadError: ReadError, + httpcore.WriteError: WriteError, + httpcore.ProxyError: ProxyError, + httpcore.UnsupportedProtocol: UnsupportedProtocol, + httpcore.ProtocolError: ProtocolError, + httpcore.LocalProtocolError: LocalProtocolError, + httpcore.RemoteProtocolError: RemoteProtocolError, +} + + +class ResponseStream(SyncByteStream): + def __init__(self, httpcore_stream: typing.Iterable[bytes]) -> None: + self._httpcore_stream = httpcore_stream + + def __iter__(self) -> typing.Iterator[bytes]: + with map_httpcore_exceptions(): + for part in self._httpcore_stream: + yield part + + def close(self) -> None: + if hasattr(self._httpcore_stream, "close"): + self._httpcore_stream.close() + + +class HTTPTransport(BaseTransport): + def __init__( + self, + verify: VerifyTypes = True, + cert: CertTypes | None = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + proxy: ProxyTypes | None = None, + uds: str | None = None, + local_address: str | None = None, + retries: int = 0, + 
socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) + proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy + + if proxy is None: + self._pool = httpcore.ConnectionPool( + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + uds=uds, + local_address=local_address, + retries=retries, + socket_options=socket_options, + ) + elif proxy.url.scheme in ("http", "https"): + self._pool = httpcore.HTTPProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + proxy_headers=proxy.headers.raw, + ssl_context=ssl_context, + proxy_ssl_context=proxy.ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + socket_options=socket_options, + ) + elif proxy.url.scheme == "socks5": + try: + import socksio # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using SOCKS proxy, but the 'socksio' package is not installed. " + "Make sure to install httpx using `pip install httpx[socks]`." 
+ ) from None + + self._pool = httpcore.SOCKSProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + ) + else: # pragma: no cover + raise ValueError( + "Proxy protocol must be either 'http', 'https', or 'socks5'," + f" but got {proxy.url.scheme!r}." + ) + + def __enter__(self: T) -> T: # Use generics for subclass support. + self._pool.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + with map_httpcore_exceptions(): + self._pool.__exit__(exc_type, exc_value, traceback) + + def handle_request( + self, + request: Request, + ) -> Response: + assert isinstance(request.stream, SyncByteStream) + + req = httpcore.Request( + method=request.method, + url=httpcore.URL( + scheme=request.url.raw_scheme, + host=request.url.raw_host, + port=request.url.port, + target=request.url.raw_path, + ), + headers=request.headers.raw, + content=request.stream, + extensions=request.extensions, + ) + with map_httpcore_exceptions(): + resp = self._pool.handle_request(req) + + assert isinstance(resp.stream, typing.Iterable) + + return Response( + status_code=resp.status, + headers=resp.headers, + stream=ResponseStream(resp.stream), + extensions=resp.extensions, + ) + + def close(self) -> None: + self._pool.close() + + +class AsyncResponseStream(AsyncByteStream): + def __init__(self, httpcore_stream: typing.AsyncIterable[bytes]) -> None: + self._httpcore_stream = httpcore_stream + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + with map_httpcore_exceptions(): + async for part in self._httpcore_stream: + yield part 
+ + async def aclose(self) -> None: + if hasattr(self._httpcore_stream, "aclose"): + await self._httpcore_stream.aclose() + + +class AsyncHTTPTransport(AsyncBaseTransport): + def __init__( + self, + verify: VerifyTypes = True, + cert: CertTypes | None = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + proxy: ProxyTypes | None = None, + uds: str | None = None, + local_address: str | None = None, + retries: int = 0, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) + proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy + + if proxy is None: + self._pool = httpcore.AsyncConnectionPool( + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + uds=uds, + local_address=local_address, + retries=retries, + socket_options=socket_options, + ) + elif proxy.url.scheme in ("http", "https"): + self._pool = httpcore.AsyncHTTPProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + proxy_headers=proxy.headers.raw, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + socket_options=socket_options, + ) + elif proxy.url.scheme == "socks5": + try: + import socksio # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using SOCKS proxy, but the 'socksio' package is not installed. " + "Make sure to install httpx using `pip install httpx[socks]`." 
+ ) from None + + self._pool = httpcore.AsyncSOCKSProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + ) + else: # pragma: no cover + raise ValueError( + "Proxy protocol must be either 'http', 'https', or 'socks5'," + " but got {proxy.url.scheme!r}." + ) + + async def __aenter__(self: A) -> A: # Use generics for subclass support. + await self._pool.__aenter__() + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + with map_httpcore_exceptions(): + await self._pool.__aexit__(exc_type, exc_value, traceback) + + async def handle_async_request( + self, + request: Request, + ) -> Response: + assert isinstance(request.stream, AsyncByteStream) + + req = httpcore.Request( + method=request.method, + url=httpcore.URL( + scheme=request.url.raw_scheme, + host=request.url.raw_host, + port=request.url.port, + target=request.url.raw_path, + ), + headers=request.headers.raw, + content=request.stream, + extensions=request.extensions, + ) + with map_httpcore_exceptions(): + resp = await self._pool.handle_async_request(req) + + assert isinstance(resp.stream, typing.AsyncIterable) + + return Response( + status_code=resp.status, + headers=resp.headers, + stream=AsyncResponseStream(resp.stream), + extensions=resp.extensions, + ) + + async def aclose(self) -> None: + await self._pool.aclose() diff --git a/.venv/Lib/site-packages/httpx/_transports/mock.py b/.venv/Lib/site-packages/httpx/_transports/mock.py new file mode 100644 index 00000000..5abea837 --- /dev/null +++ b/.venv/Lib/site-packages/httpx/_transports/mock.py @@ -0,0 
+1,40 @@ +from __future__ import annotations + +import typing + +from .._models import Request, Response +from .base import AsyncBaseTransport, BaseTransport + +SyncHandler = typing.Callable[[Request], Response] +AsyncHandler = typing.Callable[[Request], typing.Coroutine[None, None, Response]] + + +class MockTransport(AsyncBaseTransport, BaseTransport): + def __init__(self, handler: SyncHandler | AsyncHandler) -> None: + self.handler = handler + + def handle_request( + self, + request: Request, + ) -> Response: + request.read() + response = self.handler(request) + if not isinstance(response, Response): # pragma: no cover + raise TypeError("Cannot use an async handler in a sync Client") + return response + + async def handle_async_request( + self, + request: Request, + ) -> Response: + await request.aread() + response = self.handler(request) + + # Allow handler to *optionally* be an `async` function. + # If it is, then the `response` variable need to be awaited to actually + # return the result. 
+ + if not isinstance(response, Response): + response = await response + + return response diff --git a/.venv/Lib/site-packages/httpx/_transports/wsgi.py b/.venv/Lib/site-packages/httpx/_transports/wsgi.py new file mode 100644 index 00000000..cd03a941 --- /dev/null +++ b/.venv/Lib/site-packages/httpx/_transports/wsgi.py @@ -0,0 +1,146 @@ +from __future__ import annotations + +import io +import itertools +import sys +import typing + +from .._models import Request, Response +from .._types import SyncByteStream +from .base import BaseTransport + +if typing.TYPE_CHECKING: + from _typeshed import OptExcInfo # pragma: no cover + from _typeshed.wsgi import WSGIApplication # pragma: no cover + +_T = typing.TypeVar("_T") + + +def _skip_leading_empty_chunks(body: typing.Iterable[_T]) -> typing.Iterable[_T]: + body = iter(body) + for chunk in body: + if chunk: + return itertools.chain([chunk], body) + return [] + + +class WSGIByteStream(SyncByteStream): + def __init__(self, result: typing.Iterable[bytes]) -> None: + self._close = getattr(result, "close", None) + self._result = _skip_leading_empty_chunks(result) + + def __iter__(self) -> typing.Iterator[bytes]: + for part in self._result: + yield part + + def close(self) -> None: + if self._close is not None: + self._close() + + +class WSGITransport(BaseTransport): + """ + A custom transport that handles sending requests directly to an WSGI app. + The simplest way to use this functionality is to use the `app` argument. + + ``` + client = httpx.Client(app=app) + ``` + + Alternatively, you can setup the transport instance explicitly. + This allows you to include any additional configuration arguments specific + to the WSGITransport class: + + ``` + transport = httpx.WSGITransport( + app=app, + script_name="/submount", + remote_addr="1.2.3.4" + ) + client = httpx.Client(transport=transport) + ``` + + Arguments: + + * `app` - The WSGI application. 
+ * `raise_app_exceptions` - Boolean indicating if exceptions in the application + should be raised. Default to `True`. Can be set to `False` for use cases + such as testing the content of a client 500 response. + * `script_name` - The root path on which the WSGI application should be mounted. + * `remote_addr` - A string indicating the client IP of incoming requests. + ``` + """ + + def __init__( + self, + app: WSGIApplication, + raise_app_exceptions: bool = True, + script_name: str = "", + remote_addr: str = "127.0.0.1", + wsgi_errors: typing.TextIO | None = None, + ) -> None: + self.app = app + self.raise_app_exceptions = raise_app_exceptions + self.script_name = script_name + self.remote_addr = remote_addr + self.wsgi_errors = wsgi_errors + + def handle_request(self, request: Request) -> Response: + request.read() + wsgi_input = io.BytesIO(request.content) + + port = request.url.port or {"http": 80, "https": 443}[request.url.scheme] + environ = { + "wsgi.version": (1, 0), + "wsgi.url_scheme": request.url.scheme, + "wsgi.input": wsgi_input, + "wsgi.errors": self.wsgi_errors or sys.stderr, + "wsgi.multithread": True, + "wsgi.multiprocess": False, + "wsgi.run_once": False, + "REQUEST_METHOD": request.method, + "SCRIPT_NAME": self.script_name, + "PATH_INFO": request.url.path, + "QUERY_STRING": request.url.query.decode("ascii"), + "SERVER_NAME": request.url.host, + "SERVER_PORT": str(port), + "SERVER_PROTOCOL": "HTTP/1.1", + "REMOTE_ADDR": self.remote_addr, + } + for header_key, header_value in request.headers.raw: + key = header_key.decode("ascii").upper().replace("-", "_") + if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"): + key = "HTTP_" + key + environ[key] = header_value.decode("ascii") + + seen_status = None + seen_response_headers = None + seen_exc_info = None + + def start_response( + status: str, + response_headers: list[tuple[str, str]], + exc_info: OptExcInfo | None = None, + ) -> typing.Callable[[bytes], typing.Any]: + nonlocal seen_status, 
seen_response_headers, seen_exc_info + seen_status = status + seen_response_headers = response_headers + seen_exc_info = exc_info + return lambda _: None + + result = self.app(environ, start_response) + + stream = WSGIByteStream(result) + + assert seen_status is not None + assert seen_response_headers is not None + if seen_exc_info and seen_exc_info[0] and self.raise_app_exceptions: + raise seen_exc_info[1] + + status_code = int(seen_status.split()[0]) + headers = [ + (key.encode("ascii"), value.encode("ascii")) + for key, value in seen_response_headers + ] + + return Response(status_code, headers=headers, stream=stream) diff --git a/.venv/Lib/site-packages/httpx/_types.py b/.venv/Lib/site-packages/httpx/_types.py new file mode 100644 index 00000000..649d101d --- /dev/null +++ b/.venv/Lib/site-packages/httpx/_types.py @@ -0,0 +1,134 @@ +""" +Type definitions for type checking purposes. +""" + +import ssl +from http.cookiejar import CookieJar +from typing import ( + IO, + TYPE_CHECKING, + Any, + AsyncIterable, + AsyncIterator, + Callable, + Dict, + Iterable, + Iterator, + List, + Mapping, + MutableMapping, + NamedTuple, + Optional, + Sequence, + Tuple, + Union, +) + +if TYPE_CHECKING: # pragma: no cover + from ._auth import Auth # noqa: F401 + from ._config import Proxy, Timeout # noqa: F401 + from ._models import Cookies, Headers, Request # noqa: F401 + from ._urls import URL, QueryParams # noqa: F401 + + +PrimitiveData = Optional[Union[str, int, float, bool]] + +RawURL = NamedTuple( + "RawURL", + [ + ("raw_scheme", bytes), + ("raw_host", bytes), + ("port", Optional[int]), + ("raw_path", bytes), + ], +) + +URLTypes = Union["URL", str] + +QueryParamTypes = Union[ + "QueryParams", + Mapping[str, Union[PrimitiveData, Sequence[PrimitiveData]]], + List[Tuple[str, PrimitiveData]], + Tuple[Tuple[str, PrimitiveData], ...], + str, + bytes, +] + +HeaderTypes = Union[ + "Headers", + Mapping[str, str], + Mapping[bytes, bytes], + Sequence[Tuple[str, str]], + 
Sequence[Tuple[bytes, bytes]], +] + +CookieTypes = Union["Cookies", CookieJar, Dict[str, str], List[Tuple[str, str]]] + +CertTypes = Union[ + # certfile + str, + # (certfile, keyfile) + Tuple[str, Optional[str]], + # (certfile, keyfile, password) + Tuple[str, Optional[str], Optional[str]], +] +VerifyTypes = Union[str, bool, ssl.SSLContext] +TimeoutTypes = Union[ + Optional[float], + Tuple[Optional[float], Optional[float], Optional[float], Optional[float]], + "Timeout", +] +ProxyTypes = Union[URLTypes, "Proxy"] +ProxiesTypes = Union[ProxyTypes, Dict[URLTypes, Union[None, ProxyTypes]]] + +AuthTypes = Union[ + Tuple[Union[str, bytes], Union[str, bytes]], + Callable[["Request"], "Request"], + "Auth", +] + +RequestContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] +ResponseContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] +ResponseExtensions = MutableMapping[str, Any] + +RequestData = Mapping[str, Any] + +FileContent = Union[IO[bytes], bytes, str] +FileTypes = Union[ + # file (or bytes) + FileContent, + # (filename, file (or bytes)) + Tuple[Optional[str], FileContent], + # (filename, file (or bytes), content_type) + Tuple[Optional[str], FileContent, Optional[str]], + # (filename, file (or bytes), content_type, headers) + Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], +] +RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]] + +RequestExtensions = MutableMapping[str, Any] + + +class SyncByteStream: + def __iter__(self) -> Iterator[bytes]: + raise NotImplementedError( + "The '__iter__' method must be implemented." + ) # pragma: no cover + yield b"" # pragma: no cover + + def close(self) -> None: + """ + Subclasses can override this method to release any network resources + after a request/response cycle is complete. + """ + + +class AsyncByteStream: + async def __aiter__(self) -> AsyncIterator[bytes]: + raise NotImplementedError( + "The '__aiter__' method must be implemented." 
+ ) # pragma: no cover + yield b"" # pragma: no cover + + async def aclose(self) -> None: + pass diff --git a/.venv/Lib/site-packages/httpx/_urlparse.py b/.venv/Lib/site-packages/httpx/_urlparse.py new file mode 100644 index 00000000..6a4b55b3 --- /dev/null +++ b/.venv/Lib/site-packages/httpx/_urlparse.py @@ -0,0 +1,502 @@ +""" +An implementation of `urlparse` that provides URL validation and normalization +as described by RFC3986. + +We rely on this implementation rather than the one in Python's stdlib, because: + +* It provides more complete URL validation. +* It properly differentiates between an empty querystring and an absent querystring, + to distinguish URLs with a trailing '?'. +* It handles scheme, hostname, port, and path normalization. +* It supports IDNA hostnames, normalizing them to their encoded form. +* The API supports passing individual components, as well as the complete URL string. + +Previously we relied on the excellent `rfc3986` package to handle URL parsing and +validation, but this module provides a simpler alternative, with less indirection +required. +""" +from __future__ import annotations + +import ipaddress +import re +import typing + +import idna + +from ._exceptions import InvalidURL + +MAX_URL_LENGTH = 65536 + +# https://datatracker.ietf.org/doc/html/rfc3986.html#section-2.3 +UNRESERVED_CHARACTERS = ( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~" +) +SUB_DELIMS = "!$&'()*+,;=" + +PERCENT_ENCODED_REGEX = re.compile("%[A-Fa-f0-9]{2}") + + +# {scheme}: (optional) +# //{authority} (optional) +# {path} +# ?{query} (optional) +# #{fragment} (optional) +URL_REGEX = re.compile( + ( + r"(?:(?P{scheme}):)?" + r"(?://(?P{authority}))?" + r"(?P{path})" + r"(?:\?(?P{query}))?" + r"(?:#(?P{fragment}))?" 
+ ).format( + scheme="([a-zA-Z][a-zA-Z0-9+.-]*)?", + authority="[^/?#]*", + path="[^?#]*", + query="[^#]*", + fragment=".*", + ) +) + +# {userinfo}@ (optional) +# {host} +# :{port} (optional) +AUTHORITY_REGEX = re.compile( + ( + r"(?:(?P{userinfo})@)?" r"(?P{host})" r":?(?P{port})?" + ).format( + userinfo=".*", # Any character sequence. + host="(\\[.*\\]|[^:@]*)", # Either any character sequence excluding ':' or '@', + # or an IPv6 address enclosed within square brackets. + port=".*", # Any character sequence. + ) +) + + +# If we call urlparse with an individual component, then we need to regex +# validate that component individually. +# Note that we're duplicating the same strings as above. Shock! Horror!! +COMPONENT_REGEX = { + "scheme": re.compile("([a-zA-Z][a-zA-Z0-9+.-]*)?"), + "authority": re.compile("[^/?#]*"), + "path": re.compile("[^?#]*"), + "query": re.compile("[^#]*"), + "fragment": re.compile(".*"), + "userinfo": re.compile("[^@]*"), + "host": re.compile("(\\[.*\\]|[^:]*)"), + "port": re.compile(".*"), +} + + +# We use these simple regexs as a first pass before handing off to +# the stdlib 'ipaddress' module for IP address validation. 
+IPv4_STYLE_HOSTNAME = re.compile(r"^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$") +IPv6_STYLE_HOSTNAME = re.compile(r"^\[.*\]$") + + +class ParseResult(typing.NamedTuple): + scheme: str + userinfo: str + host: str + port: int | None + path: str + query: str | None + fragment: str | None + + @property + def authority(self) -> str: + return "".join( + [ + f"{self.userinfo}@" if self.userinfo else "", + f"[{self.host}]" if ":" in self.host else self.host, + f":{self.port}" if self.port is not None else "", + ] + ) + + @property + def netloc(self) -> str: + return "".join( + [ + f"[{self.host}]" if ":" in self.host else self.host, + f":{self.port}" if self.port is not None else "", + ] + ) + + def copy_with(self, **kwargs: str | None) -> ParseResult: + if not kwargs: + return self + + defaults = { + "scheme": self.scheme, + "authority": self.authority, + "path": self.path, + "query": self.query, + "fragment": self.fragment, + } + defaults.update(kwargs) + return urlparse("", **defaults) + + def __str__(self) -> str: + authority = self.authority + return "".join( + [ + f"{self.scheme}:" if self.scheme else "", + f"//{authority}" if authority else "", + self.path, + f"?{self.query}" if self.query is not None else "", + f"#{self.fragment}" if self.fragment is not None else "", + ] + ) + + +def urlparse(url: str = "", **kwargs: str | None) -> ParseResult: + # Initial basic checks on allowable URLs. + # --------------------------------------- + + # Hard limit the maximum allowable URL length. + if len(url) > MAX_URL_LENGTH: + raise InvalidURL("URL too long") + + # If a URL includes any ASCII control characters including \t, \r, \n, + # then treat it as invalid. + if any(char.isascii() and not char.isprintable() for char in url): + raise InvalidURL("Invalid non-printable ASCII character in URL") + + # Some keyword arguments require special handling. + # ------------------------------------------------ + + # Coerce "port" to a string, if it is provided as an integer. 
+ if "port" in kwargs: + port = kwargs["port"] + kwargs["port"] = str(port) if isinstance(port, int) else port + + # Replace "netloc" with "host and "port". + if "netloc" in kwargs: + netloc = kwargs.pop("netloc") or "" + kwargs["host"], _, kwargs["port"] = netloc.partition(":") + + # Replace "username" and/or "password" with "userinfo". + if "username" in kwargs or "password" in kwargs: + username = quote(kwargs.pop("username", "") or "") + password = quote(kwargs.pop("password", "") or "") + kwargs["userinfo"] = f"{username}:{password}" if password else username + + # Replace "raw_path" with "path" and "query". + if "raw_path" in kwargs: + raw_path = kwargs.pop("raw_path") or "" + kwargs["path"], seperator, kwargs["query"] = raw_path.partition("?") + if not seperator: + kwargs["query"] = None + + # Ensure that IPv6 "host" addresses are always escaped with "[...]". + if "host" in kwargs: + host = kwargs.get("host") or "" + if ":" in host and not (host.startswith("[") and host.endswith("]")): + kwargs["host"] = f"[{host}]" + + # If any keyword arguments are provided, ensure they are valid. + # ------------------------------------------------------------- + + for key, value in kwargs.items(): + if value is not None: + if len(value) > MAX_URL_LENGTH: + raise InvalidURL(f"URL component '{key}' too long") + + # If a component includes any ASCII control characters including \t, \r, \n, + # then treat it as invalid. + if any(char.isascii() and not char.isprintable() for char in value): + raise InvalidURL( + f"Invalid non-printable ASCII character in URL component '{key}'" + ) + + # Ensure that keyword arguments match as a valid regex. + if not COMPONENT_REGEX[key].fullmatch(value): + raise InvalidURL(f"Invalid URL component '{key}'") + + # The URL_REGEX will always match, but may have empty components. + url_match = URL_REGEX.match(url) + assert url_match is not None + url_dict = url_match.groupdict() + + # * 'scheme', 'authority', and 'path' may be empty strings. 
+ # * 'query' may be 'None', indicating no trailing "?" portion. + # Any string including the empty string, indicates a trailing "?". + # * 'fragment' may be 'None', indicating no trailing "#" portion. + # Any string including the empty string, indicates a trailing "#". + scheme = kwargs.get("scheme", url_dict["scheme"]) or "" + authority = kwargs.get("authority", url_dict["authority"]) or "" + path = kwargs.get("path", url_dict["path"]) or "" + query = kwargs.get("query", url_dict["query"]) + fragment = kwargs.get("fragment", url_dict["fragment"]) + + # The AUTHORITY_REGEX will always match, but may have empty components. + authority_match = AUTHORITY_REGEX.match(authority) + assert authority_match is not None + authority_dict = authority_match.groupdict() + + # * 'userinfo' and 'host' may be empty strings. + # * 'port' may be 'None'. + userinfo = kwargs.get("userinfo", authority_dict["userinfo"]) or "" + host = kwargs.get("host", authority_dict["host"]) or "" + port = kwargs.get("port", authority_dict["port"]) + + # Normalize and validate each component. + # We end up with a parsed representation of the URL, + # with components that are plain ASCII bytestrings. + parsed_scheme: str = scheme.lower() + parsed_userinfo: str = quote(userinfo, safe=SUB_DELIMS + ":") + parsed_host: str = encode_host(host) + parsed_port: int | None = normalize_port(port, scheme) + + has_scheme = parsed_scheme != "" + has_authority = ( + parsed_userinfo != "" or parsed_host != "" or parsed_port is not None + ) + validate_path(path, has_scheme=has_scheme, has_authority=has_authority) + if has_authority: + path = normalize_path(path) + + # The GEN_DELIMS set is... : / ? # [ ] @ + # These do not need to be percent-quoted unless they serve as delimiters for the + # specific component. + + # For 'path' we need to drop ? and # from the GEN_DELIMS set. + parsed_path: str = quote(path, safe=SUB_DELIMS + ":/[]@") + # For 'query' we need to drop '#' from the GEN_DELIMS set. 
+ parsed_query: str | None = ( + None if query is None else quote(query, safe=SUB_DELIMS + ":/?[]@") + ) + # For 'fragment' we can include all of the GEN_DELIMS set. + parsed_fragment: str | None = ( + None if fragment is None else quote(fragment, safe=SUB_DELIMS + ":/?#[]@") + ) + + # The parsed ASCII bytestrings are our canonical form. + # All properties of the URL are derived from these. + return ParseResult( + parsed_scheme, + parsed_userinfo, + parsed_host, + parsed_port, + parsed_path, + parsed_query, + parsed_fragment, + ) + + +def encode_host(host: str) -> str: + if not host: + return "" + + elif IPv4_STYLE_HOSTNAME.match(host): + # Validate IPv4 hostnames like #.#.#.# + # + # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 + # + # IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet + try: + ipaddress.IPv4Address(host) + except ipaddress.AddressValueError: + raise InvalidURL(f"Invalid IPv4 address: {host!r}") + return host + + elif IPv6_STYLE_HOSTNAME.match(host): + # Validate IPv6 hostnames like [...] + # + # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 + # + # "A host identified by an Internet Protocol literal address, version 6 + # [RFC3513] or later, is distinguished by enclosing the IP literal + # within square brackets ("[" and "]"). This is the only place where + # square bracket characters are allowed in the URI syntax." 
+ try: + ipaddress.IPv6Address(host[1:-1]) + except ipaddress.AddressValueError: + raise InvalidURL(f"Invalid IPv6 address: {host!r}") + return host[1:-1] + + elif host.isascii(): + # Regular ASCII hostnames + # + # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 + # + # reg-name = *( unreserved / pct-encoded / sub-delims ) + return quote(host.lower(), safe=SUB_DELIMS) + + # IDNA hostnames + try: + return idna.encode(host.lower()).decode("ascii") + except idna.IDNAError: + raise InvalidURL(f"Invalid IDNA hostname: {host!r}") + + +def normalize_port(port: str | int | None, scheme: str) -> int | None: + # From https://tools.ietf.org/html/rfc3986#section-3.2.3 + # + # "A scheme may define a default port. For example, the "http" scheme + # defines a default port of "80", corresponding to its reserved TCP + # port number. The type of port designated by the port number (e.g., + # TCP, UDP, SCTP) is defined by the URI scheme. URI producers and + # normalizers should omit the port component and its ":" delimiter if + # port is empty or if its value would be the same as that of the + # scheme's default." + if port is None or port == "": + return None + + try: + port_as_int = int(port) + except ValueError: + raise InvalidURL(f"Invalid port: {port!r}") + + # See https://url.spec.whatwg.org/#url-miscellaneous + default_port = {"ftp": 21, "http": 80, "https": 443, "ws": 80, "wss": 443}.get( + scheme + ) + if port_as_int == default_port: + return None + return port_as_int + + +def validate_path(path: str, has_scheme: bool, has_authority: bool) -> None: + """ + Path validation rules that depend on if the URL contains + a scheme or authority component. + + See https://datatracker.ietf.org/doc/html/rfc3986.html#section-3.3 + """ + if has_authority: + # If a URI contains an authority component, then the path component + # must either be empty or begin with a slash ("/") character." 
+ if path and not path.startswith("/"): + raise InvalidURL("For absolute URLs, path must be empty or begin with '/'") + else: + # If a URI does not contain an authority component, then the path cannot begin + # with two slash characters ("//"). + if path.startswith("//"): + raise InvalidURL( + "URLs with no authority component cannot have a path starting with '//'" + ) + # In addition, a URI reference (Section 4.1) may be a relative-path reference, + # in which case the first path segment cannot contain a colon (":") character. + if path.startswith(":") and not has_scheme: + raise InvalidURL( + "URLs with no scheme component cannot have a path starting with ':'" + ) + + +def normalize_path(path: str) -> str: + """ + Drop "." and ".." segments from a URL path. + + For example: + + normalize_path("/path/./to/somewhere/..") == "/path/to" + """ + # https://datatracker.ietf.org/doc/html/rfc3986#section-5.2.4 + components = path.split("/") + output: list[str] = [] + for component in components: + if component == ".": + pass + elif component == "..": + if output and output != [""]: + output.pop() + else: + output.append(component) + return "/".join(output) + + +def percent_encode(char: str) -> str: + """ + Replace a single character with the percent-encoded representation. + + Characters outside the ASCII range are represented with their a percent-encoded + representation of their UTF-8 byte sequence. + + For example: + + percent_encode(" ") == "%20" + """ + return "".join([f"%{byte:02x}" for byte in char.encode("utf-8")]).upper() + + +def is_safe(string: str, safe: str = "/") -> bool: + """ + Determine if a given string is already quote-safe. + """ + NON_ESCAPED_CHARS = UNRESERVED_CHARACTERS + safe + "%" + + # All characters must already be non-escaping or '%' + for char in string: + if char not in NON_ESCAPED_CHARS: + return False + + return True + + +def percent_encoded(string: str, safe: str = "/") -> str: + """ + Use percent-encoding to quote a string. 
+ """ + if is_safe(string, safe=safe): + return string + + NON_ESCAPED_CHARS = UNRESERVED_CHARACTERS + safe + return "".join( + [char if char in NON_ESCAPED_CHARS else percent_encode(char) for char in string] + ) + + +def quote(string: str, safe: str = "/") -> str: + """ + Use percent-encoding to quote a string, omitting existing '%xx' escape sequences. + + See: https://www.rfc-editor.org/rfc/rfc3986#section-2.1 + + * `string`: The string to be percent-escaped. + * `safe`: A string containing characters that may be treated as safe, and do not + need to be escaped. Unreserved characters are always treated as safe. + See: https://www.rfc-editor.org/rfc/rfc3986#section-2.3 + """ + parts = [] + current_position = 0 + for match in re.finditer(PERCENT_ENCODED_REGEX, string): + start_position, end_position = match.start(), match.end() + matched_text = match.group(0) + # Add any text up to the '%xx' escape sequence. + if start_position != current_position: + leading_text = string[current_position:start_position] + parts.append(percent_encoded(leading_text, safe=safe)) + + # Add the '%xx' escape sequence. + parts.append(matched_text) + current_position = end_position + + # Add any text after the final '%xx' escape sequence. + if current_position != len(string): + trailing_text = string[current_position:] + parts.append(percent_encoded(trailing_text, safe=safe)) + + return "".join(parts) + + +def urlencode(items: list[tuple[str, str]]) -> str: + """ + We can use a much simpler version of the stdlib urlencode here because + we don't need to handle a bunch of different typing cases, such as bytes vs str. + + https://github.com/python/cpython/blob/b2f7b2ef0b5421e01efb8c7bee2ef95d3bab77eb/Lib/urllib/parse.py#L926 + + Note that we use '%20' encoding for spaces. and '%2F for '/'. + This is slightly different than `requests`, but is the behaviour that browsers use. 
+ + See + - https://github.com/encode/httpx/issues/2536 + - https://github.com/encode/httpx/issues/2721 + - https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode + """ + return "&".join( + [ + percent_encoded(k, safe="") + "=" + percent_encoded(v, safe="") + for k, v in items + ] + ) diff --git a/.venv/Lib/site-packages/httpx/_urls.py b/.venv/Lib/site-packages/httpx/_urls.py new file mode 100644 index 00000000..43dedd56 --- /dev/null +++ b/.venv/Lib/site-packages/httpx/_urls.py @@ -0,0 +1,646 @@ +from __future__ import annotations + +import typing +from urllib.parse import parse_qs, unquote + +import idna + +from ._types import QueryParamTypes, RawURL, URLTypes +from ._urlparse import urlencode, urlparse +from ._utils import primitive_value_to_str + + +class URL: + """ + url = httpx.URL("HTTPS://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink") + + assert url.scheme == "https" + assert url.username == "jo@email.com" + assert url.password == "a secret" + assert url.userinfo == b"jo%40email.com:a%20secret" + assert url.host == "müller.de" + assert url.raw_host == b"xn--mller-kva.de" + assert url.port == 1234 + assert url.netloc == b"xn--mller-kva.de:1234" + assert url.path == "/pa th" + assert url.query == b"?search=ab" + assert url.raw_path == b"/pa%20th?search=ab" + assert url.fragment == "anchorlink" + + The components of a URL are broken down like this: + + https://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink + [scheme] [ username ] [password] [ host ][port][ path ] [ query ] [fragment] + [ userinfo ] [ netloc ][ raw_path ] + + Note that: + + * `url.scheme` is normalized to always be lowercased. + + * `url.host` is normalized to always be lowercased. Internationalized domain + names are represented in unicode, without IDNA encoding applied. 
For instance: + + url = httpx.URL("http://中国.icom.museum") + assert url.host == "中国.icom.museum" + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.host == "中国.icom.museum" + + * `url.raw_host` is normalized to always be lowercased, and is IDNA encoded. + + url = httpx.URL("http://中国.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + + * `url.port` is either None or an integer. URLs that include the default port for + "http", "https", "ws", "wss", and "ftp" schemes have their port + normalized to `None`. + + assert httpx.URL("http://example.com") == httpx.URL("http://example.com:80") + assert httpx.URL("http://example.com").port is None + assert httpx.URL("http://example.com:80").port is None + + * `url.userinfo` is raw bytes, without URL escaping. Usually you'll want to work + with `url.username` and `url.password` instead, which handle the URL escaping. + + * `url.raw_path` is raw bytes of both the path and query, without URL escaping. + This portion is used as the target when constructing HTTP requests. Usually you'll + want to work with `url.path` instead. + + * `url.query` is raw bytes, without URL escaping. A URL query string portion can + only be properly URL escaped when decoding the parameter names and values + themselves. + """ + + def __init__(self, url: URL | str = "", **kwargs: typing.Any) -> None: + if kwargs: + allowed = { + "scheme": str, + "username": str, + "password": str, + "userinfo": bytes, + "host": str, + "port": int, + "netloc": bytes, + "path": str, + "query": bytes, + "raw_path": bytes, + "fragment": str, + "params": object, + } + + # Perform type checking for all supported keyword arguments. 
+ for key, value in kwargs.items(): + if key not in allowed: + message = f"{key!r} is an invalid keyword argument for URL()" + raise TypeError(message) + if value is not None and not isinstance(value, allowed[key]): + expected = allowed[key].__name__ + seen = type(value).__name__ + message = f"Argument {key!r} must be {expected} but got {seen}" + raise TypeError(message) + if isinstance(value, bytes): + kwargs[key] = value.decode("ascii") + + if "params" in kwargs: + # Replace any "params" keyword with the raw "query" instead. + # + # Ensure that empty params use `kwargs["query"] = None` rather + # than `kwargs["query"] = ""`, so that generated URLs do not + # include an empty trailing "?". + params = kwargs.pop("params") + kwargs["query"] = None if not params else str(QueryParams(params)) + + if isinstance(url, str): + self._uri_reference = urlparse(url, **kwargs) + elif isinstance(url, URL): + self._uri_reference = url._uri_reference.copy_with(**kwargs) + else: + raise TypeError( + "Invalid type for url. Expected str or httpx.URL," + f" got {type(url)}: {url!r}" + ) + + @property + def scheme(self) -> str: + """ + The URL scheme, such as "http", "https". + Always normalised to lowercase. + """ + return self._uri_reference.scheme + + @property + def raw_scheme(self) -> bytes: + """ + The raw bytes representation of the URL scheme, such as b"http", b"https". + Always normalised to lowercase. + """ + return self._uri_reference.scheme.encode("ascii") + + @property + def userinfo(self) -> bytes: + """ + The URL userinfo as a raw bytestring. + For example: b"jo%40email.com:a%20secret". + """ + return self._uri_reference.userinfo.encode("ascii") + + @property + def username(self) -> str: + """ + The URL username as a string, with URL decoding applied. 
+ For example: "jo@email.com" + """ + userinfo = self._uri_reference.userinfo + return unquote(userinfo.partition(":")[0]) + + @property + def password(self) -> str: + """ + The URL password as a string, with URL decoding applied. + For example: "a secret" + """ + userinfo = self._uri_reference.userinfo + return unquote(userinfo.partition(":")[2]) + + @property + def host(self) -> str: + """ + The URL host as a string. + Always normalized to lowercase, with IDNA hosts decoded into unicode. + + Examples: + + url = httpx.URL("http://www.EXAMPLE.org") + assert url.host == "www.example.org" + + url = httpx.URL("http://中国.icom.museum") + assert url.host == "中国.icom.museum" + + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.host == "中国.icom.museum" + + url = httpx.URL("https://[::ffff:192.168.0.1]") + assert url.host == "::ffff:192.168.0.1" + """ + host: str = self._uri_reference.host + + if host.startswith("xn--"): + host = idna.decode(host) + + return host + + @property + def raw_host(self) -> bytes: + """ + The raw bytes representation of the URL host. + Always normalized to lowercase, and IDNA encoded. + + Examples: + + url = httpx.URL("http://www.EXAMPLE.org") + assert url.raw_host == b"www.example.org" + + url = httpx.URL("http://中国.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + + url = httpx.URL("https://[::ffff:192.168.0.1]") + assert url.raw_host == b"::ffff:192.168.0.1" + """ + return self._uri_reference.host.encode("ascii") + + @property + def port(self) -> int | None: + """ + The URL port as an integer. + + Note that the URL class performs port normalization as per the WHATWG spec. + Default ports for "http", "https", "ws", "wss", and "ftp" schemes are always + treated as `None`. 
+ + For example: + + assert httpx.URL("http://www.example.com") == httpx.URL("http://www.example.com:80") + assert httpx.URL("http://www.example.com:80").port is None + """ + return self._uri_reference.port + + @property + def netloc(self) -> bytes: + """ + Either `` or `:` as bytes. + Always normalized to lowercase, and IDNA encoded. + + This property may be used for generating the value of a request + "Host" header. + """ + return self._uri_reference.netloc.encode("ascii") + + @property + def path(self) -> str: + """ + The URL path as a string. Excluding the query string, and URL decoded. + + For example: + + url = httpx.URL("https://example.com/pa%20th") + assert url.path == "/pa th" + """ + path = self._uri_reference.path or "/" + return unquote(path) + + @property + def query(self) -> bytes: + """ + The URL query string, as raw bytes, excluding the leading b"?". + + This is necessarily a bytewise interface, because we cannot + perform URL decoding of this representation until we've parsed + the keys and values into a QueryParams instance. + + For example: + + url = httpx.URL("https://example.com/?filter=some%20search%20terms") + assert url.query == b"filter=some%20search%20terms" + """ + query = self._uri_reference.query or "" + return query.encode("ascii") + + @property + def params(self) -> QueryParams: + """ + The URL query parameters, neatly parsed and packaged into an immutable + multidict representation. + """ + return QueryParams(self._uri_reference.query) + + @property + def raw_path(self) -> bytes: + """ + The complete URL path and query string as raw bytes. + Used as the target when constructing HTTP requests. + + For example: + + GET /users?search=some%20text HTTP/1.1 + Host: www.example.org + Connection: close + """ + path = self._uri_reference.path or "/" + if self._uri_reference.query is not None: + path += "?" 
+ self._uri_reference.query + return path.encode("ascii") + + @property + def fragment(self) -> str: + """ + The URL fragments, as used in HTML anchors. + As a string, without the leading '#'. + """ + return unquote(self._uri_reference.fragment or "") + + @property + def raw(self) -> RawURL: + """ + Provides the (scheme, host, port, target) for the outgoing request. + + In older versions of `httpx` this was used in the low-level transport API. + We no longer use `RawURL`, and this property will be deprecated + in a future release. + """ + return RawURL( + self.raw_scheme, + self.raw_host, + self.port, + self.raw_path, + ) + + @property + def is_absolute_url(self) -> bool: + """ + Return `True` for absolute URLs such as 'http://example.com/path', + and `False` for relative URLs such as '/path'. + """ + # We don't use `.is_absolute` from `rfc3986` because it treats + # URLs with a fragment portion as not absolute. + # What we actually care about is if the URL provides + # a scheme and hostname to which connections should be made. + return bool(self._uri_reference.scheme and self._uri_reference.host) + + @property + def is_relative_url(self) -> bool: + """ + Return `False` for absolute URLs such as 'http://example.com/path', + and `True` for relative URLs such as '/path'. + """ + return not self.is_absolute_url + + def copy_with(self, **kwargs: typing.Any) -> URL: + """ + Copy this URL, returning a new URL with some components altered. + Accepts the same set of parameters as the components that are made + available via properties on the `URL` class. 
+ + For example: + + url = httpx.URL("https://www.example.com").copy_with( + username="jo@gmail.com", password="a secret" + ) + assert url == "https://jo%40email.com:a%20secret@www.example.com" + """ + return URL(self, **kwargs) + + def copy_set_param(self, key: str, value: typing.Any = None) -> URL: + return self.copy_with(params=self.params.set(key, value)) + + def copy_add_param(self, key: str, value: typing.Any = None) -> URL: + return self.copy_with(params=self.params.add(key, value)) + + def copy_remove_param(self, key: str) -> URL: + return self.copy_with(params=self.params.remove(key)) + + def copy_merge_params(self, params: QueryParamTypes) -> URL: + return self.copy_with(params=self.params.merge(params)) + + def join(self, url: URLTypes) -> URL: + """ + Return an absolute URL, using this URL as the base. + + Eg. + + url = httpx.URL("https://www.example.com/test") + url = url.join("/new/path") + assert url == "https://www.example.com/new/path" + """ + from urllib.parse import urljoin + + return URL(urljoin(str(self), str(URL(url)))) + + def __hash__(self) -> int: + return hash(str(self)) + + def __eq__(self, other: typing.Any) -> bool: + return isinstance(other, (URL, str)) and str(self) == str(URL(other)) + + def __str__(self) -> str: + return str(self._uri_reference) + + def __repr__(self) -> str: + scheme, userinfo, host, port, path, query, fragment = self._uri_reference + + if ":" in userinfo: + # Mask any password component. 
+ userinfo = f'{userinfo.split(":")[0]}:[secure]' + + authority = "".join( + [ + f"{userinfo}@" if userinfo else "", + f"[{host}]" if ":" in host else host, + f":{port}" if port is not None else "", + ] + ) + url = "".join( + [ + f"{self.scheme}:" if scheme else "", + f"//{authority}" if authority else "", + path, + f"?{query}" if query is not None else "", + f"#{fragment}" if fragment is not None else "", + ] + ) + + return f"{self.__class__.__name__}({url!r})" + + +class QueryParams(typing.Mapping[str, str]): + """ + URL query parameters, as a multi-dict. + """ + + def __init__(self, *args: QueryParamTypes | None, **kwargs: typing.Any) -> None: + assert len(args) < 2, "Too many arguments." + assert not (args and kwargs), "Cannot mix named and unnamed arguments." + + value = args[0] if args else kwargs + + if value is None or isinstance(value, (str, bytes)): + value = value.decode("ascii") if isinstance(value, bytes) else value + self._dict = parse_qs(value, keep_blank_values=True) + elif isinstance(value, QueryParams): + self._dict = {k: list(v) for k, v in value._dict.items()} + else: + dict_value: dict[typing.Any, list[typing.Any]] = {} + if isinstance(value, (list, tuple)): + # Convert list inputs like: + # [("a", "123"), ("a", "456"), ("b", "789")] + # To a dict representation, like: + # {"a": ["123", "456"], "b": ["789"]} + for item in value: + dict_value.setdefault(item[0], []).append(item[1]) + else: + # Convert dict inputs like: + # {"a": "123", "b": ["456", "789"]} + # To dict inputs where values are always lists, like: + # {"a": ["123"], "b": ["456", "789"]} + dict_value = { + k: list(v) if isinstance(v, (list, tuple)) else [v] + for k, v in value.items() + } + + # Ensure that keys and values are neatly coerced to strings. + # We coerce values `True` and `False` to JSON-like "true" and "false" + # representations, and coerce `None` values to the empty string. 
+ self._dict = { + str(k): [primitive_value_to_str(item) for item in v] + for k, v in dict_value.items() + } + + def keys(self) -> typing.KeysView[str]: + """ + Return all the keys in the query params. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.keys()) == ["a", "b"] + """ + return self._dict.keys() + + def values(self) -> typing.ValuesView[str]: + """ + Return all the values in the query params. If a key occurs more than once + only the first item for that key is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.values()) == ["123", "789"] + """ + return {k: v[0] for k, v in self._dict.items()}.values() + + def items(self) -> typing.ItemsView[str, str]: + """ + Return all items in the query params. If a key occurs more than once + only the first item for that key is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.items()) == [("a", "123"), ("b", "789")] + """ + return {k: v[0] for k, v in self._dict.items()}.items() + + def multi_items(self) -> list[tuple[str, str]]: + """ + Return all items in the query params. Allow duplicate keys to occur. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.multi_items()) == [("a", "123"), ("a", "456"), ("b", "789")] + """ + multi_items: list[tuple[str, str]] = [] + for k, v in self._dict.items(): + multi_items.extend([(k, i) for i in v]) + return multi_items + + def get(self, key: typing.Any, default: typing.Any = None) -> typing.Any: + """ + Get a value from the query param for a given key. If the key occurs + more than once, then only the first value is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert q.get("a") == "123" + """ + if key in self._dict: + return self._dict[str(key)][0] + return default + + def get_list(self, key: str) -> list[str]: + """ + Get all values from the query param for a given key. 
+ + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert q.get_list("a") == ["123", "456"] + """ + return list(self._dict.get(str(key), [])) + + def set(self, key: str, value: typing.Any = None) -> QueryParams: + """ + Return a new QueryParams instance, setting the value of a key. + + Usage: + + q = httpx.QueryParams("a=123") + q = q.set("a", "456") + assert q == httpx.QueryParams("a=456") + """ + q = QueryParams() + q._dict = dict(self._dict) + q._dict[str(key)] = [primitive_value_to_str(value)] + return q + + def add(self, key: str, value: typing.Any = None) -> QueryParams: + """ + Return a new QueryParams instance, setting or appending the value of a key. + + Usage: + + q = httpx.QueryParams("a=123") + q = q.add("a", "456") + assert q == httpx.QueryParams("a=123&a=456") + """ + q = QueryParams() + q._dict = dict(self._dict) + q._dict[str(key)] = q.get_list(key) + [primitive_value_to_str(value)] + return q + + def remove(self, key: str) -> QueryParams: + """ + Return a new QueryParams instance, removing the value of a key. + + Usage: + + q = httpx.QueryParams("a=123") + q = q.remove("a") + assert q == httpx.QueryParams("") + """ + q = QueryParams() + q._dict = dict(self._dict) + q._dict.pop(str(key), None) + return q + + def merge(self, params: QueryParamTypes | None = None) -> QueryParams: + """ + Return a new QueryParams instance, updated with. 
+ + Usage: + + q = httpx.QueryParams("a=123") + q = q.merge({"b": "456"}) + assert q == httpx.QueryParams("a=123&b=456") + + q = httpx.QueryParams("a=123") + q = q.merge({"a": "456", "b": "789"}) + assert q == httpx.QueryParams("a=456&b=789") + """ + q = QueryParams(params) + q._dict = {**self._dict, **q._dict} + return q + + def __getitem__(self, key: typing.Any) -> str: + return self._dict[key][0] + + def __contains__(self, key: typing.Any) -> bool: + return key in self._dict + + def __iter__(self) -> typing.Iterator[typing.Any]: + return iter(self.keys()) + + def __len__(self) -> int: + return len(self._dict) + + def __bool__(self) -> bool: + return bool(self._dict) + + def __hash__(self) -> int: + return hash(str(self)) + + def __eq__(self, other: typing.Any) -> bool: + if not isinstance(other, self.__class__): + return False + return sorted(self.multi_items()) == sorted(other.multi_items()) + + def __str__(self) -> str: + """ + Note that we use '%20' encoding for spaces, and treat '/' as a safe + character. + + See https://github.com/encode/httpx/issues/2536 and + https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode + """ + return urlencode(self.multi_items()) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + query_string = str(self) + return f"{class_name}({query_string!r})" + + def update(self, params: QueryParamTypes | None = None) -> None: + raise RuntimeError( + "QueryParams are immutable since 0.18.0. " + "Use `q = q.merge(...)` to create an updated copy." + ) + + def __setitem__(self, key: str, value: str) -> None: + raise RuntimeError( + "QueryParams are immutable since 0.18.0. " + "Use `q = q.set(key, value)` to create an updated copy." 
+ ) diff --git a/.venv/Lib/site-packages/httpx/_utils.py b/.venv/Lib/site-packages/httpx/_utils.py new file mode 100644 index 00000000..a9ece194 --- /dev/null +++ b/.venv/Lib/site-packages/httpx/_utils.py @@ -0,0 +1,440 @@ +from __future__ import annotations + +import codecs +import email.message +import ipaddress +import mimetypes +import os +import re +import time +import typing +from pathlib import Path +from urllib.request import getproxies + +import sniffio + +from ._types import PrimitiveData + +if typing.TYPE_CHECKING: # pragma: no cover + from ._urls import URL + + +_HTML5_FORM_ENCODING_REPLACEMENTS = {'"': "%22", "\\": "\\\\"} +_HTML5_FORM_ENCODING_REPLACEMENTS.update( + {chr(c): "%{:02X}".format(c) for c in range(0x1F + 1) if c != 0x1B} +) +_HTML5_FORM_ENCODING_RE = re.compile( + r"|".join([re.escape(c) for c in _HTML5_FORM_ENCODING_REPLACEMENTS.keys()]) +) + + +def normalize_header_key( + value: str | bytes, + lower: bool, + encoding: str | None = None, +) -> bytes: + """ + Coerce str/bytes into a strictly byte-wise HTTP header key. + """ + if isinstance(value, bytes): + bytes_value = value + else: + bytes_value = value.encode(encoding or "ascii") + + return bytes_value.lower() if lower else bytes_value + + +def normalize_header_value(value: str | bytes, encoding: str | None = None) -> bytes: + """ + Coerce str/bytes into a strictly byte-wise HTTP header value. + """ + if isinstance(value, bytes): + return value + return value.encode(encoding or "ascii") + + +def primitive_value_to_str(value: PrimitiveData) -> str: + """ + Coerce a primitive data type into a string value. + + Note that we prefer JSON-style 'true'/'false' for boolean values here. + """ + if value is True: + return "true" + elif value is False: + return "false" + elif value is None: + return "" + return str(value) + + +def is_known_encoding(encoding: str) -> bool: + """ + Return `True` if `encoding` is a known codec. 
+ """ + try: + codecs.lookup(encoding) + except LookupError: + return False + return True + + +def format_form_param(name: str, value: str) -> bytes: + """ + Encode a name/value pair within a multipart form. + """ + + def replacer(match: typing.Match[str]) -> str: + return _HTML5_FORM_ENCODING_REPLACEMENTS[match.group(0)] + + value = _HTML5_FORM_ENCODING_RE.sub(replacer, value) + return f'{name}="{value}"'.encode() + + +def get_ca_bundle_from_env() -> str | None: + if "SSL_CERT_FILE" in os.environ: + ssl_file = Path(os.environ["SSL_CERT_FILE"]) + if ssl_file.is_file(): + return str(ssl_file) + if "SSL_CERT_DIR" in os.environ: + ssl_path = Path(os.environ["SSL_CERT_DIR"]) + if ssl_path.is_dir(): + return str(ssl_path) + return None + + +def parse_header_links(value: str) -> list[dict[str, str]]: + """ + Returns a list of parsed link headers, for more info see: + https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Link + The generic syntax of those is: + Link: < uri-reference >; param1=value1; param2="value2" + So for instance: + Link; '; type="image/jpeg",;' + would return + [ + {"url": "http:/.../front.jpeg", "type": "image/jpeg"}, + {"url": "http://.../back.jpeg"}, + ] + :param value: HTTP Link entity-header field + :return: list of parsed link headers + """ + links: list[dict[str, str]] = [] + replace_chars = " '\"" + value = value.strip(replace_chars) + if not value: + return links + for val in re.split(", *<", value): + try: + url, params = val.split(";", 1) + except ValueError: + url, params = val, "" + link = {"url": url.strip("<> '\"")} + for param in params.split(";"): + try: + key, value = param.split("=") + except ValueError: + break + link[key.strip(replace_chars)] = value.strip(replace_chars) + links.append(link) + return links + + +def parse_content_type_charset(content_type: str) -> str | None: + # We used to use `cgi.parse_header()` here, but `cgi` became a dead battery. 
+ # See: https://peps.python.org/pep-0594/#cgi + msg = email.message.Message() + msg["content-type"] = content_type + return msg.get_content_charset(failobj=None) + + +SENSITIVE_HEADERS = {"authorization", "proxy-authorization"} + + +def obfuscate_sensitive_headers( + items: typing.Iterable[tuple[typing.AnyStr, typing.AnyStr]], +) -> typing.Iterator[tuple[typing.AnyStr, typing.AnyStr]]: + for k, v in items: + if to_str(k.lower()) in SENSITIVE_HEADERS: + v = to_bytes_or_str("[secure]", match_type_of=v) + yield k, v + + +def port_or_default(url: URL) -> int | None: + if url.port is not None: + return url.port + return {"http": 80, "https": 443}.get(url.scheme) + + +def same_origin(url: URL, other: URL) -> bool: + """ + Return 'True' if the given URLs share the same origin. + """ + return ( + url.scheme == other.scheme + and url.host == other.host + and port_or_default(url) == port_or_default(other) + ) + + +def is_https_redirect(url: URL, location: URL) -> bool: + """ + Return 'True' if 'location' is a HTTPS upgrade of 'url' + """ + if url.host != location.host: + return False + + return ( + url.scheme == "http" + and port_or_default(url) == 80 + and location.scheme == "https" + and port_or_default(location) == 443 + ) + + +def get_environment_proxies() -> dict[str, str | None]: + """Gets proxy information from the environment""" + + # urllib.request.getproxies() falls back on System + # Registry and Config for proxies on Windows and macOS. + # We don't want to propagate non-HTTP proxies into + # our configuration such as 'TRAVIS_APT_PROXY'. 
+ proxy_info = getproxies() + mounts: dict[str, str | None] = {} + + for scheme in ("http", "https", "all"): + if proxy_info.get(scheme): + hostname = proxy_info[scheme] + mounts[f"{scheme}://"] = ( + hostname if "://" in hostname else f"http://{hostname}" + ) + + no_proxy_hosts = [host.strip() for host in proxy_info.get("no", "").split(",")] + for hostname in no_proxy_hosts: + # See https://curl.haxx.se/libcurl/c/CURLOPT_NOPROXY.html for details + # on how names in `NO_PROXY` are handled. + if hostname == "*": + # If NO_PROXY=* is used or if "*" occurs as any one of the comma + # separated hostnames, then we should just bypass any information + # from HTTP_PROXY, HTTPS_PROXY, ALL_PROXY, and always ignore + # proxies. + return {} + elif hostname: + # NO_PROXY=.google.com is marked as "all://*.google.com, + # which disables "www.google.com" but not "google.com" + # NO_PROXY=google.com is marked as "all://*google.com, + # which disables "www.google.com" and "google.com". + # (But not "wwwgoogle.com") + # NO_PROXY can include domains, IPv6, IPv4 addresses and "localhost" + # NO_PROXY=example.com,::1,localhost,192.168.0.0/16 + if "://" in hostname: + mounts[hostname] = None + elif is_ipv4_hostname(hostname): + mounts[f"all://{hostname}"] = None + elif is_ipv6_hostname(hostname): + mounts[f"all://[{hostname}]"] = None + elif hostname.lower() == "localhost": + mounts[f"all://{hostname}"] = None + else: + mounts[f"all://*{hostname}"] = None + + return mounts + + +def to_bytes(value: str | bytes, encoding: str = "utf-8") -> bytes: + return value.encode(encoding) if isinstance(value, str) else value + + +def to_str(value: str | bytes, encoding: str = "utf-8") -> str: + return value if isinstance(value, str) else value.decode(encoding) + + +def to_bytes_or_str(value: str, match_type_of: typing.AnyStr) -> typing.AnyStr: + return value if isinstance(match_type_of, str) else value.encode() + + +def unquote(value: str) -> str: + return value[1:-1] if value[0] == value[-1] == '"' 
else value + + +def guess_content_type(filename: str | None) -> str | None: + if filename: + return mimetypes.guess_type(filename)[0] or "application/octet-stream" + return None + + +def peek_filelike_length(stream: typing.Any) -> int | None: + """ + Given a file-like stream object, return its length in number of bytes + without reading it into memory. + """ + try: + # Is it an actual file? + fd = stream.fileno() + # Yup, seems to be an actual file. + length = os.fstat(fd).st_size + except (AttributeError, OSError): + # No... Maybe it's something that supports random access, like `io.BytesIO`? + try: + # Assuming so, go to end of stream to figure out its length, + # then put it back in place. + offset = stream.tell() + length = stream.seek(0, os.SEEK_END) + stream.seek(offset) + except (AttributeError, OSError): + # Not even that? Sorry, we're doomed... + return None + + return length + + +class Timer: + async def _get_time(self) -> float: + library = sniffio.current_async_library() + if library == "trio": + import trio + + return trio.current_time() + else: + import asyncio + + return asyncio.get_event_loop().time() + + def sync_start(self) -> None: + self.started = time.perf_counter() + + async def async_start(self) -> None: + self.started = await self._get_time() + + def sync_elapsed(self) -> float: + now = time.perf_counter() + return now - self.started + + async def async_elapsed(self) -> float: + now = await self._get_time() + return now - self.started + + +class URLPattern: + """ + A utility class currently used for making lookups against proxy keys... + + # Wildcard matching... + >>> pattern = URLPattern("all://") + >>> pattern.matches(httpx.URL("http://example.com")) + True + + # Witch scheme matching... + >>> pattern = URLPattern("https://") + >>> pattern.matches(httpx.URL("https://example.com")) + True + >>> pattern.matches(httpx.URL("http://example.com")) + False + + # With domain matching... 
+ >>> pattern = URLPattern("https://example.com") + >>> pattern.matches(httpx.URL("https://example.com")) + True + >>> pattern.matches(httpx.URL("http://example.com")) + False + >>> pattern.matches(httpx.URL("https://other.com")) + False + + # Wildcard scheme, with domain matching... + >>> pattern = URLPattern("all://example.com") + >>> pattern.matches(httpx.URL("https://example.com")) + True + >>> pattern.matches(httpx.URL("http://example.com")) + True + >>> pattern.matches(httpx.URL("https://other.com")) + False + + # With port matching... + >>> pattern = URLPattern("https://example.com:1234") + >>> pattern.matches(httpx.URL("https://example.com:1234")) + True + >>> pattern.matches(httpx.URL("https://example.com")) + False + """ + + def __init__(self, pattern: str) -> None: + from ._urls import URL + + if pattern and ":" not in pattern: + raise ValueError( + f"Proxy keys should use proper URL forms rather " + f"than plain scheme strings. " + f'Instead of "{pattern}", use "{pattern}://"' + ) + + url = URL(pattern) + self.pattern = pattern + self.scheme = "" if url.scheme == "all" else url.scheme + self.host = "" if url.host == "*" else url.host + self.port = url.port + if not url.host or url.host == "*": + self.host_regex: typing.Pattern[str] | None = None + elif url.host.startswith("*."): + # *.example.com should match "www.example.com", but not "example.com" + domain = re.escape(url.host[2:]) + self.host_regex = re.compile(f"^.+\\.{domain}$") + elif url.host.startswith("*"): + # *example.com should match "www.example.com" and "example.com" + domain = re.escape(url.host[1:]) + self.host_regex = re.compile(f"^(.+\\.)?{domain}$") + else: + # example.com should match "example.com" but not "www.example.com" + domain = re.escape(url.host) + self.host_regex = re.compile(f"^{domain}$") + + def matches(self, other: URL) -> bool: + if self.scheme and self.scheme != other.scheme: + return False + if ( + self.host + and self.host_regex is not None + and not 
self.host_regex.match(other.host) + ): + return False + if self.port is not None and self.port != other.port: + return False + return True + + @property + def priority(self) -> tuple[int, int, int]: + """ + The priority allows URLPattern instances to be sortable, so that + we can match from most specific to least specific. + """ + # URLs with a port should take priority over URLs without a port. + port_priority = 0 if self.port is not None else 1 + # Longer hostnames should match first. + host_priority = -len(self.host) + # Longer schemes should match first. + scheme_priority = -len(self.scheme) + return (port_priority, host_priority, scheme_priority) + + def __hash__(self) -> int: + return hash(self.pattern) + + def __lt__(self, other: URLPattern) -> bool: + return self.priority < other.priority + + def __eq__(self, other: typing.Any) -> bool: + return isinstance(other, URLPattern) and self.pattern == other.pattern + + +def is_ipv4_hostname(hostname: str) -> bool: + try: + ipaddress.IPv4Address(hostname.split("/")[0]) + except Exception: + return False + return True + + +def is_ipv6_hostname(hostname: str) -> bool: + try: + ipaddress.IPv6Address(hostname.split("/")[0]) + except Exception: + return False + return True diff --git a/.venv/Lib/site-packages/httpx/py.typed b/.venv/Lib/site-packages/httpx/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/huggingface_hub/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/__pycache__/__init__.cpython-311.pyc index 754fd0e9..97a5f672 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/__pycache__/_commit_api.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/__pycache__/_commit_api.cpython-311.pyc index 4f7693e3..3e49368e 100644 Binary files 
a/.venv/Lib/site-packages/huggingface_hub/__pycache__/_commit_api.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/__pycache__/_commit_api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/__pycache__/_inference_endpoints.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/__pycache__/_inference_endpoints.cpython-311.pyc index aeccd5b6..fec2bcf1 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/__pycache__/_inference_endpoints.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/__pycache__/_inference_endpoints.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/__pycache__/_multi_commits.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/__pycache__/_multi_commits.cpython-311.pyc index edd8eafc..e0a31713 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/__pycache__/_multi_commits.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/__pycache__/_multi_commits.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/__pycache__/_space_api.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/__pycache__/_space_api.cpython-311.pyc index 101470a2..fb92187d 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/__pycache__/_space_api.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/__pycache__/_space_api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/__pycache__/community.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/__pycache__/community.cpython-311.pyc index 35ebf423..c638c642 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/__pycache__/community.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/__pycache__/community.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/__pycache__/constants.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/__pycache__/constants.cpython-311.pyc index d0e879a3..bbc02e42 100644 
Binary files a/.venv/Lib/site-packages/huggingface_hub/__pycache__/constants.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/__pycache__/constants.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/__pycache__/errors.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/__pycache__/errors.cpython-311.pyc index 2643f14a..8de73924 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/__pycache__/errors.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/__pycache__/errors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/__pycache__/file_download.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/__pycache__/file_download.cpython-311.pyc index 9f003b7e..82f3de11 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/__pycache__/file_download.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/__pycache__/file_download.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/__pycache__/hf_api.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/__pycache__/hf_api.cpython-311.pyc index 303c53d8..0578d190 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/__pycache__/hf_api.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/__pycache__/hf_api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/__pycache__/lfs.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/__pycache__/lfs.cpython-311.pyc index d4b9ad2a..9cb2f737 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/__pycache__/lfs.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/__pycache__/lfs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/__pycache__/repocard.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/__pycache__/repocard.cpython-311.pyc index 048ac095..c9f2a63c 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/__pycache__/repocard.cpython-311.pyc and 
b/.venv/Lib/site-packages/huggingface_hub/__pycache__/repocard.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/__pycache__/repocard_data.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/__pycache__/repocard_data.cpython-311.pyc index 0cc238db..ef8d9190 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/__pycache__/repocard_data.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/__pycache__/repocard_data.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/__pycache__/__init__.cpython-311.pyc index 02e500f1..52e3ef4b 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/__pycache__/_client.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/__pycache__/_client.cpython-311.pyc index aa7652de..6ab568a7 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/__pycache__/_client.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/__pycache__/_client.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/__pycache__/_common.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/__pycache__/_common.cpython-311.pyc index 1c06e6ff..a78d2464 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/__pycache__/_common.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/__pycache__/_common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/__pycache__/_templating.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/__pycache__/_templating.cpython-311.pyc index 736f0a5d..5ea33679 100644 Binary files 
a/.venv/Lib/site-packages/huggingface_hub/inference/__pycache__/_templating.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/__pycache__/_templating.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/__pycache__/_types.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/__pycache__/_types.cpython-311.pyc index 1c50ba83..5a9323f2 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/__pycache__/_types.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/__pycache__/_types.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/__pycache__/__init__.cpython-311.pyc index 7ba3b51f..8a0675bf 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/__pycache__/_async_client.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/__pycache__/_async_client.cpython-311.pyc index b64932e6..a647d121 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/__pycache__/_async_client.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/__pycache__/_async_client.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/__init__.cpython-311.pyc index 1d82a7fa..d408d610 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/audio_classification.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/audio_classification.cpython-311.pyc index c0a49f79..ff7b6bb7 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/audio_classification.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/audio_classification.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/audio_to_audio.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/audio_to_audio.cpython-311.pyc index fbd212e4..e37b2a25 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/audio_to_audio.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/audio_to_audio.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/automatic_speech_recognition.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/automatic_speech_recognition.cpython-311.pyc index 8f3264c2..cf89da03 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/automatic_speech_recognition.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/automatic_speech_recognition.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/base.cpython-311.pyc index 501b26a5..b7b99cdc 100644 Binary files 
a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/base.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/chat_completion.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/chat_completion.cpython-311.pyc index c10e64cd..82f42ada 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/chat_completion.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/chat_completion.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/depth_estimation.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/depth_estimation.cpython-311.pyc index 63fadd82..03ce565b 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/depth_estimation.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/depth_estimation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/document_question_answering.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/document_question_answering.cpython-311.pyc index cb853860..4af0ccbf 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/document_question_answering.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/document_question_answering.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/feature_extraction.cpython-311.pyc 
b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/feature_extraction.cpython-311.pyc index 135d718a..e5200700 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/feature_extraction.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/feature_extraction.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/fill_mask.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/fill_mask.cpython-311.pyc index f75b1301..4c82d470 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/fill_mask.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/fill_mask.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/image_classification.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/image_classification.cpython-311.pyc index a8e0c995..8d53c505 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/image_classification.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/image_classification.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/image_segmentation.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/image_segmentation.cpython-311.pyc index 06465bf6..2b25a9b0 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/image_segmentation.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/image_segmentation.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/image_to_image.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/image_to_image.cpython-311.pyc index 2a74bbed..d1434763 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/image_to_image.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/image_to_image.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/image_to_text.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/image_to_text.cpython-311.pyc index eab0a0fb..65476c6e 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/image_to_text.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/image_to_text.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/object_detection.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/object_detection.cpython-311.pyc index c7f036a6..84e089c4 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/object_detection.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/object_detection.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/question_answering.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/question_answering.cpython-311.pyc index 9c5ca98d..a2ea932a 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/question_answering.cpython-311.pyc and 
b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/question_answering.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/sentence_similarity.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/sentence_similarity.cpython-311.pyc index c692f550..9d27dca1 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/sentence_similarity.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/sentence_similarity.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/summarization.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/summarization.cpython-311.pyc index 6de76493..050e9f11 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/summarization.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/summarization.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/table_question_answering.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/table_question_answering.cpython-311.pyc index ff43fe79..ab6e3bc9 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/table_question_answering.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/table_question_answering.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/text2text_generation.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/text2text_generation.cpython-311.pyc index 948b3ab6..d01c92b1 
100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/text2text_generation.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/text2text_generation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/text_classification.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/text_classification.cpython-311.pyc index 76fa4293..25312a8e 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/text_classification.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/text_classification.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/text_generation.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/text_generation.cpython-311.pyc index 1d0633a5..a731afdd 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/text_generation.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/text_generation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/text_to_audio.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/text_to_audio.cpython-311.pyc index 7c04e373..a2cb4a90 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/text_to_audio.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/text_to_audio.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/text_to_image.cpython-311.pyc 
b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/text_to_image.cpython-311.pyc index bc4c563c..ab207279 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/text_to_image.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/text_to_image.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/token_classification.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/token_classification.cpython-311.pyc index 12e1d282..e5bc3b4f 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/token_classification.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/token_classification.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/translation.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/translation.cpython-311.pyc index 7a96d0e9..c4a14521 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/translation.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/translation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/video_classification.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/video_classification.cpython-311.pyc index d37e76c1..ca305f25 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/video_classification.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/video_classification.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/visual_question_answering.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/visual_question_answering.cpython-311.pyc index c8484160..79dba23c 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/visual_question_answering.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/visual_question_answering.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/zero_shot_classification.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/zero_shot_classification.cpython-311.pyc index 37b8801f..eb8c72ae 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/zero_shot_classification.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/zero_shot_classification.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/zero_shot_image_classification.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/zero_shot_image_classification.cpython-311.pyc index 8116bb26..f3a2258b 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/zero_shot_image_classification.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/zero_shot_image_classification.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/zero_shot_object_detection.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/zero_shot_object_detection.cpython-311.pyc index e4c1d6fb..e6944b77 100644 Binary files 
a/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/zero_shot_object_detection.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/inference/_generated/types/__pycache__/zero_shot_object_detection.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/__init__.cpython-311.pyc index cf9a5675..0d90af30 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_cache_assets.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_cache_assets.cpython-311.pyc index c1621bfd..1c409b78 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_cache_assets.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_cache_assets.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_cache_manager.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_cache_manager.cpython-311.pyc index d5f3a0dc..6a49ef88 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_cache_manager.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_cache_manager.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_chunk_utils.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_chunk_utils.cpython-311.pyc index 6c4707d8..bea16a4b 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_chunk_utils.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_chunk_utils.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_datetime.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_datetime.cpython-311.pyc index a3aa0890..69d40d34 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_datetime.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_datetime.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_deprecation.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_deprecation.cpython-311.pyc index a9c39d8f..e634ba16 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_deprecation.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_deprecation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_errors.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_errors.cpython-311.pyc index 0070e573..86c45cf9 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_errors.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_errors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_experimental.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_experimental.cpython-311.pyc index c9e63584..af144d95 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_experimental.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_experimental.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_fixes.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_fixes.cpython-311.pyc index cb931016..1bc9d7b1 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_fixes.cpython-311.pyc and 
b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_fixes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_git_credential.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_git_credential.cpython-311.pyc index 31ba2e5d..ea5e0cb8 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_git_credential.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_git_credential.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_headers.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_headers.cpython-311.pyc index 80480cdb..0977e855 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_headers.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_headers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_hf_folder.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_hf_folder.cpython-311.pyc index 5abf593e..0b4476ba 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_hf_folder.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_hf_folder.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_http.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_http.cpython-311.pyc index bb533ba8..7fdc9b5c 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_http.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_http.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_pagination.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_pagination.cpython-311.pyc index 40841378..ecc86df1 100644 Binary files 
a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_pagination.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_pagination.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_paths.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_paths.cpython-311.pyc index d8cc031a..313f59cd 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_paths.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_paths.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_runtime.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_runtime.cpython-311.pyc index 2321ffb7..fca21c46 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_runtime.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_runtime.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_safetensors.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_safetensors.cpython-311.pyc index 0974d795..25bef58d 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_safetensors.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_safetensors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_subprocess.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_subprocess.cpython-311.pyc index 04cb365a..97e9cfdf 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_subprocess.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_subprocess.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_telemetry.cpython-311.pyc 
b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_telemetry.cpython-311.pyc index 4b846b9d..04ccfc0c 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_telemetry.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_telemetry.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_token.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_token.cpython-311.pyc index ebc0747d..0cfe8079 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_token.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_token.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_typing.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_typing.cpython-311.pyc index 652fe4c9..21b8017f 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_typing.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_typing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_validators.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_validators.cpython-311.pyc index 80de666b..fd543773 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_validators.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/_validators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/endpoint_helpers.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/endpoint_helpers.cpython-311.pyc index 1c57188e..e83e67ee 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/endpoint_helpers.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/endpoint_helpers.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/insecure_hashlib.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/insecure_hashlib.cpython-311.pyc index fd496fa3..dd495bbc 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/insecure_hashlib.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/insecure_hashlib.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/logging.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/logging.cpython-311.pyc index 57c0a68b..483784d1 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/logging.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/logging.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/sha.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/sha.cpython-311.pyc index 59662317..8926b777 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/sha.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/sha.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/tqdm.cpython-311.pyc b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/tqdm.cpython-311.pyc index 350ba362..f9277935 100644 Binary files a/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/tqdm.cpython-311.pyc and b/.venv/Lib/site-packages/huggingface_hub/utils/__pycache__/tqdm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/idna/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/idna/__pycache__/__init__.cpython-311.pyc index dc5f8e2b..569fa057 100644 Binary files a/.venv/Lib/site-packages/idna/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/idna/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/idna/__pycache__/core.cpython-311.pyc b/.venv/Lib/site-packages/idna/__pycache__/core.cpython-311.pyc index acefc6e7..eeb9aee6 100644 Binary files a/.venv/Lib/site-packages/idna/__pycache__/core.cpython-311.pyc and b/.venv/Lib/site-packages/idna/__pycache__/core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/idna/__pycache__/idnadata.cpython-311.pyc b/.venv/Lib/site-packages/idna/__pycache__/idnadata.cpython-311.pyc index efd97fc9..0f91033d 100644 Binary files a/.venv/Lib/site-packages/idna/__pycache__/idnadata.cpython-311.pyc and b/.venv/Lib/site-packages/idna/__pycache__/idnadata.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/idna/__pycache__/intranges.cpython-311.pyc b/.venv/Lib/site-packages/idna/__pycache__/intranges.cpython-311.pyc index 2a05cbd9..506841b3 100644 Binary files a/.venv/Lib/site-packages/idna/__pycache__/intranges.cpython-311.pyc and b/.venv/Lib/site-packages/idna/__pycache__/intranges.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/idna/__pycache__/package_data.cpython-311.pyc b/.venv/Lib/site-packages/idna/__pycache__/package_data.cpython-311.pyc index 1d9bc382..a3dc24fa 100644 Binary files a/.venv/Lib/site-packages/idna/__pycache__/package_data.cpython-311.pyc and b/.venv/Lib/site-packages/idna/__pycache__/package_data.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio-2.34.1.dist-info/INSTALLER b/.venv/Lib/site-packages/imageio-2.34.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/Lib/site-packages/imageio-2.34.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/Lib/site-packages/imageio-2.34.1.dist-info/LICENSE b/.venv/Lib/site-packages/imageio-2.34.1.dist-info/LICENSE new file mode 100644 index 00000000..33b13520 --- /dev/null +++ b/.venv/Lib/site-packages/imageio-2.34.1.dist-info/LICENSE @@ -0,0 +1,24 @@ +Copyright (c) 2014-2022, imageio developers +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + diff --git a/.venv/Lib/site-packages/imageio-2.34.1.dist-info/METADATA b/.venv/Lib/site-packages/imageio-2.34.1.dist-info/METADATA new file mode 100644 index 00000000..6dd6a621 --- /dev/null +++ b/.venv/Lib/site-packages/imageio-2.34.1.dist-info/METADATA @@ -0,0 +1,133 @@ +Metadata-Version: 2.1 +Name: imageio +Version: 2.34.1 +Summary: Library for reading and writing a wide range of image, video, scientific, and volumetric data formats. 
+Home-page: https://github.com/imageio/imageio +Download-URL: http://pypi.python.org/pypi/imageio +Author: imageio contributors +Author-email: almar.klein@gmail.com +License: BSD-2-Clause +Keywords: image video volume imread imwrite io animation ffmpeg +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Science/Research +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Provides: imageio +Requires-Python: >=3.8 +License-File: LICENSE +Requires-Dist: numpy +Requires-Dist: pillow >=8.3.2 +Provides-Extra: all-plugins +Requires-Dist: astropy ; extra == 'all-plugins' +Requires-Dist: av ; extra == 'all-plugins' +Requires-Dist: imageio-ffmpeg ; extra == 'all-plugins' +Requires-Dist: pillow-heif ; extra == 'all-plugins' +Requires-Dist: psutil ; extra == 'all-plugins' +Requires-Dist: tifffile ; extra == 'all-plugins' +Provides-Extra: all-plugins-pypy +Requires-Dist: av ; extra == 'all-plugins-pypy' +Requires-Dist: imageio-ffmpeg ; extra == 'all-plugins-pypy' +Requires-Dist: pillow-heif ; extra == 'all-plugins-pypy' +Requires-Dist: psutil ; extra == 'all-plugins-pypy' +Requires-Dist: tifffile ; extra == 'all-plugins-pypy' +Provides-Extra: bsdf +Provides-Extra: build +Requires-Dist: wheel ; extra == 'build' +Provides-Extra: dev +Requires-Dist: pytest ; extra == 'dev' +Requires-Dist: pytest-cov ; extra == 'dev' +Requires-Dist: fsspec[github] ; extra == 'dev' +Requires-Dist: black ; extra == 'dev' +Requires-Dist: flake8 ; extra == 'dev' 
+Provides-Extra: dicom +Provides-Extra: docs +Requires-Dist: sphinx <6 ; extra == 'docs' +Requires-Dist: numpydoc ; extra == 'docs' +Requires-Dist: pydata-sphinx-theme ; extra == 'docs' +Provides-Extra: feisem +Provides-Extra: ffmpeg +Requires-Dist: imageio-ffmpeg ; extra == 'ffmpeg' +Requires-Dist: psutil ; extra == 'ffmpeg' +Provides-Extra: fits +Requires-Dist: astropy ; extra == 'fits' +Provides-Extra: freeimage +Provides-Extra: full +Requires-Dist: astropy ; extra == 'full' +Requires-Dist: av ; extra == 'full' +Requires-Dist: black ; extra == 'full' +Requires-Dist: flake8 ; extra == 'full' +Requires-Dist: fsspec[github] ; extra == 'full' +Requires-Dist: gdal ; extra == 'full' +Requires-Dist: imageio-ffmpeg ; extra == 'full' +Requires-Dist: itk ; extra == 'full' +Requires-Dist: numpydoc ; extra == 'full' +Requires-Dist: pillow-heif ; extra == 'full' +Requires-Dist: psutil ; extra == 'full' +Requires-Dist: pydata-sphinx-theme ; extra == 'full' +Requires-Dist: pytest ; extra == 'full' +Requires-Dist: pytest-cov ; extra == 'full' +Requires-Dist: sphinx <6 ; extra == 'full' +Requires-Dist: tifffile ; extra == 'full' +Requires-Dist: wheel ; extra == 'full' +Provides-Extra: gdal +Requires-Dist: gdal ; extra == 'gdal' +Provides-Extra: itk +Requires-Dist: itk ; extra == 'itk' +Provides-Extra: linting +Requires-Dist: black ; extra == 'linting' +Requires-Dist: flake8 ; extra == 'linting' +Provides-Extra: lytro +Provides-Extra: numpy +Provides-Extra: pillow +Provides-Extra: pillow-heif +Requires-Dist: pillow-heif ; extra == 'pillow-heif' +Provides-Extra: pyav +Requires-Dist: av ; extra == 'pyav' +Provides-Extra: simpleitk +Provides-Extra: spe +Provides-Extra: swf +Provides-Extra: test +Requires-Dist: pytest ; extra == 'test' +Requires-Dist: pytest-cov ; extra == 'test' +Requires-Dist: fsspec[github] ; extra == 'test' +Provides-Extra: tifffile +Requires-Dist: tifffile ; extra == 'tifffile' + + +.. 
image:: https://github.com/imageio/imageio/workflows/CI/badge.svg + :target: https://github.com/imageio/imageio/actions + + +Imageio is a Python library that provides an easy interface to read and +write a wide range of image data, including animated images, volumetric +data, and scientific formats. It is cross-platform, runs on Python 3.5+, +and is easy to install. + +Main website: https://imageio.readthedocs.io/ + + +Release notes: https://github.com/imageio/imageio/blob/master/CHANGELOG.md + +Example: + +.. code-block:: python + + >>> import imageio + >>> im = imageio.imread('imageio:astronaut.png') + >>> im.shape # im is a numpy array + (512, 512, 3) + >>> imageio.imwrite('astronaut-gray.jpg', im[:, :, 0]) + +See the `API Reference `_ +or `examples `_ +for more information. diff --git a/.venv/Lib/site-packages/imageio-2.34.1.dist-info/RECORD b/.venv/Lib/site-packages/imageio-2.34.1.dist-info/RECORD new file mode 100644 index 00000000..2b022131 --- /dev/null +++ b/.venv/Lib/site-packages/imageio-2.34.1.dist-info/RECORD @@ -0,0 +1,114 @@ +../../Scripts/imageio_download_bin.exe,sha256=auPN0XCetW5_K5jpZPQbTKCkvvbictNkaflMbB05jYY,108427 +../../Scripts/imageio_remove_bin.exe,sha256=d-C4E_U1pLsN5G14zch-I3mdGalhk0CDJs_z1wKuD-w,108423 +imageio-2.34.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +imageio-2.34.1.dist-info/LICENSE,sha256=rlmepQpJTvtyXkIKqzXR91kgDP5BhrbGSjC6Sds_0GQ,1307 +imageio-2.34.1.dist-info/METADATA,sha256=7UriWnkUpokY-aFYbqmn1WcheyJ_0fhATvgR5qhDIMU,4875 +imageio-2.34.1.dist-info/RECORD,, +imageio-2.34.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92 +imageio-2.34.1.dist-info/entry_points.txt,sha256=0-yB6XGfrx1OMPw_xigPramTcwi5M4jX6L5Edrz0OoU,130 +imageio-2.34.1.dist-info/top_level.txt,sha256=iSUjc-wEw-xbMTvMOSKg85n0-E7Ms--Mo4FLMC-J2YM,8 +imageio/__init__.py,sha256=fsxBsbnfUGdoUmac1x0sGHe0lTUPgQARjSeREhuLH7M,3272 +imageio/__main__.py,sha256=s5nidb9wRZ6AbimHTPHULt3sTXPx4mqNil67KJHZvd4,5393 
+imageio/__pycache__/__init__.cpython-311.pyc,, +imageio/__pycache__/__main__.cpython-311.pyc,, +imageio/__pycache__/freeze.cpython-311.pyc,, +imageio/__pycache__/testing.cpython-311.pyc,, +imageio/__pycache__/typing.cpython-311.pyc,, +imageio/__pycache__/v2.cpython-311.pyc,, +imageio/__pycache__/v3.cpython-311.pyc,, +imageio/config/__init__.py,sha256=8NOpL5ePrkiioJb9hRBw3rydc4iNZkMwp7VdQlP4jDc,307 +imageio/config/__pycache__/__init__.cpython-311.pyc,, +imageio/config/__pycache__/extensions.cpython-311.pyc,, +imageio/config/__pycache__/plugins.cpython-311.pyc,, +imageio/config/extensions.py,sha256=2pXqdJLXn4XDvbVml4Efzfjw9smw9ROM--JE9_db-tc,47004 +imageio/config/extensions.pyi,sha256=sLrA-wt09kPHBDJP79tGtEOX7XTcEEjRzA70O8BCsD0,605 +imageio/config/plugins.py,sha256=j7suVaEDiQwutMXcBZPxO_OA7G_7STwhCaZ-8o2zwio,20157 +imageio/config/plugins.pyi,sha256=pzH8pacqU5uldsvYOee_nhd2Hkk3mR8VQBtjeVnkkHY,706 +imageio/core/__init__.py,sha256=PSkGH8K76ntSWhwM4j7W49UmCSZf_OGaSl9fNbQP7uQ,639 +imageio/core/__pycache__/__init__.cpython-311.pyc,, +imageio/core/__pycache__/fetching.cpython-311.pyc,, +imageio/core/__pycache__/findlib.cpython-311.pyc,, +imageio/core/__pycache__/format.cpython-311.pyc,, +imageio/core/__pycache__/imopen.cpython-311.pyc,, +imageio/core/__pycache__/legacy_plugin_wrapper.cpython-311.pyc,, +imageio/core/__pycache__/request.cpython-311.pyc,, +imageio/core/__pycache__/util.cpython-311.pyc,, +imageio/core/__pycache__/v3_plugin_api.cpython-311.pyc,, +imageio/core/fetching.py,sha256=r81yBsJMqkwAXeVAuQuAzbk9etWxQUEUe4__UUjpQpc,9176 +imageio/core/findlib.py,sha256=Zrhs0rEyp8p8iSIuCoBco0dCaB5dxJVZ4lRgv82Sqm0,5552 +imageio/core/format.py,sha256=glQcJOZHEOST3u0jOa338ZxJBX_daEe6xl7-UKxuU6E,30917 +imageio/core/format.pyi,sha256=5BZF-xwp5BmG8C5ahfL48z_a2MITN0509Uf6f1phZRw,3336 +imageio/core/imopen.py,sha256=SA4OJj93B09CHsKSILdH1w3zdVWvRSopNWlGlS0f4t0,9752 +imageio/core/imopen.pyi,sha256=QcVF5tUjy6qrAK2P5J_9wj2Heb3dt9Uyz3RKZpJCfjE,1982 
+imageio/core/legacy_plugin_wrapper.py,sha256=CYGXhJY-18HkVYqyzlepM7NcZ9VLvBjFjNj64HOBqBM,12136 +imageio/core/legacy_plugin_wrapper.pyi,sha256=ENmdth_Avp2yTzuyInGWT2QXgAv72RrFRd6QH71LVqU,1064 +imageio/core/request.py,sha256=vG5n2gAu4GUdsBdajcUCalSnJJSv0wdzad4DT3iDIF8,26826 +imageio/core/request.pyi,sha256=ivqAXs3UfxhuXQfg8qsAtEVymCsppPwadztFzSXpIAo,2315 +imageio/core/util.py,sha256=Gt4NiZYKXjeB5AgyiHOFi4ntn7iTcSjj8X_kDz2R6DM,18657 +imageio/core/v3_plugin_api.py,sha256=w8wUjlT7_N6aU76DYGF3ubYYfUHTyfStvK5_xosZLPQ,15560 +imageio/freeze.py,sha256=hi9MNZz-ridgQBWcAqnd92sULek2lgmBSTmuott5lus,170 +imageio/plugins/__init__.py,sha256=GSxtio0ph5QHP2asdLvyzW8lVfiRqOii8kaqYsBO9CE,3469 +imageio/plugins/__pycache__/__init__.cpython-311.pyc,, +imageio/plugins/__pycache__/_bsdf.cpython-311.pyc,, +imageio/plugins/__pycache__/_dicom.cpython-311.pyc,, +imageio/plugins/__pycache__/_freeimage.cpython-311.pyc,, +imageio/plugins/__pycache__/_swf.cpython-311.pyc,, +imageio/plugins/__pycache__/_tifffile.cpython-311.pyc,, +imageio/plugins/__pycache__/bsdf.cpython-311.pyc,, +imageio/plugins/__pycache__/dicom.cpython-311.pyc,, +imageio/plugins/__pycache__/example.cpython-311.pyc,, +imageio/plugins/__pycache__/feisem.cpython-311.pyc,, +imageio/plugins/__pycache__/ffmpeg.cpython-311.pyc,, +imageio/plugins/__pycache__/fits.cpython-311.pyc,, +imageio/plugins/__pycache__/freeimage.cpython-311.pyc,, +imageio/plugins/__pycache__/freeimagemulti.cpython-311.pyc,, +imageio/plugins/__pycache__/gdal.cpython-311.pyc,, +imageio/plugins/__pycache__/grab.cpython-311.pyc,, +imageio/plugins/__pycache__/lytro.cpython-311.pyc,, +imageio/plugins/__pycache__/npz.cpython-311.pyc,, +imageio/plugins/__pycache__/opencv.cpython-311.pyc,, +imageio/plugins/__pycache__/pillow.cpython-311.pyc,, +imageio/plugins/__pycache__/pillow_info.cpython-311.pyc,, +imageio/plugins/__pycache__/pillow_legacy.cpython-311.pyc,, +imageio/plugins/__pycache__/pillowmulti.cpython-311.pyc,, +imageio/plugins/__pycache__/pyav.cpython-311.pyc,, 
+imageio/plugins/__pycache__/simpleitk.cpython-311.pyc,, +imageio/plugins/__pycache__/spe.cpython-311.pyc,, +imageio/plugins/__pycache__/swf.cpython-311.pyc,, +imageio/plugins/__pycache__/tifffile.cpython-311.pyc,, +imageio/plugins/__pycache__/tifffile_v3.cpython-311.pyc,, +imageio/plugins/_bsdf.py,sha256=b-QjkZvz9DPDbygiKhee-47Ld2eOqxpYEdZ1mnrRPJ4,32753 +imageio/plugins/_dicom.py,sha256=J2uWs5eiEJ0bvetgBOMYJ8rhy2HIbVnEa8UhSA_nc0Y,34060 +imageio/plugins/_freeimage.py,sha256=GD25ZqqvbFnBILPRYHrTb5qbFsvXBVKv_qIE3139D68,51740 +imageio/plugins/_swf.py,sha256=kh3H2v98bgHpVagGNbhGUodh0s-weiESraX6qzMnD2k,25760 +imageio/plugins/_tifffile.py,sha256=C_yZBRmEZ77jT26Xqxf3UpOr36WNjMSrD__B1_p6Fyk,371536 +imageio/plugins/bsdf.py,sha256=spISvLLVH319wDJ8YhYcvDTaJe2acElgWSvgqEkpd_g,12852 +imageio/plugins/dicom.py,sha256=mQYNbTyum4jVhjZQ8TU-4A5csHpQfT-BRBBCP5fu6Zs,12621 +imageio/plugins/example.py,sha256=4POb_LDQtSxHWxiflGqGKKKKrpItqLIFQeU8x7tro-c,5501 +imageio/plugins/feisem.py,sha256=AKwZv7Zac0_grnr-wnzU7R0Zf2KSUe91k06evPa1NI8,3360 +imageio/plugins/ffmpeg.py,sha256=N8Qq1TU5gr7U9IM-FCEuM9VIy1Jv875OC_XorStoOPI,29930 +imageio/plugins/fits.py,sha256=XnlmeC79sIiIPd_7IDx05-p3-b2unO4CVR0nWAA4ph0,4531 +imageio/plugins/freeimage.py,sha256=SuzYuGvCtZIiXIr51dWRTl5CATzRUqb8pNCSIg9YZv8,14645 +imageio/plugins/freeimagemulti.py,sha256=7jW3mJX-ZVnDqe2veIvU9wPY_x0EBOmPKP8ppPxRO_M,11288 +imageio/plugins/gdal.py,sha256=r2Ux7MQeHCUsmdk0aGENzGX8M5hCBU7NJomcf6G8FCU,1653 +imageio/plugins/grab.py,sha256=g6KbKVQUquHro_BW6He7NNmivVV-UtcsCJoDt3rdly0,2776 +imageio/plugins/lytro.py,sha256=V3dToE-eV6jLhtae26_uHHgOx6O1LsOo0hm7nnRptMM,25310 +imageio/plugins/npz.py,sha256=7ZQr-4lQEKbfjaF6rOmpq9pQgDTUHvkZa_NHZkJWBQo,2670 +imageio/plugins/opencv.py,sha256=C2nBQQFDXuz6LOyJ1P3-S6e_7h-pJgLow7h7w4Si2tg,11629 +imageio/plugins/pillow.py,sha256=4siuR0UENadfQgdQ2z5bFWX464KMzMcfqIEKEBDzt6M,22318 +imageio/plugins/pillow_info.py,sha256=Bt5iJtQnAh6mGViPIxhxRQPNidqay9-6BleAJZkhN1w,36624 
+imageio/plugins/pillow_legacy.py,sha256=nduUoks0Jp4fbizSBDqGCD__hVLQdRB6dgEbyfjOtHE,31714 +imageio/plugins/pillowmulti.py,sha256=-wsWpq0j2WXDgQGbyUuzCmw7iqSDz7e6AYqYhs46ZE8,11807 +imageio/plugins/pyav.py,sha256=sRcR3x-rip1vtHi_hf2sUcKQ6ica4zft8Bewc8zEbO0,44959 +imageio/plugins/simpleitk.py,sha256=ldQWjkiCSZPoUnN87MtUqRIMMcIKmk8ZUeyDCQhnpG0,4107 +imageio/plugins/spe.py,sha256=UyXgHZV-3gwwU-RmJUhgDnJ843wt9H3S3Fjs84faz38,32172 +imageio/plugins/swf.py,sha256=0B9f-HF528OcHXTIF3nptoSJUu4GNId03rFLfFFOaFk,11756 +imageio/plugins/tifffile.py,sha256=m8qgNy-lJkwHwKkyp3pZn2xYsnRRwZ8FVMpM-BIs6dI,20665 +imageio/plugins/tifffile_v3.py,sha256=Vs2ngBBptUoJ6QpT9EjyNd4-dih8zzGEvcq2mRNYFXg,14335 +imageio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +imageio/testing.py,sha256=tkRPxZZpG68q_MAIux8WE8QeKbhbq6rDPVfCDsof1Ms,1597 +imageio/typing.py,sha256=qrvyFrVIs21bZCE0x802l1R-xCV4DlCNaTzPiJEZbzc,349 +imageio/v2.py,sha256=1KJ5z8Ji2nnAdy_K3vIpysG2Kg7rIcPiadNG1pwKx-E,21563 +imageio/v2.pyi,sha256=ROazbwu1rSJLBaEtXmUG2oT9BMr7ZlyyW26twgFWx5E,2250 +imageio/v3.py,sha256=ZE0IlERPT_4wryYqUOD4-LLc6dVpDZXV6N6JEQtbMiQ,9267 +imageio/v3.pyi,sha256=AtLP0IWqS-sX1qDyHPdjCCIsKGwXU5z41XOXzUj2pGQ,1344 diff --git a/.venv/Lib/site-packages/imageio-2.34.1.dist-info/WHEEL b/.venv/Lib/site-packages/imageio-2.34.1.dist-info/WHEEL new file mode 100644 index 00000000..bab98d67 --- /dev/null +++ b/.venv/Lib/site-packages/imageio-2.34.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.43.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/Lib/site-packages/imageio-2.34.1.dist-info/entry_points.txt b/.venv/Lib/site-packages/imageio-2.34.1.dist-info/entry_points.txt new file mode 100644 index 00000000..aa30161a --- /dev/null +++ b/.venv/Lib/site-packages/imageio-2.34.1.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +imageio_download_bin = imageio.__main__:download_bin_main +imageio_remove_bin = imageio.__main__:remove_bin_main diff --git 
a/.venv/Lib/site-packages/imageio-2.34.1.dist-info/top_level.txt b/.venv/Lib/site-packages/imageio-2.34.1.dist-info/top_level.txt new file mode 100644 index 00000000..a464e4cd --- /dev/null +++ b/.venv/Lib/site-packages/imageio-2.34.1.dist-info/top_level.txt @@ -0,0 +1 @@ +imageio diff --git a/.venv/Lib/site-packages/imageio/__init__.py b/.venv/Lib/site-packages/imageio/__init__.py new file mode 100644 index 00000000..dc9bdba2 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/__init__.py @@ -0,0 +1,131 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2014-2020, imageio contributors +# imageio is distributed under the terms of the (new) BSD License. + +# This docstring is used at the index of the documentation pages, and +# gets inserted into a slightly larger description (in setup.py) for +# the page on Pypi: +""" +Imageio is a Python library that provides an easy interface to read and +write a wide range of image data, including animated images, volumetric +data, and scientific formats. It is cross-platform, runs on Python 3.5+, +and is easy to install. + +Main website: https://imageio.readthedocs.io/ +""" + +# flake8: noqa + +__version__ = "2.34.1" + +import warnings + +# Load some bits from core +from .core import FormatManager, RETURN_BYTES + +# Instantiate the old format manager +formats = FormatManager() +show_formats = formats.show + +from . import v2 +from . import v3 +from . import plugins + +# import config after core to avoid circular import +from . 
import config + +# import all APIs into the top level (meta API) +from .v2 import ( + imread as imread_v2, + mimread, + volread, + mvolread, + imwrite, + mimwrite, + volwrite, + mvolwrite, + # aliases + get_reader as read, + get_writer as save, + imwrite as imsave, + mimwrite as mimsave, + volwrite as volsave, + mvolwrite as mvolsave, + # misc + help, + get_reader, + get_writer, +) +from .v3 import ( + imopen, + # imread, # Will take over once v3 is released + # imwrite, # Will take over once v3 is released + imiter, +) + + +def imread(uri, format=None, **kwargs): + """imread(uri, format=None, **kwargs) + + Reads an image from the specified file. Returns a numpy array, which + comes with a dict of meta data at its 'meta' attribute. + + Note that the image data is returned as-is, and may not always have + a dtype of uint8 (and thus may differ from what e.g. PIL returns). + + Parameters + ---------- + uri : {str, pathlib.Path, bytes, file} + The resource to load the image from, e.g. a filename, pathlib.Path, + http address or file object, see the docs for more info. + format : str + The format to use to read the file. By default imageio selects + the appropriate for you based on the filename and its contents. + kwargs : ... + Further keyword arguments are passed to the reader. See :func:`.help` + to see what arguments are available for a particular format. + """ + + warnings.warn( + "Starting with ImageIO v3 the behavior of this function will switch to that of" + " iio.v3.imread. 
To keep the current behavior (and make this warning disappear)" + " use `import imageio.v2 as imageio` or call `imageio.v2.imread` directly.", + DeprecationWarning, + stacklevel=2, + ) + + return imread_v2(uri, format=format, **kwargs) + + +__all__ = [ + "v2", + "v3", + "config", + "plugins", + # v3 API + "imopen", + "imread", + "imwrite", + "imiter", + # v2 API + "mimread", + "volread", + "mvolread", + "imwrite", + "mimwrite", + "volwrite", + "mvolwrite", + # v2 aliases + "read", + "save", + "imsave", + "mimsave", + "volsave", + "mvolsave", + # functions to deprecate + "help", + "get_reader", + "get_writer", + "formats", + "show_formats", +] diff --git a/.venv/Lib/site-packages/imageio/__main__.py b/.venv/Lib/site-packages/imageio/__main__.py new file mode 100644 index 00000000..ad0ea0b5 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/__main__.py @@ -0,0 +1,169 @@ +""" +Console scripts and associated helper methods for imageio. +""" + +import argparse +import os +from os import path as op +import shutil +import sys + + +from . import plugins +from .core import util + +# A list of plugins that require binaries from the imageio-binaries +# repository. These plugins must implement the `download` method. +PLUGINS_WITH_BINARIES = ["freeimage"] + + +def download_bin(plugin_names=["all"], package_dir=False): + """Download binary dependencies of plugins + + This is a convenience method for downloading the binaries + (e.g. for freeimage) from the imageio-binaries + repository. + + Parameters + ---------- + plugin_names: list + A list of imageio plugin names. If it contains "all", all + binary dependencies are downloaded. + package_dir: bool + If set to `True`, the binaries will be downloaded to the + `resources` directory of the imageio package instead of + to the users application data directory. Note that this + might require administrative rights if imageio is installed + in a system directory. 
+ """ + if plugin_names.count("all"): + # Use all plugins + plugin_names = PLUGINS_WITH_BINARIES + + plugin_names.sort() + print("Ascertaining binaries for: {}.".format(", ".join(plugin_names))) + + if package_dir: + # Download the binaries to the `resources` directory + # of imageio. If imageio comes as an .egg, then a cache + # directory will be created by pkg_resources (requires setuptools). + # see `imageio.core.util.resource_dirs` + # and `imageio.core.utilresource_package_dir` + directory = util.resource_package_dir() + else: + directory = None + + for plg in plugin_names: + if plg not in PLUGINS_WITH_BINARIES: + msg = "Plugin {} not registered for binary download!".format(plg) + raise Exception(msg) + mod = getattr(plugins, plg) + mod.download(directory=directory) + + +def download_bin_main(): + """Argument-parsing wrapper for `download_bin`""" + description = "Download plugin binary dependencies" + phelp = ( + "Plugin name for which to download the binary. " + + "If no argument is given, all binaries are downloaded." + ) + dhelp = ( + "Download the binaries to the package directory " + + "(default is the users application data directory). " + + "This might require administrative rights." + ) + example_text = ( + "examples:\n" + + " imageio_download_bin all\n" + + " imageio_download_bin freeimage\n" + ) + parser = argparse.ArgumentParser( + description=description, + epilog=example_text, + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + parser.add_argument("plugin", type=str, nargs="*", default="all", help=phelp) + parser.add_argument( + "--package-dir", + dest="package_dir", + action="store_true", + default=False, + help=dhelp, + ) + args = parser.parse_args() + download_bin(plugin_names=args.plugin, package_dir=args.package_dir) + + +def remove_bin(plugin_names=["all"]): + """Remove binary dependencies of plugins + + This is a convenience method that removes all binaries + dependencies for plugins downloaded by imageio. 
+ + Notes + ----- + It only makes sense to use this method if the binaries + are corrupt. + """ + if plugin_names.count("all"): + # Use all plugins + plugin_names = PLUGINS_WITH_BINARIES + + print("Removing binaries for: {}.".format(", ".join(plugin_names))) + + rdirs = util.resource_dirs() + + for plg in plugin_names: + if plg not in PLUGINS_WITH_BINARIES: + msg = "Plugin {} not registered for binary download!".format(plg) + raise Exception(msg) + + not_removed = [] + for rd in rdirs: + # plugin name is in subdirectories + for rsub in os.listdir(rd): + if rsub in plugin_names: + plgdir = op.join(rd, rsub) + try: + shutil.rmtree(plgdir) + except Exception: + not_removed.append(plgdir) + if not_removed: + nrs = ",".join(not_removed) + msg2 = ( + "These plugins files could not be removed: {}\n".format(nrs) + + "Make sure they are not used by any program and try again." + ) + raise Exception(msg2) + + +def remove_bin_main(): + """Argument-parsing wrapper for `remove_bin`""" + description = "Remove plugin binary dependencies" + phelp = ( + "Plugin name for which to remove the binary. " + + "If no argument is given, all binaries are removed." 
+ ) + example_text = ( + "examples:\n" + + " imageio_remove_bin all\n" + + " imageio_remove_bin freeimage\n" + ) + parser = argparse.ArgumentParser( + description=description, + epilog=example_text, + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + parser.add_argument("plugin", type=str, nargs="*", default="all", help=phelp) + args = parser.parse_args() + remove_bin(plugin_names=args.plugin) + + +if __name__ == "__main__": + if len(sys.argv) > 1 and sys.argv[1] == "download_bin": + download_bin_main() + elif len(sys.argv) > 1 and sys.argv[1] == "remove_bin": + remove_bin_main() + else: + raise RuntimeError("Invalid use of the imageio CLI") diff --git a/.venv/Lib/site-packages/imageio/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/imageio/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..cbc4bad3 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/__pycache__/__main__.cpython-311.pyc b/.venv/Lib/site-packages/imageio/__pycache__/__main__.cpython-311.pyc new file mode 100644 index 00000000..78f9264e Binary files /dev/null and b/.venv/Lib/site-packages/imageio/__pycache__/__main__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/__pycache__/freeze.cpython-311.pyc b/.venv/Lib/site-packages/imageio/__pycache__/freeze.cpython-311.pyc new file mode 100644 index 00000000..8c42821e Binary files /dev/null and b/.venv/Lib/site-packages/imageio/__pycache__/freeze.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/__pycache__/testing.cpython-311.pyc b/.venv/Lib/site-packages/imageio/__pycache__/testing.cpython-311.pyc new file mode 100644 index 00000000..a427e497 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/__pycache__/testing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/__pycache__/typing.cpython-311.pyc 
b/.venv/Lib/site-packages/imageio/__pycache__/typing.cpython-311.pyc new file mode 100644 index 00000000..963d4314 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/__pycache__/typing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/__pycache__/v2.cpython-311.pyc b/.venv/Lib/site-packages/imageio/__pycache__/v2.cpython-311.pyc new file mode 100644 index 00000000..ebb004d3 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/__pycache__/v2.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/__pycache__/v3.cpython-311.pyc b/.venv/Lib/site-packages/imageio/__pycache__/v3.cpython-311.pyc new file mode 100644 index 00000000..a29c1a5e Binary files /dev/null and b/.venv/Lib/site-packages/imageio/__pycache__/v3.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/config/__init__.py b/.venv/Lib/site-packages/imageio/config/__init__.py new file mode 100644 index 00000000..ca78dd22 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/config/__init__.py @@ -0,0 +1,16 @@ +from .extensions import ( + extension_list, + known_extensions, + FileExtension, + video_extensions, +) +from .plugins import known_plugins, PluginConfig + +__all__ = [ + "known_plugins", + "PluginConfig", + "extension_list", + "known_extensions", + "FileExtension", + "video_extensions", +] diff --git a/.venv/Lib/site-packages/imageio/config/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/imageio/config/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..d9bf5803 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/config/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/config/__pycache__/extensions.cpython-311.pyc b/.venv/Lib/site-packages/imageio/config/__pycache__/extensions.cpython-311.pyc new file mode 100644 index 00000000..6ae7d583 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/config/__pycache__/extensions.cpython-311.pyc differ diff 
--git a/.venv/Lib/site-packages/imageio/config/__pycache__/plugins.cpython-311.pyc b/.venv/Lib/site-packages/imageio/config/__pycache__/plugins.cpython-311.pyc new file mode 100644 index 00000000..0bf4ceec Binary files /dev/null and b/.venv/Lib/site-packages/imageio/config/__pycache__/plugins.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/config/extensions.py b/.venv/Lib/site-packages/imageio/config/extensions.py new file mode 100644 index 00000000..46a6415e --- /dev/null +++ b/.venv/Lib/site-packages/imageio/config/extensions.py @@ -0,0 +1,2002 @@ +""" +A set of objects representing each file extension recognized by ImageIO. If an +extension is not listed here it is still supported, as long as there exists a +supporting backend. + +""" + + +class FileExtension: + """File Extension Metadata + + This class holds information about a image file format associated with a + given extension. This information is used to track plugins that are known to + be able to handle a particular format. It also contains additional + information about a format, which is used when creating the supported format + docs. + + Plugins known to be able to handle this format are ordered by a ``priority`` + list. This list is used to determine the ideal plugin to use when choosing a + plugin based on file extension. + + Parameters + ---------- + extension : str + The name of the extension including the initial dot, e.g. ".png". + priority : List + A list of plugin names (entries in config.known_plugins) that can handle + this format. The position of a plugin expresses a preference, e.g. + ["plugin1", "plugin2"] indicates that, if available, plugin1 should be + preferred over plugin2 when handling a request related to this format. + name : str + The full name of the format. + description : str + A description of the format. + external_link : str + A link to further information about the format. Typically, the format's + specification. 
+ volume_support : str + If True, the format/extension supports volumetric image data. + + Examples + -------- + >>> FileExtension( + name="Bitmap", + extension=".bmp", + priority=["pillow", "BMP-PIL", "BMP-FI", "ITK"], + external_link="https://en.wikipedia.org/wiki/BMP_file_format", + ) + + """ + + def __init__( + self, + *, + extension, + priority, + name=None, + description=None, + external_link=None, + volume_support=False + ): + self.extension = extension + self.priority = priority + self.name = name + self.description = description + self.external_link = external_link + self.default_priority = priority.copy() + self.volume_support = volume_support + + def reset(self): + self.priority = self.default_priority.copy() + + +extension_list = [ + FileExtension( + name="Hasselblad raw", + extension=".3fr", + priority=["RAW-FI"], + ), + FileExtension( + name="Sony alpha", + extension=".arw", + priority=["RAW-FI"], + ), + FileExtension( + name="Animated Portable Network Graphics", + external_link="https://en.wikipedia.org/wiki/APNG", + extension=".apng", + priority=["pillow", "pyav"], + ), + FileExtension( + name="Audio Video Interleave", + extension=".avi", + priority=["FFMPEG"], + ), + FileExtension( + name="Casio raw format", + extension=".bay", + priority=["RAW-FI"], + ), + FileExtension( + extension=".blp", + priority=["pillow"], + ), + FileExtension( + name="Bitmap", + extension=".bmp", + priority=["pillow", "BMP-PIL", "BMP-FI", "ITK", "pyav", "opencv"], + external_link="https://en.wikipedia.org/wiki/BMP_file_format", + ), + FileExtension( + name="Device-Independent Bitmap", + extension=".dip", + priority=["opencv"], + external_link="https://en.wikipedia.org/wiki/BMP_file_format", + ), + FileExtension( + name="Re-Volt mipmap", + extension=".bmq", + priority=["RAW-FI"], + ), + FileExtension( + name="Binary Structured Data Format", + extension=".bsdf", + priority=["BSDF"], + external_link="http://bsdf.io/", + ), + FileExtension( + name="Binary Universal Form for 
the Representation of meteorological data", + extension=".bufr", + priority=["pillow", "BUFR-PIL"], + ), + FileExtension( + name="Silicon Graphics Image", + extension=".bw", + priority=["pillow", "SGI-PIL", "SGI-FI"], + ), + FileExtension( + name="Scirra Construct", + extension=".cap", + priority=["RAW-FI"], + ), + FileExtension( + name="AMETEK High Speed Camera Format", + extension=".cine", + priority=["RAW-FI"], + external_link="https://phantomhighspeed-knowledge.secure.force.com/servlet/fileField?id=0BE1N000000kD2i#:~:text=Cine%20is%20a%20video%20file,camera%20model%20and%20image%20resolution", + ), + FileExtension(extension=".cr2", priority=["RAW-FI"]), + FileExtension( + extension=".crw", + priority=["RAW-FI"], + ), + FileExtension( + extension=".cs1", + priority=["RAW-FI"], + ), + FileExtension( + name="Computerized Tomography", + extension=".ct", + priority=["DICOM"], + ), + FileExtension( + name="Windows Cursor Icons", + extension=".cur", + priority=["pillow", "CUR-PIL"], + ), + FileExtension( + name="Dr. 
Halo", + extension=".cut", + priority=["CUT-FI"], + ), + FileExtension( + extension=".dc2", + priority=["RAW-FI"], + ), + FileExtension( + name="DICOM file format", + extension=".dcm", + priority=["DICOM", "ITK"], + ), + FileExtension( + extension=".dcr", + priority=["RAW-FI"], + ), + FileExtension( + name="Intel DCX", + extension=".dcx", + priority=["pillow", "DCX-PIL"], + ), + FileExtension( + name="DirectX Texture Container", + extension=".dds", + priority=["pillow", "DDS-FI", "DDS-PIL"], + ), + FileExtension( + name="Windows Bitmap", + extension=".dib", + priority=["pillow", "DIB-PIL"], + ), + FileExtension( + name="DICOM file format", + extension=".dicom", + priority=["ITK"], + ), + FileExtension( + extension=".dng", + priority=["RAW-FI"], + ), + FileExtension( + extension=".drf", + priority=["RAW-FI"], + ), + FileExtension( + extension=".dsc", + priority=["RAW-FI"], + ), + FileExtension( + name="Enhanced Compression Wavelet", + extension=".ecw", + priority=["GDAL"], + ), + FileExtension( + name="Windows Metafile", + extension=".emf", + priority=["pillow", "WMF-PIL"], + ), + FileExtension( + name="Encapsulated Postscript", + extension=".eps", + priority=["pillow", "EPS-PIL"], + ), + FileExtension( + extension=".erf", + priority=["RAW-FI"], + ), + FileExtension( + name="OpenEXR", + extension=".exr", + external_link="https://openexr.readthedocs.io/en/latest/", + priority=["EXR-FI", "pyav", "opencv"], + ), + FileExtension( + extension=".fff", + priority=["RAW-FI"], + ), + FileExtension( + name="Flexible Image Transport System File", + extension=".fit", + priority=["pillow", "FITS-PIL", "FITS"], + ), + FileExtension( + name="Flexible Image Transport System File", + extension=".fits", + priority=["pillow", "FITS-PIL", "FITS", "pyav"], + ), + FileExtension( + name="Autodesk FLC Animation", + extension=".flc", + priority=["pillow", "FLI-PIL"], + ), + FileExtension( + name="Autodesk FLI Animation", + extension=".fli", + priority=["pillow", "FLI-PIL"], + ), + 
FileExtension( + name="Kodak FlashPix", + extension=".fpx", + priority=["pillow", "FPX-PIL"], + ), + FileExtension( + name="Independence War 2: Edge Of Chaos Texture Format", + extension=".ftc", + priority=["pillow", "FTEX-PIL"], + ), + FileExtension( + name="Flexible Image Transport System File", + extension=".fts", + priority=["FITS"], + ), + FileExtension( + name="Independence War 2: Edge Of Chaos Texture Format", + extension=".ftu", + priority=["pillow", "FTEX-PIL"], + ), + FileExtension( + name="Flexible Image Transport System File", + extension=".fz", + priority=["FITS"], + ), + FileExtension( + name="Raw fax format CCITT G.3", + extension=".g3", + priority=["G3-FI"], + ), + FileExtension( + name="GIMP brush file", + extension=".gbr", + priority=["pillow", "GBR-PIL"], + ), + FileExtension( + name="Grassroots DICOM", + extension=".gdcm", + priority=["ITK"], + ), + FileExtension( + name="Graphics Interchange Format", + extension=".gif", + priority=["pillow", "GIF-PIL", "pyav"], + ), + FileExtension( + name="UMDS GIPL", + extension=".gipl", + priority=["ITK"], + ), + FileExtension( + name="gridded meteorological data", + extension=".grib", + priority=["pillow", "GRIB-PIL"], + ), + FileExtension( + name="Hierarchical Data Format 5", + extension=".h5", + priority=["pillow", "HDF5-PIL"], + ), + FileExtension( + name="Hierarchical Data Format 5", + extension=".hdf", + priority=["pillow", "HDF5-PIL"], + ), + FileExtension( + name="Hierarchical Data Format 5", + extension=".hdf5", + priority=["ITK"], + ), + FileExtension( + name="JPEG Extended Range", + extension=".hdp", + priority=["JPEG-XR-FI"], + ), + FileExtension( + name="High Dynamic Range Image", + extension=".hdr", + priority=["HDR-FI", "ITK", "opencv"], + ), + FileExtension( + extension=".ia", + priority=["RAW-FI"], + ), + FileExtension( + extension=".icb", + priority=["pillow"], + ), + FileExtension( + name="Mac OS Icon File", + extension=".icns", + priority=["pillow", "ICNS-PIL"], + ), + FileExtension( + 
name="Windows Icon File", + extension=".ico", + priority=["pillow", "ICO-FI", "ICO-PIL", "pyav"], + ), + FileExtension( + name="ILBM Interleaved Bitmap", + extension=".iff", + priority=["IFF-FI"], + ), + FileExtension( + name="IPTC/NAA", + extension=".iim", + priority=["pillow", "IPTC-PIL"], + ), + FileExtension( + extension=".iiq", + priority=["RAW-FI"], + ), + FileExtension( + name="IFUNC Image Memory", + extension=".im", + priority=["pillow", "IM-PIL"], + ), + FileExtension( + extension=".img", + priority=["ITK", "GDAL"], + ), + FileExtension( + extension=".img.gz", + priority=["ITK"], + ), + FileExtension( + name="IM Tools", + extension=".IMT", + priority=["pillow", "IMT-PIL"], + ), + FileExtension( + name="Image Processing Lab", + extension=".ipl", + priority=["ITK"], + ), + FileExtension( + name="JPEG 2000", + extension=".j2c", + priority=["pillow", "J2K-FI", "JPEG2000-PIL", "pyav"], + ), + FileExtension( + name="JPEG 2000", + extension=".j2k", + priority=["pillow", "J2K-FI", "JPEG2000-PIL", "pyav"], + ), + FileExtension( + name="JPEG", + extension=".jfif", + priority=["pillow", "JPEG-PIL"], + ), + FileExtension( + name="JPEG", + extension=".jif", + priority=["JPEG-FI"], + ), + FileExtension( + name="JPEG Network Graphics", + extension=".jng", + priority=["JNG-FI"], + ), + FileExtension( + name="JPEG 2000", + extension=".jp2", + priority=["pillow", "JP2-FI", "JPEG2000-PIL", "pyav", "opencv"], + ), + FileExtension( + name="JPEG 2000", + extension=".jpc", + priority=["pillow", "JPEG2000-PIL"], + ), + FileExtension( + name="JPEG", + extension=".jpe", + priority=["pillow", "JPEG-FI", "JPEG-PIL", "opencv"], + ), + FileExtension( + name="Joint Photographic Experts Group", + extension=".jpeg", + priority=["pillow", "JPEG-PIL", "JPEG-FI", "ITK", "GDAL", "pyav", "opencv"], + ), + FileExtension( + name="JPEG 2000", + extension=".jpf", + priority=["pillow", "JPEG2000-PIL"], + ), + FileExtension( + name="Joint Photographic Experts Group", + extension=".jpg", + 
priority=["pillow", "JPEG-PIL", "JPEG-FI", "ITK", "GDAL", "pyav", "opencv"], + ), + FileExtension( + name="JPEG 2000", + extension=".jpx", + priority=["pillow", "JPEG2000-PIL"], + ), + FileExtension( + name="JPEG Extended Range", + extension=".jxr", + priority=["JPEG-XR-FI"], + ), + FileExtension( + extension=".k25", + priority=["RAW-FI"], + ), + FileExtension( + extension=".kc2", + priority=["RAW-FI"], + ), + FileExtension( + extension=".kdc", + priority=["RAW-FI"], + ), + FileExtension( + name="C64 Koala Graphics", + extension=".koa", + priority=["KOALA-FI"], + ), + FileExtension( + name="ILBM Interleaved Bitmap", + extension=".lbm", + priority=["IFF-FI"], + ), + FileExtension( + name="Lytro F01", + extension=".lfp", + priority=["LYTRO-LFP"], + ), + FileExtension( + name="Lytro Illum", + extension=".lfr", + priority=["LYTRO-LFR"], + ), + FileExtension( + name="ZEISS LSM", + extension=".lsm", + priority=["tifffile", "ITK", "TIFF"], + ), + FileExtension( + name="McIdas area file", + extension=".MCIDAS", + priority=["pillow", "MCIDAS-PIL"], + external_link="https://www.ssec.wisc.edu/mcidas/doc/prog_man/2003print/progman2003-formats.html", + ), + FileExtension( + extension=".mdc", + priority=["RAW-FI"], + ), + FileExtension( + extension=".mef", + priority=["RAW-FI"], + ), + FileExtension( + name="FreeSurfer File Format", + extension=".mgh", + priority=["ITK"], + ), + FileExtension( + name="ITK MetaImage", + extension=".mha", + priority=["ITK"], + ), + FileExtension( + name="ITK MetaImage Header", + extension=".mhd", + priority=["ITK"], + ), + FileExtension( + name="Microsoft Image Composer", + extension=".mic", + priority=["pillow", "MIC-PIL"], + ), + FileExtension( + name="Matroska Multimedia Container", + extension=".mkv", + priority=["FFMPEG", "pyav"], + ), + FileExtension( + name="Medical Imaging NetCDF", + extension=".mnc", + priority=["ITK"], + ), + FileExtension( + name="Medical Imaging NetCDF 2", + extension=".mnc2", + priority=["ITK"], + ), + FileExtension( 
+ name="Leaf Raw Image Format", + extension=".mos", + priority=["RAW-FI"], + ), + FileExtension( + name="QuickTime File Format", + extension=".mov", + priority=["FFMPEG", "pyav"], + ), + FileExtension( + name="MPEG-4 Part 14", + extension=".mp4", + priority=["FFMPEG", "pyav"], + ), + FileExtension( + name="MPEG-1 Moving Picture Experts Group", + extension=".mpeg", + priority=["FFMPEG", "pyav"], + ), + FileExtension( + name="Moving Picture Experts Group", + extension=".mpg", + priority=["pillow", "FFMPEG", "pyav"], + ), + FileExtension( + name="JPEG Multi-Picture Format", + extension=".mpo", + priority=["pillow", "MPO-PIL"], + ), + FileExtension( + name="Magnetic resonance imaging", + extension=".mri", + priority=["DICOM"], + ), + FileExtension( + extension=".mrw", + priority=["RAW-FI"], + ), + FileExtension( + name="Windows Paint", + extension=".msp", + priority=["pillow", "MSP-PIL"], + ), + FileExtension( + extension=".nef", + priority=["RAW-FI"], + ), + FileExtension( + extension=".nhdr", + priority=["ITK"], + ), + FileExtension( + extension=".nia", + priority=["ITK"], + ), + FileExtension( + extension=".nii", + priority=["ITK"], + ), + FileExtension( + name="nii.gz", + extension=".nii.gz", + priority=["ITK"], + ), + FileExtension( + name="Numpy Array", + extension=".npz", + priority=["NPZ"], + volume_support=True, + ), + FileExtension( + extension=".nrrd", + priority=["ITK"], + ), + FileExtension( + extension=".nrw", + priority=["RAW-FI"], + ), + FileExtension( + extension=".orf", + priority=["RAW-FI"], + ), + FileExtension( + extension=".palm", + priority=["pillow"], + ), + FileExtension( + name="Portable Bitmap", + extension=".pbm", + priority=["PGM-FI", "PGMRAW-FI", "pyav", "opencv"], + ), + FileExtension( + name="Kodak PhotoCD", + extension=".pcd", + priority=["pillow", "PCD-FI", "PCD-PIL"], + ), + FileExtension( + name="Macintosh PICT", + extension=".pct", + priority=["PICT-FI"], + ), + FileExtension( + name="Zsoft Paintbrush", + extension=".PCX", + 
priority=["pillow", "PCX-FI", "PCX-PIL"], + ), + FileExtension( + extension=".pdf", + priority=["pillow"], + ), + FileExtension( + extension=".pef", + priority=["RAW-FI"], + ), + FileExtension( + extension=".pfm", + priority=["PFM-FI", "pyav", "opencv"], + ), + FileExtension( + name="Portable Greymap", + extension=".pgm", + priority=["pillow", "PGM-FI", "PGMRAW-FI", "pyav", "opencv"], + ), + FileExtension( + name="Macintosh PICT", + extension=".pic", + priority=["PICT-FI", "ITK", "opencv"], + ), + FileExtension( + name="Macintosh PICT", + extension=".pict", + priority=["PICT-FI"], + ), + FileExtension( + name="Portable Network Graphics", + extension=".png", + priority=["pillow", "PNG-PIL", "PNG-FI", "ITK", "pyav", "opencv"], + ), + FileExtension( + name="Portable Image Format", + extension=".pnm", + priority=["pillow", "opencv"], + ), + FileExtension( + name="Pbmplus image", + extension=".ppm", + priority=["pillow", "PPM-PIL", "pyav"], + ), + FileExtension( + name="Pbmplus image", + extension=".pbm", + priority=["pillow", "PPM-PIL", "PPM-FI"], + ), + FileExtension( + name="Portable image format", + extension=".pxm", + priority=["opencv"], + ), + FileExtension( + name="Portable Pixelmap (ASCII)", + extension=".ppm", + priority=["PPM-FI", "opencv"], + ), + FileExtension( + name="Portable Pixelmap (Raw)", + extension=".ppm", + priority=["PPMRAW-FI"], + ), + FileExtension( + name="Ghostscript", + extension=".ps", + priority=["pillow", "EPS-PIL"], + ), + FileExtension( + name="Adope Photoshop 2.5 and 3.0", + extension=".psd", + priority=["pillow", "PSD-PIL", "PSD-FI"], + ), + FileExtension( + extension=".ptx", + priority=["RAW-FI"], + ), + FileExtension( + extension=".pxn", + priority=["RAW-FI"], + ), + FileExtension( + name="PIXAR raster image", + extension=".pxr", + priority=["pillow", "PIXAR-PIL"], + ), + FileExtension( + extension=".qtk", + priority=["RAW-FI"], + ), + FileExtension( + extension=".raf", + priority=["RAW-FI"], + ), + FileExtension( + name="Sun Raster 
File", + extension=".ras", + priority=["pillow", "SUN-PIL", "RAS-FI", "pyav", "opencv"], + ), + FileExtension( + name="Sun Raster File", + extension=".sr", + priority=["opencv"], + ), + FileExtension( + extension=".raw", + priority=["RAW-FI", "LYTRO-ILLUM-RAW", "LYTRO-F01-RAW"], + ), + FileExtension( + extension=".rdc", + priority=["RAW-FI"], + ), + FileExtension( + name="Silicon Graphics Image", + extension=".rgb", + priority=["pillow", "SGI-PIL"], + ), + FileExtension( + name="Silicon Graphics Image", + extension=".rgba", + priority=["pillow", "SGI-PIL"], + ), + FileExtension( + extension=".rw2", + priority=["RAW-FI"], + ), + FileExtension( + extension=".rwl", + priority=["RAW-FI"], + ), + FileExtension( + extension=".rwz", + priority=["RAW-FI"], + ), + FileExtension( + name="Silicon Graphics Image", + extension=".sgi", + priority=["pillow", "SGI-PIL", "pyav"], + ), + FileExtension( + name="SPE File Format", + extension=".spe", + priority=["SPE"], + ), + FileExtension( + extension=".SPIDER", + priority=["pillow", "SPIDER-PIL"], + ), + FileExtension( + extension=".sr2", + priority=["RAW-FI"], + ), + FileExtension( + extension=".srf", + priority=["RAW-FI"], + ), + FileExtension( + extension=".srw", + priority=["RAW-FI"], + ), + FileExtension( + extension=".sti", + priority=["RAW-FI"], + ), + FileExtension( + extension=".stk", + priority=["tifffile", "TIFF"], + ), + FileExtension( + name="ShockWave Flash", + extension=".swf", + priority=["SWF", "pyav"], + ), + FileExtension( + name="Truevision TGA", + extension=".targa", + priority=["pillow", "TARGA-FI"], + ), + FileExtension( + name="Truevision TGA", + extension=".tga", + priority=["pillow", "TGA-PIL", "TARGA-FI", "pyav"], + ), + FileExtension( + name="Tagged Image File", + extension=".tif", + priority=[ + "tifffile", + "TIFF", + "pillow", + "TIFF-PIL", + "TIFF-FI", + "FEI", + "ITK", + "GDAL", + "pyav", + "opencv", + ], + volume_support=True, + ), + FileExtension( + name="Tagged Image File Format", + 
extension=".tiff", + priority=[ + "tifffile", + "TIFF", + "pillow", + "TIFF-PIL", + "TIFF-FI", + "FEI", + "ITK", + "GDAL", + "pyav", + "opencv", + ], + volume_support=True, + ), + FileExtension( + extension=".vda", + priority=["pillow"], + ), + FileExtension( + extension=".vst", + priority=["pillow"], + ), + FileExtension( + extension=".vtk", + priority=["ITK"], + ), + FileExtension( + name="Wireless Bitmap", + extension=".wap", + priority=["WBMP-FI"], + ), + FileExtension( + name="Wireless Bitmap", + extension=".wbm", + priority=["WBMP-FI"], + ), + FileExtension( + name="Wireless Bitmap", + extension=".wbmp", + priority=["WBMP-FI"], + ), + FileExtension( + name="JPEG Extended Range", + extension=".wdp", + priority=["JPEG-XR-FI"], + ), + FileExtension( + name="Matroska", + extension=".webm", + priority=["FFMPEG", "pyav"], + ), + FileExtension( + name="Google WebP", + extension=".webp", + priority=["pillow", "WEBP-FI", "pyav", "opencv"], + ), + FileExtension( + name="Windows Meta File", + extension=".wmf", + priority=["pillow", "WMF-PIL"], + ), + FileExtension( + name="Windows Media Video", + extension=".wmv", + priority=["FFMPEG"], + ), + FileExtension( + name="X11 Bitmap", + extension=".xbm", + priority=["pillow", "XBM-PIL", "XBM-FI", "pyav"], + ), + FileExtension( + name="X11 Pixel Map", + extension=".xpm", + priority=["pillow", "XPM-PIL", "XPM-FI"], + ), + FileExtension( + name="Thumbnail Image", + extension=".XVTHUMB", + priority=["pillow", "XVTHUMB-PIL"], + ), + FileExtension( + extension=".dpx", + priority=["pyav"], + ), + FileExtension( + extension=".im1", + priority=["pyav"], + ), + FileExtension( + extension=".im24", + priority=["pyav"], + ), + FileExtension( + extension=".im8", + priority=["pyav"], + ), + FileExtension( + extension=".jls", + priority=["pyav"], + ), + FileExtension( + extension=".ljpg", + priority=["pyav"], + ), + FileExtension( + extension=".pam", + priority=["pyav"], + ), + FileExtension( + extension=".pcx", + priority=["pyav"], + ), + 
FileExtension( + extension=".pgmyuv", + priority=["pyav"], + ), + FileExtension( + extension=".pix", + priority=["pyav"], + ), + FileExtension( + extension=".ppm", + priority=["pyav"], + ), + FileExtension( + extension=".rs", + priority=["pyav"], + ), + FileExtension( + extension=".sun", + priority=["pyav"], + ), + FileExtension( + extension=".sunras", + priority=["pyav"], + ), + FileExtension( + extension=".xface", + priority=["pyav"], + ), + FileExtension( + extension=".xwd", + priority=["pyav"], + ), + FileExtension( + extension=".y", + priority=["pyav"], + ), + FileExtension( + name="3GP (3GPP file format)", + extension=".3g2", + priority=["pyav"], + ), + FileExtension( + name="3GP (3GPP file format)", + extension=".3gp", + priority=["pyav"], + ), + FileExtension( + name="3GP (3GPP file format)", + extension=".f4v", + priority=["pyav"], + ), + FileExtension( + name="3GP (3GPP file format)", + extension=".ism", + priority=["pyav"], + ), + FileExtension( + name="3GP (3GPP file format)", + extension=".isma", + priority=["pyav"], + ), + FileExtension( + name="3GP (3GPP file format)", + extension=".ismv", + priority=["pyav"], + ), + FileExtension( + name="3GP (3GPP file format)", + extension=".m4a", + priority=["pyav"], + ), + FileExtension( + name="3GP (3GPP file format)", + extension=".m4b", + priority=["pyav"], + ), + FileExtension( + name="3GP (3GPP file format)", + extension=".mj2", + priority=["pyav"], + ), + FileExtension( + name="3GP (3GPP file format)", + extension=".psp", + priority=["pyav"], + ), + FileExtension( + name="3GP2 (3GPP2 file format)", + extension=".3g2", + priority=["pyav"], + ), + FileExtension( + name="3GP2 (3GPP2 file format)", + extension=".3gp", + priority=["pyav"], + ), + FileExtension( + name="3GP2 (3GPP2 file format)", + extension=".f4v", + priority=["pyav"], + ), + FileExtension( + name="3GP2 (3GPP2 file format)", + extension=".ism", + priority=["pyav"], + ), + FileExtension( + name="3GP2 (3GPP2 file format)", + extension=".isma", + 
priority=["pyav"], + ), + FileExtension( + name="3GP2 (3GPP2 file format)", + extension=".ismv", + priority=["pyav"], + ), + FileExtension( + name="3GP2 (3GPP2 file format)", + extension=".m4a", + priority=["pyav"], + ), + FileExtension( + name="3GP2 (3GPP2 file format)", + extension=".m4b", + priority=["pyav"], + ), + FileExtension( + name="3GP2 (3GPP2 file format)", + extension=".mj2", + priority=["pyav"], + ), + FileExtension( + name="3GP2 (3GPP2 file format)", + extension=".psp", + priority=["pyav"], + ), + FileExtension( + name="3GPP AMR", + extension=".amr", + priority=["pyav"], + ), + FileExtension( + name="a64 - video for Commodore 64", + extension=".A64", + priority=["pyav"], + ), + FileExtension( + name="a64 - video for Commodore 64", + extension=".a64", + priority=["pyav"], + ), + FileExtension( + name="Adobe Filmstrip", + extension=".flm", + priority=["pyav"], + ), + FileExtension( + name="AMV", + extension=".amv", + priority=["pyav"], + ), + FileExtension( + name="ASF (Advanced / Active Streaming Format)", + extension=".asf", + priority=["pyav"], + ), + FileExtension( + name="ASF (Advanced / Active Streaming Format)", + extension=".asf", + priority=["pyav"], + ), + FileExtension( + name="ASF (Advanced / Active Streaming Format)", + extension=".wmv", + priority=["pyav"], + ), + FileExtension( + name="ASF (Advanced / Active Streaming Format)", + extension=".wmv", + priority=["pyav"], + ), + FileExtension( + name="AV1 Annex B", + extension=".obu", + priority=["pyav"], + ), + FileExtension( + name="AV1 low overhead OBU", + extension=".obu", + priority=["pyav"], + ), + FileExtension( + name="AVI (Audio Video Interleaved)", + extension=".avi", + priority=["pyav"], + ), + FileExtension( + name="AVR (Audio Visual Research)", + extension=".avr", + priority=["pyav"], + ), + FileExtension( + name="Beam Software SIFF", + extension=".vb", + priority=["pyav"], + ), + FileExtension( + name="CD Graphics", + extension=".cdg", + priority=["pyav"], + ), + FileExtension( 
+ name="Commodore CDXL video", + extension=".cdxl", + priority=["pyav"], + ), + FileExtension( + name="Commodore CDXL video", + extension=".xl", + priority=["pyav"], + ), + FileExtension( + name="DASH Muxer", + extension=".mpd", + priority=["pyav"], + ), + FileExtension( + name="Digital Pictures SGA", + extension=".sga", + priority=["pyav"], + ), + FileExtension( + name="Discworld II BMV", + extension=".bmv", + priority=["pyav"], + ), + FileExtension( + name="DV (Digital Video)", + extension=".dif", + priority=["pyav"], + ), + FileExtension( + name="DV (Digital Video)", + extension=".dv", + priority=["pyav"], + ), + FileExtension( + name="F4V Adobe Flash Video", + extension=".f4v", + priority=["pyav"], + ), + FileExtension( + name="FLV (Flash Video)", + extension=".flv", + priority=["pyav"], + ), + FileExtension( + name="GXF (General eXchange Format)", + extension=".gxf", + priority=["pyav"], + ), + FileExtension( + name="iCE Draw File", + extension=".idf", + priority=["pyav"], + ), + FileExtension( + name="IFV CCTV DVR", + extension=".ifv", + priority=["pyav"], + ), + FileExtension( + name="iPod H.264 MP4 (MPEG-4 Part 14)", + extension=".m4a", + priority=["pyav"], + ), + FileExtension( + name="iPod H.264 MP4 (MPEG-4 Part 14)", + extension=".m4b", + priority=["pyav"], + ), + FileExtension( + name="iPod H.264 MP4 (MPEG-4 Part 14)", + extension=".m4v", + priority=["pyav"], + ), + FileExtension( + name="IVR (Internet Video Recording)", + extension=".ivr", + priority=["pyav"], + ), + FileExtension( + name="Konami PS2 SVAG", + extension=".svag", + priority=["pyav"], + ), + FileExtension( + name="KUX (YouKu)", + extension=".kux", + priority=["pyav"], + ), + FileExtension( + name="live RTMP FLV (Flash Video)", + extension=".flv", + priority=["pyav"], + ), + FileExtension( + name="Loki SDL MJPEG", + extension=".mjpg", + priority=["pyav"], + ), + FileExtension( + name="LVF", + extension=".lvf", + priority=["pyav"], + ), + FileExtension( + name="Matroska / WebM", + 
extension=".mk3d", + priority=["pyav"], + ), + FileExtension( + name="Matroska / WebM", + extension=".mka", + priority=["pyav"], + ), + FileExtension( + name="Matroska / WebM", + extension=".mks", + priority=["pyav"], + ), + FileExtension( + name="Microsoft XMV", + extension=".xmv", + priority=["pyav"], + ), + FileExtension( + name="MIME multipart JPEG", + extension=".mjpg", + priority=["pyav"], + ), + FileExtension( + name="MobiClip MODS", + extension=".mods", + priority=["pyav"], + ), + FileExtension( + name="MobiClip MOFLEX", + extension=".moflex", + priority=["pyav"], + ), + FileExtension( + name="Motion Pixels MVI", + extension=".mvi", + priority=["pyav"], + ), + FileExtension( + name="MP4 (MPEG-4 Part 14)", + extension=".3g2", + priority=["pyav"], + ), + FileExtension( + name="MP4 (MPEG-4 Part 14)", + extension=".3gp", + priority=["pyav"], + ), + FileExtension( + name="MP4 (MPEG-4 Part 14)", + extension=".f4v", + priority=["pyav"], + ), + FileExtension( + name="MP4 (MPEG-4 Part 14)", + extension=".ism", + priority=["pyav"], + ), + FileExtension( + name="MP4 (MPEG-4 Part 14)", + extension=".isma", + priority=["pyav"], + ), + FileExtension( + name="MP4 (MPEG-4 Part 14)", + extension=".ismv", + priority=["pyav"], + ), + FileExtension( + name="MP4 (MPEG-4 Part 14)", + extension=".m4a", + priority=["pyav"], + ), + FileExtension( + name="MP4 (MPEG-4 Part 14)", + extension=".m4b", + priority=["pyav"], + ), + FileExtension( + name="MP4 (MPEG-4 Part 14)", + extension=".mj2", + priority=["pyav"], + ), + FileExtension( + name="MP4 (MPEG-4 Part 14)", + extension=".psp", + priority=["pyav"], + ), + FileExtension( + name="MPEG-2 PS (DVD VOB)", + extension=".dvd", + priority=["pyav"], + ), + FileExtension( + name="MPEG-2 PS (SVCD)", + extension=".vob", + priority=["pyav"], + ), + FileExtension( + name="MPEG-2 PS (VOB)", + extension=".vob", + priority=["pyav"], + ), + FileExtension( + name="MPEG-TS (MPEG-2 Transport Stream)", + extension=".m2t", + priority=["pyav"], + ), + 
FileExtension( + name="MPEG-TS (MPEG-2 Transport Stream)", + extension=".m2ts", + priority=["pyav"], + ), + FileExtension( + name="MPEG-TS (MPEG-2 Transport Stream)", + extension=".mts", + priority=["pyav"], + ), + FileExtension( + name="MPEG-TS (MPEG-2 Transport Stream)", + extension=".ts", + priority=["pyav"], + ), + FileExtension( + name="Musepack", + extension=".mpc", + priority=["pyav"], + ), + FileExtension( + name="MXF (Material eXchange Format) Operational Pattern Atom", + extension=".mxf", + priority=["pyav"], + ), + FileExtension( + name="MXF (Material eXchange Format)", + extension=".mxf", + priority=["pyav"], + ), + FileExtension( + name="MxPEG clip", + extension=".mxg", + priority=["pyav"], + ), + FileExtension( + name="NC camera feed", + extension=".v", + priority=["pyav"], + ), + FileExtension( + name="NUT", + extension=".nut", + priority=["pyav"], + ), + FileExtension( + name="Ogg Video", + extension=".ogv", + priority=["pyav"], + ), + FileExtension( + name="Ogg", + extension=".ogg", + priority=["pyav"], + ), + FileExtension( + name="On2 IVF", + extension=".ivf", + priority=["pyav"], + ), + FileExtension( + name="PSP MP4 (MPEG-4 Part 14)", + extension=".psp", + priority=["pyav"], + ), + FileExtension( + name="Psygnosis YOP", + extension=".yop", + priority=["pyav"], + ), + FileExtension( + name="QuickTime / MOV", + extension=".3g2", + priority=["pyav"], + ), + FileExtension( + name="QuickTime / MOV", + extension=".3gp", + priority=["pyav"], + ), + FileExtension( + name="QuickTime / MOV", + extension=".f4v", + priority=["pyav"], + ), + FileExtension( + name="QuickTime / MOV", + extension=".ism", + priority=["pyav"], + ), + FileExtension( + name="QuickTime / MOV", + extension=".isma", + priority=["pyav"], + ), + FileExtension( + name="QuickTime / MOV", + extension=".ismv", + priority=["pyav"], + ), + FileExtension( + name="QuickTime / MOV", + extension=".m4a", + priority=["pyav"], + ), + FileExtension( + name="QuickTime / MOV", + extension=".m4b", + 
priority=["pyav"], + ), + FileExtension( + name="QuickTime / MOV", + extension=".mj2", + priority=["pyav"], + ), + FileExtension( + name="QuickTime / MOV", + extension=".psp", + priority=["pyav"], + ), + FileExtension( + name="raw AVS2-P2/IEEE1857.4 video", + extension=".avs", + priority=["pyav"], + ), + FileExtension( + name="raw AVS2-P2/IEEE1857.4 video", + extension=".avs2", + priority=["pyav"], + ), + FileExtension( + name="raw AVS3-P2/IEEE1857.10", + extension=".avs3", + priority=["pyav"], + ), + FileExtension( + name="raw Chinese AVS (Audio Video Standard) video", + extension=".cavs", + priority=["pyav"], + ), + FileExtension( + name="raw Dirac", + extension=".drc", + priority=["pyav"], + ), + FileExtension( + name="raw Dirac", + extension=".vc2", + priority=["pyav"], + ), + FileExtension( + name="raw DNxHD (SMPTE VC-3)", + extension=".dnxhd", + priority=["pyav"], + ), + FileExtension( + name="raw DNxHD (SMPTE VC-3)", + extension=".dnxhr", + priority=["pyav"], + ), + FileExtension( + name="raw GSM", + extension=".gsm", + priority=["pyav"], + ), + FileExtension( + name="raw H.261", + extension=".h261", + priority=["pyav"], + ), + FileExtension( + name="raw H.263", + extension=".h263", + priority=["pyav"], + ), + FileExtension( + name="raw H.264 video", + extension=".264", + priority=["pyav"], + ), + FileExtension( + name="raw H.264 video", + extension=".avc", + priority=["pyav"], + ), + FileExtension( + name="raw H.264 video", + extension=".h264", + priority=["pyav", "FFMPEG"], + ), + FileExtension( + name="raw H.264 video", + extension=".h26l", + priority=["pyav"], + ), + FileExtension( + name="raw HEVC video", + extension=".265", + priority=["pyav"], + ), + FileExtension( + name="raw HEVC video", + extension=".h265", + priority=["pyav"], + ), + FileExtension( + name="raw HEVC video", + extension=".hevc", + priority=["pyav"], + ), + FileExtension( + name="raw id RoQ", + extension=".roq", + priority=["pyav"], + ), + FileExtension( + name="raw Ingenient MJPEG", 
+ extension=".cgi", + priority=["pyav"], + ), + FileExtension( + name="raw IPU Video", + extension=".ipu", + priority=["pyav"], + ), + FileExtension( + name="raw MJPEG 2000 video", + extension=".j2k", + priority=["pyav"], + ), + FileExtension( + name="raw MJPEG video", + extension=".mjpeg", + priority=["pyav"], + ), + FileExtension( + name="raw MJPEG video", + extension=".mjpg", + priority=["pyav"], + ), + FileExtension( + name="raw MJPEG video", + extension=".mpo", + priority=["pyav"], + ), + FileExtension( + name="raw MPEG-1 video", + extension=".m1v", + priority=["pyav"], + ), + FileExtension( + name="raw MPEG-1 video", + extension=".mpeg", + priority=["pyav"], + ), + FileExtension( + name="raw MPEG-1 video", + extension=".mpg", + priority=["pyav"], + ), + FileExtension( + name="raw MPEG-2 video", + extension=".m2v", + priority=["pyav"], + ), + FileExtension( + name="raw MPEG-4 video", + extension=".m4v", + priority=["pyav"], + ), + FileExtension( + name="raw VC-1 video", + extension=".vc1", + priority=["pyav"], + ), + FileExtension( + name="raw video", + extension=".cif", + priority=["pyav"], + ), + FileExtension( + name="raw video", + extension=".qcif", + priority=["pyav"], + ), + FileExtension( + name="raw video", + extension=".rgb", + priority=["pyav"], + ), + FileExtension( + name="raw video", + extension=".yuv", + priority=["pyav"], + ), + FileExtension( + name="RealMedia", + extension=".rm", + priority=["pyav"], + ), + FileExtension( + name="SDR2", + extension=".sdr2", + priority=["pyav"], + ), + FileExtension( + name="Sega FILM / CPK", + extension=".cpk", + priority=["pyav"], + ), + FileExtension( + name="SER (Simple uncompressed video format for astronomical capturing)", + extension=".ser", + priority=["pyav"], + ), + FileExtension( + name="Simbiosis Interactive IMX", + extension=".imx", + priority=["pyav"], + ), + FileExtension( + name="Square SVS", + extension=".svs", + priority=["tifffile", "pyav"], + ), + FileExtension( + name="TiVo TY Stream", + 
extension=".ty", + priority=["pyav"], + ), + FileExtension( + name="TiVo TY Stream", + extension=".ty+", + priority=["pyav"], + ), + FileExtension( + name="Uncompressed 4:2:2 10-bit", + extension=".v210", + priority=["pyav"], + ), + FileExtension( + name="Uncompressed 4:2:2 10-bit", + extension=".yuv10", + priority=["pyav"], + ), + FileExtension( + name="VC-1 test bitstream", + extension=".rcv", + priority=["pyav"], + ), + FileExtension( + name="Video CCTV DAT", + extension=".dat", + priority=["pyav"], + ), + FileExtension( + name="Video DAV", + extension=".dav", + priority=["pyav"], + ), + FileExtension( + name="Vivo", + extension=".viv", + priority=["pyav"], + ), + FileExtension( + name="WebM Chunk Muxer", + extension=".chk", + priority=["pyav"], + ), + FileExtension( + name="WebM", + extension=".mk3d", + priority=["pyav"], + ), + FileExtension( + name="WebM", + extension=".mka", + priority=["pyav"], + ), + FileExtension( + name="WebM", + extension=".mks", + priority=["pyav"], + ), + FileExtension( + name="Windows Television (WTV)", + extension=".wtv", + priority=["pyav"], + ), + FileExtension( + name="Xilam DERF", + extension=".adp", + priority=["pyav"], + ), + FileExtension( + name="YUV4MPEG pipe", + extension=".y4m", + priority=["pyav"], + ), + FileExtension( + extension=".qpi", + priority=["tifffile"], + ), + FileExtension( + name="PCO Camera", + extension=".pcoraw", + priority=["tifffile"], + ), + FileExtension( + name="PCO Camera", + extension=".rec", + priority=["tifffile"], + ), + FileExtension( + name="Perkin Elmer Vectra", + extension=".qptiff", + priority=["tifffile"], + ), + FileExtension( + name="Pyramid Encoded TIFF", + extension=".ptiff", + priority=["tifffile"], + ), + FileExtension( + name="Pyramid Encoded TIFF", + extension=".ptif", + priority=["tifffile"], + ), + FileExtension( + name="Opticks Gel", + extension=".gel", + priority=["tifffile"], + ), + FileExtension( + name="Zoomify Image Format", + extension=".zif", + priority=["tifffile"], + ), 
+ FileExtension( + name="Hamamatsu Slide Scanner", + extension=".ndpi", + priority=["tifffile"], + ), + FileExtension( + name="Roche Digital Pathology", + extension=".bif", + priority=["tifffile"], + ), + FileExtension( + extension=".tf8", + priority=["tifffile"], + ), + FileExtension( + extension=".btf", + priority=["tifffile"], + ), + FileExtension( + name="High Efficiency Image File Format", + extension=".heic", + priority=["pillow"], + ), + FileExtension( + name="AV1 Image File Format", + extension=".avif", + priority=["pillow"], + ), +] +extension_list.sort(key=lambda x: x.extension) + + +known_extensions = dict() +for ext in extension_list: + if ext.extension not in known_extensions: + known_extensions[ext.extension] = list() + known_extensions[ext.extension].append(ext) + +extension_list = [ext for ext_list in known_extensions.values() for ext in ext_list] + +_video_extension_strings = [ + ".264", + ".265", + ".3g2", + ".3gp", + ".a64", + ".A64", + ".adp", + ".amr", + ".amv", + ".asf", + ".avc", + ".avi", + ".avr", + ".avs", + ".avs2", + ".avs3", + ".bmv", + ".cavs", + ".cdg", + ".cdxl", + ".cgi", + ".chk", + ".cif", + ".cpk", + ".dat", + ".dav", + ".dif", + ".dnxhd", + ".dnxhr", + ".drc", + ".dv", + ".dvd", + ".f4v", + ".flm", + ".flv", + ".gsm", + ".gxf", + ".h261", + ".h263", + ".h264", + ".h265", + ".h26l", + ".hevc", + ".idf", + ".ifv", + ".imx", + ".ipu", + ".ism", + ".isma", + ".ismv", + ".ivf", + ".ivr", + ".j2k", + ".kux", + ".lvf", + ".m1v", + ".m2t", + ".m2ts", + ".m2v", + ".m4a", + ".m4b", + ".m4v", + ".mj2", + ".mjpeg", + ".mjpg", + ".mk3d", + ".mka", + ".mks", + ".mkv", + ".mods", + ".moflex", + ".mov", + ".mp4", + ".mpc", + ".mpd", + ".mpeg", + ".mpg", + ".mpo", + ".mts", + ".mvi", + ".mxf", + ".mxg", + ".nut", + ".obu", + ".ogg", + ".ogv", + ".psp", + ".qcif", + ".rcv", + ".rgb", + ".rm", + ".roq", + ".sdr2", + ".ser", + ".sga", + ".svag", + ".svs", + ".ts", + ".ty", + ".ty+", + ".v", + ".v210", + ".vb", + ".vc1", + ".vc2", + ".viv", + 
".vob", + ".webm", + ".wmv", + ".wtv", + ".xl", + ".xmv", + ".y4m", + ".yop", + ".yuv", + ".yuv10", +] +video_extensions = list() +for ext_string in _video_extension_strings: + formats = known_extensions[ext_string] + video_extensions.append(formats[0]) +video_extensions.sort(key=lambda x: x.extension) diff --git a/.venv/Lib/site-packages/imageio/config/extensions.pyi b/.venv/Lib/site-packages/imageio/config/extensions.pyi new file mode 100644 index 00000000..266d0632 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/config/extensions.pyi @@ -0,0 +1,24 @@ +from typing import List, Dict, Optional + +class FileExtension: + extension: str + priority: List[str] + name: Optional[str] = None + description: Optional[str] = None + external_link: Optional[str] = None + volume_support: bool + + def __init__( + self, + *, + extension: str, + priority: List[str], + name: str = None, + description: str = None, + external_link: str = None + ) -> None: ... + def reset(self) -> None: ... + +extension_list: List[FileExtension] +known_extensions: Dict[str, List[FileExtension]] +video_extensions: List[FileExtension] diff --git a/.venv/Lib/site-packages/imageio/config/plugins.py b/.venv/Lib/site-packages/imageio/config/plugins.py new file mode 100644 index 00000000..e55303a8 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/config/plugins.py @@ -0,0 +1,780 @@ +import importlib + +from ..core.legacy_plugin_wrapper import LegacyPlugin + + +class PluginConfig: + """Plugin Configuration Metadata + + This class holds the information needed to lazy-import plugins. + + Parameters + ---------- + name : str + The name of the plugin. + class_name : str + The name of the plugin class inside the plugin module. + module_name : str + The name of the module/package from which to import the plugin. + is_legacy : bool + If True, this plugin is a v2 plugin and will be wrapped in a + LegacyPlugin. Default: False. 
+ package_name : str + If the given module name points to a relative module, then the package + name determines the package it is relative to. + install_name : str + The name of the optional dependency that can be used to install this + plugin if it is missing. + legacy_args : Dict + A dictionary of kwargs to pass to the v2 plugin (Format) upon construction. + + Examples + -------- + >>> PluginConfig( + name="TIFF", + class_name="TiffFormat", + module_name="imageio.plugins.tifffile", + is_legacy=True, + install_name="tifffile", + legacy_args={ + "description": "TIFF format", + "extensions": ".tif .tiff .stk .lsm", + "modes": "iIvV", + }, + ) + >>> PluginConfig( + name="pillow", + class_name="PillowPlugin", + module_name="imageio.plugins.pillow" + ) + + """ + + def __init__( + self, + name, + class_name, + module_name, + *, + is_legacy=False, + package_name=None, + install_name=None, + legacy_args=None, + ): + legacy_args = legacy_args or dict() + + self.name = name + self.class_name = class_name + self.module_name = module_name + self.package_name = package_name + + self.is_legacy = is_legacy + self.install_name = install_name or self.name + self.legacy_args = {"name": name, "description": "A legacy plugin"} + self.legacy_args.update(legacy_args) + + @property + def format(self): + """For backwards compatibility with FormatManager + + Delete when migrating to v3 + """ + if not self.is_legacy: + raise RuntimeError("Can only get format for legacy plugins.") + + module = importlib.import_module(self.module_name, self.package_name) + clazz = getattr(module, self.class_name) + return clazz(**self.legacy_args) + + @property + def plugin_class(self): + """Get the plugin class (import if needed) + + Returns + ------- + plugin_class : Any + The class that can be used to instantiate plugins. 
+ + """ + + module = importlib.import_module(self.module_name, self.package_name) + clazz = getattr(module, self.class_name) + + if self.is_legacy: + legacy_plugin = clazz(**self.legacy_args) + + def partial_legacy_plugin(request): + return LegacyPlugin(request, legacy_plugin) + + clazz = partial_legacy_plugin + + return clazz + + +known_plugins = dict() +known_plugins["pillow"] = PluginConfig( + name="pillow", class_name="PillowPlugin", module_name="imageio.plugins.pillow" +) +known_plugins["pyav"] = PluginConfig( + name="pyav", class_name="PyAVPlugin", module_name="imageio.plugins.pyav" +) +known_plugins["opencv"] = PluginConfig( + name="opencv", class_name="OpenCVPlugin", module_name="imageio.plugins.opencv" +) +known_plugins["tifffile"] = PluginConfig( + name="tifffile", + class_name="TifffilePlugin", + module_name="imageio.plugins.tifffile_v3", +) +known_plugins["SPE"] = PluginConfig( + name="spe", class_name="SpePlugin", module_name="imageio.plugins.spe" +) + + +# Legacy plugins +# ============== +# +# Which are partly registered by format, partly by plugin, and partly by a mix +# of both. We keep the naming here for backwards compatibility. +# In v3 this should become a single entry per plugin named after the plugin +# We can choose extension-specific priority in ``config.extensions``. +# +# Note: Since python 3.7 order of insertion determines the order of dict().keys() +# This means that the order here determines the order by which plugins are +# checked during the full fallback search. We don't advertise this downstream, +# but it could be a useful thing to keep in mind to choose a sensible default +# search order. 
+ +known_plugins["TIFF"] = PluginConfig( + name="TIFF", + class_name="TiffFormat", + module_name="imageio.plugins.tifffile", + is_legacy=True, + install_name="tifffile", + legacy_args={ + "description": "TIFF format", + "extensions": ".tif .tiff .stk .lsm", + "modes": "iIvV", + }, +) + +# PILLOW plugin formats (legacy) +PILLOW_FORMATS = [ + ("BMP", "Windows Bitmap", ".bmp", "PillowFormat"), + ("BUFR", "BUFR", ".bufr", "PillowFormat"), + ("CUR", "Windows Cursor", ".cur", "PillowFormat"), + ("DCX", "Intel DCX", ".dcx", "PillowFormat"), + ("DDS", "DirectDraw Surface", ".dds", "PillowFormat"), + ("DIB", "Windows Bitmap", "", "PillowFormat"), + ("EPS", "Encapsulated Postscript", ".ps .eps", "PillowFormat"), + ("FITS", "FITS", ".fit .fits", "PillowFormat"), + ("FLI", "Autodesk FLI/FLC Animation", ".fli .flc", "PillowFormat"), + ("FPX", "FlashPix", ".fpx", "PillowFormat"), + ("FTEX", "Texture File Format (IW2:EOC)", ".ftc .ftu", "PillowFormat"), + ("GBR", "GIMP brush file", ".gbr", "PillowFormat"), + ("GIF", "Compuserve GIF", ".gif", "GIFFormat"), + ("GRIB", "GRIB", ".grib", "PillowFormat"), + ("HDF5", "HDF5", ".h5 .hdf", "PillowFormat"), + ("ICNS", "Mac OS icns resource", ".icns", "PillowFormat"), + ("ICO", "Windows Icon", ".ico", "PillowFormat"), + ("IM", "IFUNC Image Memory", ".im", "PillowFormat"), + ("IMT", "IM Tools", "", "PillowFormat"), + ("IPTC", "IPTC/NAA", ".iim", "PillowFormat"), + ("JPEG", "JPEG (ISO 10918)", ".jfif .jpe .jpg .jpeg", "JPEGFormat"), + ( + "JPEG2000", + "JPEG 2000 (ISO 15444)", + ".jp2 .j2k .jpc .jpf .jpx .j2c", + "JPEG2000Format", + ), + ("MCIDAS", "McIdas area file", "", "PillowFormat"), + ("MIC", "Microsoft Image Composer", ".mic", "PillowFormat"), + # skipped in legacy pillow + # ("MPEG", "MPEG", ".mpg .mpeg", "PillowFormat"), + ("MPO", "MPO (CIPA DC-007)", ".mpo", "PillowFormat"), + ("MSP", "Windows Paint", ".msp", "PillowFormat"), + ("PCD", "Kodak PhotoCD", ".pcd", "PillowFormat"), + ("PCX", "Paintbrush", ".pcx", "PillowFormat"), + 
("PIXAR", "PIXAR raster image", ".pxr", "PillowFormat"), + ("PNG", "Portable network graphics", ".png", "PNGFormat"), + ("PPM", "Pbmplus image", ".pbm .pgm .ppm", "PillowFormat"), + ("PSD", "Adobe Photoshop", ".psd", "PillowFormat"), + ("SGI", "SGI Image File Format", ".bw .rgb .rgba .sgi", "PillowFormat"), + ("SPIDER", "Spider 2D image", "", "PillowFormat"), + ("SUN", "Sun Raster File", ".ras", "PillowFormat"), + ("TGA", "Targa", ".tga", "PillowFormat"), + ("TIFF", "Adobe TIFF", ".tif .tiff", "TIFFFormat"), + ("WMF", "Windows Metafile", ".wmf .emf", "PillowFormat"), + ("XBM", "X11 Bitmap", ".xbm", "PillowFormat"), + ("XPM", "X11 Pixel Map", ".xpm", "PillowFormat"), + ("XVTHUMB", "XV thumbnail image", "", "PillowFormat"), +] +for id, summary, ext, class_name in PILLOW_FORMATS: + config = PluginConfig( + name=id.upper() + "-PIL", + class_name=class_name, + module_name="imageio.plugins.pillow_legacy", + is_legacy=True, + install_name="pillow", + legacy_args={ + "description": summary + " via Pillow", + "extensions": ext, + "modes": "iI" if class_name == "GIFFormat" else "i", + "plugin_id": id, + }, + ) + known_plugins[config.name] = config + +known_plugins["FFMPEG"] = PluginConfig( + name="FFMPEG", + class_name="FfmpegFormat", + module_name="imageio.plugins.ffmpeg", + is_legacy=True, + install_name="ffmpeg", + legacy_args={ + "description": "Many video formats and cameras (via ffmpeg)", + "extensions": ".mov .avi .mpg .mpeg .mp4 .mkv .webm .wmv .h264", + "modes": "I", + }, +) + +known_plugins["BSDF"] = PluginConfig( + name="BSDF", + class_name="BsdfFormat", + module_name="imageio.plugins.bsdf", + is_legacy=True, + install_name="bsdf", + legacy_args={ + "description": "Format based on the Binary Structured Data Format", + "extensions": ".bsdf", + "modes": "iIvV", + }, +) + +known_plugins["DICOM"] = PluginConfig( + name="DICOM", + class_name="DicomFormat", + module_name="imageio.plugins.dicom", + is_legacy=True, + install_name="dicom", + legacy_args={ + "description": 
"Digital Imaging and Communications in Medicine", + "extensions": ".dcm .ct .mri", + "modes": "iIvV", + }, +) + +known_plugins["FEI"] = PluginConfig( + name="FEI", + class_name="FEISEMFormat", + module_name="imageio.plugins.feisem", + is_legacy=True, + install_name="feisem", + legacy_args={ + "description": "FEI-SEM TIFF format", + "extensions": [".tif", ".tiff"], + "modes": "iv", + }, +) + +known_plugins["FITS"] = PluginConfig( + name="FITS", + class_name="FitsFormat", + module_name="imageio.plugins.fits", + is_legacy=True, + install_name="fits", + legacy_args={ + "description": "Flexible Image Transport System (FITS) format", + "extensions": ".fits .fit .fts .fz", + "modes": "iIvV", + }, +) + +known_plugins["GDAL"] = PluginConfig( + name="GDAL", + class_name="GdalFormat", + module_name="imageio.plugins.gdal", + is_legacy=True, + install_name="gdal", + legacy_args={ + "description": "Geospatial Data Abstraction Library", + "extensions": ".tiff .tif .img .ecw .jpg .jpeg", + "modes": "iIvV", + }, +) + +known_plugins["ITK"] = PluginConfig( + name="ITK", + class_name="ItkFormat", + module_name="imageio.plugins.simpleitk", + is_legacy=True, + install_name="simpleitk", + legacy_args={ + "description": "Insight Segmentation and Registration Toolkit (ITK) format", + "extensions": " ".join( + ( + ".gipl", + ".ipl", + ".mha", + ".mhd", + ".nhdr", + ".nia", + ".hdr", + ".nrrd", + ".nii", + ".nii.gz", + ".img", + ".img.gz", + ".vtk", + ".hdf5", + ".lsm", + ".mnc", + ".mnc2", + ".mgh", + ".mnc", + ".pic", + ".bmp", + ".jpeg", + ".jpg", + ".png", + ".tiff", + ".tif", + ".dicom", + ".dcm", + ".gdcm", + ) + ), + "modes": "iIvV", + }, +) + +known_plugins["NPZ"] = PluginConfig( + name="NPZ", + class_name="NpzFormat", + module_name="imageio.plugins.npz", + is_legacy=True, + install_name="numpy", + legacy_args={ + "description": "Numpy's compressed array format", + "extensions": ".npz", + "modes": "iIvV", + }, +) + +known_plugins["SWF"] = PluginConfig( + name="SWF", + 
class_name="SWFFormat", + module_name="imageio.plugins.swf", + is_legacy=True, + install_name="swf", + legacy_args={ + "description": "Shockwave flash", + "extensions": ".swf", + "modes": "I", + }, +) + +known_plugins["SCREENGRAB"] = PluginConfig( + name="SCREENGRAB", + class_name="ScreenGrabFormat", + module_name="imageio.plugins.grab", + is_legacy=True, + install_name="pillow", + legacy_args={ + "description": "Grab screenshots (Windows and OS X only)", + "extensions": [], + "modes": "i", + }, +) + +known_plugins["CLIPBOARDGRAB"] = PluginConfig( + name="CLIPBOARDGRAB", + class_name="ClipboardGrabFormat", + module_name="imageio.plugins.grab", + is_legacy=True, + install_name="pillow", + legacy_args={ + "description": "Grab from clipboard (Windows only)", + "extensions": [], + "modes": "i", + }, +) + +# LYTRO plugin (legacy) +lytro_formats = [ + ("lytro-lfr", "Lytro Illum lfr image file", ".lfr", "i", "LytroLfrFormat"), + ( + "lytro-illum-raw", + "Lytro Illum raw image file", + ".raw", + "i", + "LytroIllumRawFormat", + ), + ("lytro-lfp", "Lytro F01 lfp image file", ".lfp", "i", "LytroLfpFormat"), + ("lytro-f01-raw", "Lytro F01 raw image file", ".raw", "i", "LytroF01RawFormat"), +] +for name, des, ext, mode, class_name in lytro_formats: + config = PluginConfig( + name=name.upper(), + class_name=class_name, + module_name="imageio.plugins.lytro", + is_legacy=True, + install_name="lytro", + legacy_args={ + "description": des, + "extensions": ext, + "modes": mode, + }, + ) + known_plugins[config.name] = config + +# FreeImage plugin (legacy) +FREEIMAGE_FORMATS = [ + ( + "BMP", + 0, + "Windows or OS/2 Bitmap", + ".bmp", + "i", + "FreeimageBmpFormat", + "imageio.plugins.freeimage", + ), + ( + "CUT", + 21, + "Dr. 
Halo", + ".cut", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "DDS", + 24, + "DirectX Surface", + ".dds", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "EXR", + 29, + "ILM OpenEXR", + ".exr", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "G3", + 27, + "Raw fax format CCITT G.3", + ".g3", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "GIF", + 25, + "Static and animated gif (FreeImage)", + ".gif", + "iI", + "GifFormat", + "imageio.plugins.freeimagemulti", + ), + ( + "HDR", + 26, + "High Dynamic Range Image", + ".hdr", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "ICO", + 1, + "Windows Icon", + ".ico", + "iI", + "IcoFormat", + "imageio.plugins.freeimagemulti", + ), + ( + "IFF", + 5, + "IFF Interleaved Bitmap", + ".iff .lbm", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "J2K", + 30, + "JPEG-2000 codestream", + ".j2k .j2c", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "JNG", + 3, + "JPEG Network Graphics", + ".jng", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "JP2", + 31, + "JPEG-2000 File Format", + ".jp2", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "JPEG", + 2, + "JPEG - JFIF Compliant", + ".jpg .jif .jpeg .jpe", + "i", + "FreeimageJpegFormat", + "imageio.plugins.freeimage", + ), + ( + "JPEG-XR", + 36, + "JPEG XR image format", + ".jxr .wdp .hdp", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "KOALA", + 4, + "C64 Koala Graphics", + ".koa", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + # not registered in legacy pillow + # ("MNG", 6, "Multiple-image Network Graphics", ".mng", "i", "FreeimageFormat", "imageio.plugins.freeimage"), + ( + "PBM", + 7, + "Portable Bitmap (ASCII)", + ".pbm", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "PBMRAW", + 8, + "Portable Bitmap (RAW)", + ".pbm", 
+ "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "PCD", + 9, + "Kodak PhotoCD", + ".pcd", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "PCX", + 10, + "Zsoft Paintbrush", + ".pcx", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "PFM", + 32, + "Portable floatmap", + ".pfm", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "PGM", + 11, + "Portable Greymap (ASCII)", + ".pgm", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "PGMRAW", + 12, + "Portable Greymap (RAW)", + ".pgm", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "PICT", + 33, + "Macintosh PICT", + ".pct .pict .pic", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "PNG", + 13, + "Portable Network Graphics", + ".png", + "i", + "FreeimagePngFormat", + "imageio.plugins.freeimage", + ), + ( + "PPM", + 14, + "Portable Pixelmap (ASCII)", + ".ppm", + "i", + "FreeimagePnmFormat", + "imageio.plugins.freeimage", + ), + ( + "PPMRAW", + 15, + "Portable Pixelmap (RAW)", + ".ppm", + "i", + "FreeimagePnmFormat", + "imageio.plugins.freeimage", + ), + ( + "PSD", + 20, + "Adobe Photoshop", + ".psd", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "RAS", + 16, + "Sun Raster Image", + ".ras", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "RAW", + 34, + "RAW camera image", + ".3fr .arw .bay .bmq .cap .cine .cr2 .crw .cs1 .dc2 " + ".dcr .drf .dsc .dng .erf .fff .ia .iiq .k25 .kc2 .kdc .mdc .mef .mos .mrw .nef .nrw .orf " + ".pef .ptx .pxn .qtk .raf .raw .rdc .rw2 .rwl .rwz .sr2 .srf .srw .sti", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "SGI", + 28, + "SGI Image Format", + ".sgi .rgb .rgba .bw", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "TARGA", + 17, + "Truevision Targa", + ".tga .targa", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "TIFF", + 18, + 
"Tagged Image File Format", + ".tif .tiff", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "WBMP", + 19, + "Wireless Bitmap", + ".wap .wbmp .wbm", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "WebP", + 35, + "Google WebP image format", + ".webp", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "XBM", + 22, + "X11 Bitmap Format", + ".xbm", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), + ( + "XPM", + 23, + "X11 Pixmap Format", + ".xpm", + "i", + "FreeimageFormat", + "imageio.plugins.freeimage", + ), +] +for name, i, des, ext, mode, class_name, module_name in FREEIMAGE_FORMATS: + config = PluginConfig( + name=name.upper() + "-FI", + class_name=class_name, + module_name=module_name, + is_legacy=True, + install_name="freeimage", + legacy_args={ + "description": des, + "extensions": ext, + "modes": mode, + "fif": i, + }, + ) + known_plugins[config.name] = config + +# exists for backwards compatibility with FormatManager +# delete in V3 +_original_order = [x for x, config in known_plugins.items() if config.is_legacy] diff --git a/.venv/Lib/site-packages/imageio/config/plugins.pyi b/.venv/Lib/site-packages/imageio/config/plugins.pyi new file mode 100644 index 00000000..ab5d4a81 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/config/plugins.pyi @@ -0,0 +1,28 @@ +from typing import Any, Dict, Optional +from ..core.v3_plugin_api import PluginV3 + +class PluginConfig: + name: str + class_name: str + module_name: str + is_legacy: bool + package_name: Optional[str] = None + install_name: Optional[str] = None + legacy_args: Optional[dict] = None + @property + def format(self) -> Any: ... + @property + def plugin_class(self) -> PluginV3: ... + def __init__( + self, + name: str, + class_name: str, + module_name: str, + *, + is_legacy: bool = False, + package_name: str = None, + install_name: str = None, + legacy_args: dict = None, + ) -> None: ... 
+ +known_plugins: Dict[str, PluginConfig] diff --git a/.venv/Lib/site-packages/imageio/core/__init__.py b/.venv/Lib/site-packages/imageio/core/__init__.py new file mode 100644 index 00000000..80bedab1 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/core/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Distributed under the (new) BSD License. See LICENSE.txt for more info. + +""" This subpackage provides the core functionality of imageio +(everything but the plugins). +""" + +# flake8: noqa + +from .util import Image, Array, Dict, asarray, image_as_uint, urlopen +from .util import BaseProgressIndicator, StdoutProgressIndicator, IS_PYPY +from .util import get_platform, appdata_dir, resource_dirs, has_module +from .findlib import load_lib +from .fetching import get_remote_file, InternetNotAllowedError, NeedDownloadError +from .request import Request, read_n_bytes, RETURN_BYTES +from .format import Format, FormatManager diff --git a/.venv/Lib/site-packages/imageio/core/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/imageio/core/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..046fe918 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/core/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/core/__pycache__/fetching.cpython-311.pyc b/.venv/Lib/site-packages/imageio/core/__pycache__/fetching.cpython-311.pyc new file mode 100644 index 00000000..8743c27b Binary files /dev/null and b/.venv/Lib/site-packages/imageio/core/__pycache__/fetching.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/core/__pycache__/findlib.cpython-311.pyc b/.venv/Lib/site-packages/imageio/core/__pycache__/findlib.cpython-311.pyc new file mode 100644 index 00000000..7bf58d6d Binary files /dev/null and b/.venv/Lib/site-packages/imageio/core/__pycache__/findlib.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/core/__pycache__/format.cpython-311.pyc 
b/.venv/Lib/site-packages/imageio/core/__pycache__/format.cpython-311.pyc new file mode 100644 index 00000000..bbf9fdcd Binary files /dev/null and b/.venv/Lib/site-packages/imageio/core/__pycache__/format.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/core/__pycache__/imopen.cpython-311.pyc b/.venv/Lib/site-packages/imageio/core/__pycache__/imopen.cpython-311.pyc new file mode 100644 index 00000000..90dc0116 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/core/__pycache__/imopen.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/core/__pycache__/legacy_plugin_wrapper.cpython-311.pyc b/.venv/Lib/site-packages/imageio/core/__pycache__/legacy_plugin_wrapper.cpython-311.pyc new file mode 100644 index 00000000..3bce24b0 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/core/__pycache__/legacy_plugin_wrapper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/core/__pycache__/request.cpython-311.pyc b/.venv/Lib/site-packages/imageio/core/__pycache__/request.cpython-311.pyc new file mode 100644 index 00000000..cdc43a14 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/core/__pycache__/request.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/core/__pycache__/util.cpython-311.pyc b/.venv/Lib/site-packages/imageio/core/__pycache__/util.cpython-311.pyc new file mode 100644 index 00000000..2364c512 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/core/__pycache__/util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/core/__pycache__/v3_plugin_api.cpython-311.pyc b/.venv/Lib/site-packages/imageio/core/__pycache__/v3_plugin_api.cpython-311.pyc new file mode 100644 index 00000000..7865ff86 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/core/__pycache__/v3_plugin_api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/core/fetching.py b/.venv/Lib/site-packages/imageio/core/fetching.py new file mode 100644 index 
00000000..0380bc79 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/core/fetching.py @@ -0,0 +1,247 @@ +# -*- coding: utf-8 -*- +# Based on code from the vispy project +# Distributed under the (new) BSD License. See LICENSE.txt for more info. + +"""Data downloading and reading functions +""" + +from math import log +import os +from os import path as op +import sys +import shutil +import time + +from . import appdata_dir, resource_dirs +from . import StdoutProgressIndicator, urlopen + + +class InternetNotAllowedError(IOError): + """Plugins that need resources can just use get_remote_file(), but + should catch this error and silently ignore it. + """ + + pass + + +class NeedDownloadError(IOError): + """Is raised when a remote file is requested that is not locally + available, but which needs to be explicitly downloaded by the user. + """ + + +def get_remote_file(fname, directory=None, force_download=False, auto=True): + """Get a the filename for the local version of a file from the web + + Parameters + ---------- + fname : str + The relative filename on the remote data repository to download. + These correspond to paths on + ``https://github.com/imageio/imageio-binaries/``. + directory : str | None + The directory where the file will be cached if a download was + required to obtain the file. By default, the appdata directory + is used. This is also the first directory that is checked for + a local version of the file. If the directory does not exist, + it will be created. + force_download : bool | str + If True, the file will be downloaded even if a local copy exists + (and this copy will be overwritten). Can also be a YYYY-MM-DD date + to ensure a file is up-to-date (modified date of a file on disk, + if present, is checked). + auto : bool + Whether to auto-download the file if its not present locally. Default + True. If False and a download is needed, raises NeedDownloadError. + + Returns + ------- + fname : str + The path to the file on the local system. 
+ """ + _url_root = "https://github.com/imageio/imageio-binaries/raw/master/" + url = _url_root + fname + nfname = op.normcase(fname) # convert to native + # Get dirs to look for the resource + given_directory = directory + directory = given_directory or appdata_dir("imageio") + dirs = resource_dirs() + dirs.insert(0, directory) # Given dir has preference + # Try to find the resource locally + for dir in dirs: + filename = op.join(dir, nfname) + if op.isfile(filename): + if not force_download: # we're done + if given_directory and given_directory != dir: + filename2 = os.path.join(given_directory, nfname) + # Make sure the output directory exists + if not op.isdir(op.dirname(filename2)): + os.makedirs(op.abspath(op.dirname(filename2))) + shutil.copy(filename, filename2) + return filename2 + return filename + if isinstance(force_download, str): + ntime = time.strptime(force_download, "%Y-%m-%d") + ftime = time.gmtime(op.getctime(filename)) + if ftime >= ntime: + if given_directory and given_directory != dir: + filename2 = os.path.join(given_directory, nfname) + # Make sure the output directory exists + if not op.isdir(op.dirname(filename2)): + os.makedirs(op.abspath(op.dirname(filename2))) + shutil.copy(filename, filename2) + return filename2 + return filename + else: + print("File older than %s, updating..." % force_download) + break + + # If we get here, we're going to try to download the file + if os.getenv("IMAGEIO_NO_INTERNET", "").lower() in ("1", "true", "yes"): + raise InternetNotAllowedError( + "Will not download resource from the " + "internet because environment variable " + "IMAGEIO_NO_INTERNET is set." + ) + + # Can we proceed with auto-download? 
+ if not auto: + raise NeedDownloadError() + + # Get filename to store to and make sure the dir exists + filename = op.join(directory, nfname) + if not op.isdir(op.dirname(filename)): + os.makedirs(op.abspath(op.dirname(filename))) + # let's go get the file + if os.getenv("CONTINUOUS_INTEGRATION", False): # pragma: no cover + # On CI, we retry a few times ... + for i in range(2): + try: + _fetch_file(url, filename) + return filename + except IOError: + time.sleep(0.5) + else: + _fetch_file(url, filename) + return filename + else: # pragma: no cover + _fetch_file(url, filename) + return filename + + +def _fetch_file(url, file_name, print_destination=True): + """Load requested file, downloading it if needed or requested + + Parameters + ---------- + url: string + The url of file to be downloaded. + file_name: string + Name, along with the path, of where downloaded file will be saved. + print_destination: bool, optional + If true, destination of where file was saved will be printed after + download finishes. + resume: bool, optional + If true, try to resume partially downloaded files. + """ + # Adapted from NISL: + # https://github.com/nisl/tutorial/blob/master/nisl/datasets.py + + print( + "Imageio: %r was not found on your computer; " + "downloading it now." % os.path.basename(file_name) + ) + + temp_file_name = file_name + ".part" + local_file = None + initial_size = 0 + errors = [] + for tries in range(4): + try: + # Checking file size and displaying it alongside the download url + remote_file = urlopen(url, timeout=5.0) + file_size = int(remote_file.headers["Content-Length"].strip()) + size_str = _sizeof_fmt(file_size) + print("Try %i. 
Download from %s (%s)" % (tries + 1, url, size_str)) + # Downloading data (can be extended to resume if need be) + local_file = open(temp_file_name, "wb") + _chunk_read(remote_file, local_file, initial_size=initial_size) + # temp file must be closed prior to the move + if not local_file.closed: + local_file.close() + shutil.move(temp_file_name, file_name) + if print_destination is True: + sys.stdout.write("File saved as %s.\n" % file_name) + break + except Exception as e: + errors.append(e) + print("Error while fetching file: %s." % str(e)) + finally: + if local_file is not None: + if not local_file.closed: + local_file.close() + else: + raise IOError( + "Unable to download %r. Perhaps there is no internet " + "connection? If there is, please report this problem." + % os.path.basename(file_name) + ) + + +def _chunk_read(response, local_file, chunk_size=8192, initial_size=0): + """Download a file chunk by chunk and show advancement + + Can also be used when resuming downloads over http. + + Parameters + ---------- + response: urllib.response.addinfourl + Response to the download request in order to get file size. + local_file: file + Hard disk file where data should be written. + chunk_size: integer, optional + Size of downloaded chunks. Default: 8192 + initial_size: int, optional + If resuming, indicate the initial size of the file. 
+ """ + # Adapted from NISL: + # https://github.com/nisl/tutorial/blob/master/nisl/datasets.py + + bytes_so_far = initial_size + # Returns only amount left to download when resuming, not the size of the + # entire file + total_size = int(response.headers["Content-Length"].strip()) + total_size += initial_size + + progress = StdoutProgressIndicator("Downloading") + progress.start("", "bytes", total_size) + + while True: + chunk = response.read(chunk_size) + bytes_so_far += len(chunk) + if not chunk: + break + _chunk_write(chunk, local_file, progress) + progress.finish("Done") + + +def _chunk_write(chunk, local_file, progress): + """Write a chunk to file and update the progress bar""" + local_file.write(chunk) + progress.increase_progress(len(chunk)) + time.sleep(0) # Give other threads a chance, e.g. those that handle stdout pipes + + +def _sizeof_fmt(num): + """Turn number of bytes into human-readable str""" + units = ["bytes", "kB", "MB", "GB", "TB", "PB"] + decimals = [0, 0, 1, 2, 2, 2] + """Human friendly file size""" + if num > 1: + exponent = min(int(log(num, 1024)), len(units) - 1) + quotient = float(num) / 1024**exponent + unit = units[exponent] + num_decimals = decimals[exponent] + format_string = "{0:.%sf} {1}" % num_decimals + return format_string.format(quotient, unit) + return "0 bytes" if num == 0 else "1 byte" diff --git a/.venv/Lib/site-packages/imageio/core/findlib.py b/.venv/Lib/site-packages/imageio/core/findlib.py new file mode 100644 index 00000000..76bda521 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/core/findlib.py @@ -0,0 +1,161 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2015-1018, imageio contributors +# Copyright (C) 2013, Zach Pincus, Almar Klein and others + +""" This module contains generic code to find and load a dynamic library. 
+""" + +import os +import sys +import ctypes + + +LOCALDIR = os.path.abspath(os.path.dirname(__file__)) + +# Flag that can be patched / set to True to disable loading non-system libs +SYSTEM_LIBS_ONLY = False + + +def looks_lib(fname): + """Returns True if the given filename looks like a dynamic library. + Based on extension, but cross-platform and more flexible. + """ + fname = fname.lower() + if sys.platform.startswith("win"): + return fname.endswith(".dll") + elif sys.platform.startswith("darwin"): + return fname.endswith(".dylib") + else: + return fname.endswith(".so") or ".so." in fname + + +def generate_candidate_libs(lib_names, lib_dirs=None): + """Generate a list of candidate filenames of what might be the dynamic + library corresponding with the given list of names. + Returns (lib_dirs, lib_paths) + """ + lib_dirs = lib_dirs or [] + + # Get system dirs to search + sys_lib_dirs = [ + "/lib", + "/usr/lib", + "/usr/lib/x86_64-linux-gnu", + "/usr/lib/aarch64-linux-gnu", + "/usr/local/lib", + "/opt/local/lib", + ] + + # Get Python dirs to search (shared if for Pyzo) + py_sub_dirs = ["bin", "lib", "DLLs", "Library/bin", "shared"] + py_lib_dirs = [os.path.join(sys.prefix, d) for d in py_sub_dirs] + if hasattr(sys, "base_prefix"): + py_lib_dirs += [os.path.join(sys.base_prefix, d) for d in py_sub_dirs] + + # Get user dirs to search (i.e. 
HOME) + home_dir = os.path.expanduser("~") + user_lib_dirs = [os.path.join(home_dir, d) for d in ["lib"]] + + # Select only the dirs for which a directory exists, and remove duplicates + potential_lib_dirs = lib_dirs + sys_lib_dirs + py_lib_dirs + user_lib_dirs + lib_dirs = [] + for ld in potential_lib_dirs: + if os.path.isdir(ld) and ld not in lib_dirs: + lib_dirs.append(ld) + + # Now attempt to find libraries of that name in the given directory + # (case-insensitive) + lib_paths = [] + for lib_dir in lib_dirs: + # Get files, prefer short names, last version + files = os.listdir(lib_dir) + files = reversed(sorted(files)) + files = sorted(files, key=len) + for lib_name in lib_names: + # Test all filenames for name and ext + for fname in files: + if fname.lower().startswith(lib_name) and looks_lib(fname): + lib_paths.append(os.path.join(lib_dir, fname)) + + # Return (only the items which are files) + lib_paths = [lp for lp in lib_paths if os.path.isfile(lp)] + return lib_dirs, lib_paths + + +def load_lib(exact_lib_names, lib_names, lib_dirs=None): + """load_lib(exact_lib_names, lib_names, lib_dirs=None) + + Load a dynamic library. + + This function first tries to load the library from the given exact + names. When that fails, it tries to find the library in common + locations. It searches for files that start with one of the names + given in lib_names (case insensitive). The search is performed in + the given lib_dirs and a set of common library dirs. 
+ + Returns ``(ctypes_library, library_path)`` + """ + + # Checks + assert isinstance(exact_lib_names, list) + assert isinstance(lib_names, list) + if lib_dirs is not None: + assert isinstance(lib_dirs, list) + exact_lib_names = [n for n in exact_lib_names if n] + lib_names = [n for n in lib_names if n] + + # Get reference name (for better messages) + if lib_names: + the_lib_name = lib_names[0] + elif exact_lib_names: + the_lib_name = exact_lib_names[0] + else: + raise ValueError("No library name given.") + + # Collect filenames of potential libraries + # First try a few bare library names that ctypes might be able to find + # in the default locations for each platform. + if SYSTEM_LIBS_ONLY: + lib_dirs, lib_paths = [], [] + else: + lib_dirs, lib_paths = generate_candidate_libs(lib_names, lib_dirs) + lib_paths = exact_lib_names + lib_paths + + # Select loader + if sys.platform.startswith("win"): + loader = ctypes.windll + else: + loader = ctypes.cdll + + # Try to load until success + the_lib = None + errors = [] + for fname in lib_paths: + try: + the_lib = loader.LoadLibrary(fname) + break + except Exception as err: + # Don't record errors when it couldn't load the library from an + # exact name -- this fails often, and doesn't provide any useful + # debugging information anyway, beyond "couldn't find library..." + if fname not in exact_lib_names: + errors.append((fname, err)) + + # No success ... + if the_lib is None: + if errors: + # No library loaded, and load-errors reported for some + # candidate libs + err_txt = ["%s:\n%s" % (lib, str(e)) for lib, e in errors] + msg = ( + "One or more %s libraries were found, but " + + "could not be loaded due to the following errors:\n%s" + ) + raise OSError(msg % (the_lib_name, "\n\n".join(err_txt))) + else: + # No errors, because no potential libraries found at all! 
+ msg = "Could not find a %s library in any of:\n%s" + raise OSError(msg % (the_lib_name, "\n".join(lib_dirs))) + + # Done + return the_lib, fname diff --git a/.venv/Lib/site-packages/imageio/core/format.py b/.venv/Lib/site-packages/imageio/core/format.py new file mode 100644 index 00000000..109cd8e7 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/core/format.py @@ -0,0 +1,881 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +""" + +.. note:: + imageio is under construction, some details with regard to the + Reader and Writer classes may change. + +These are the main classes of imageio. They expose an interface for +advanced users and plugin developers. A brief overview: + + * imageio.FormatManager - for keeping track of registered formats. + * imageio.Format - representation of a file format reader/writer + * imageio.Format.Reader - object used during the reading of a file. + * imageio.Format.Writer - object used during saving a file. + * imageio.Request - used to store the filename and other info. + +Plugins need to implement a Format class and register +a format object using ``imageio.formats.add_format()``. + +""" + +# todo: do we even use the known extensions? + +# Some notes: +# +# The classes in this module use the Request object to pass filename and +# related info around. This request object is instantiated in +# imageio.get_reader and imageio.get_writer. + +import sys +import warnings +import contextlib + +import numpy as np +from pathlib import Path + +from . import Array, asarray +from .request import ImageMode +from ..config import known_plugins, known_extensions, PluginConfig, FileExtension +from ..config.plugins import _original_order +from .imopen import imopen + + +# survived for backwards compatibility +# I don't know if external plugin code depends on it existing +# We no longer do +MODENAMES = ImageMode + + +def _get_config(plugin): + """Old Plugin resolution logic. 
+ + Remove once we remove the old format manager. + """ + + extension_name = None + + if Path(plugin).suffix.lower() in known_extensions: + extension_name = Path(plugin).suffix.lower() + elif plugin in known_plugins: + pass + elif plugin.lower() in known_extensions: + extension_name = plugin.lower() + elif "." + plugin.lower() in known_extensions: + extension_name = "." + plugin.lower() + else: + raise IndexError(f"No format known by name `{plugin}`.") + + if extension_name is not None: + for plugin_name in [ + x + for file_extension in known_extensions[extension_name] + for x in file_extension.priority + ]: + if known_plugins[plugin_name].is_legacy: + plugin = plugin_name + break + + return known_plugins[plugin] + + +class Format(object): + """Represents an implementation to read/write a particular file format + + A format instance is responsible for 1) providing information about + a format; 2) determining whether a certain file can be read/written + with this format; 3) providing a reader/writer class. + + Generally, imageio will select the right format and use that to + read/write an image. A format can also be explicitly chosen in all + read/write functions. Use ``print(format)``, or ``help(format_name)`` + to see its documentation. + + To implement a specific format, one should create a subclass of + Format and the Format.Reader and Format.Writer classes. See + :class:`imageio.plugins` for details. + + Parameters + ---------- + name : str + A short name of this format. Users can select a format using its name. + description : str + A one-line description of the format. + extensions : str | list | None + List of filename extensions that this format supports. If a + string is passed it should be space or comma separated. The + extensions are used in the documentation and to allow users to + select a format by file extension. It is not used to determine + what format to use for reading/saving a file. 
+ modes : str + A string containing the modes that this format can handle ('iIvV'), + “i” for an image, “I” for multiple images, “v” for a volume, + “V” for multiple volumes. + This attribute is used in the documentation and to select the + formats when reading/saving a file. + """ + + def __init__(self, name, description, extensions=None, modes=None): + """Initialize the Plugin. + + Parameters + ---------- + name : str + A short name of this format. Users can select a format using its name. + description : str + A one-line description of the format. + extensions : str | list | None + List of filename extensions that this format supports. If a + string is passed it should be space or comma separated. The + extensions are used in the documentation and to allow users to + select a format by file extension. It is not used to determine + what format to use for reading/saving a file. + modes : str + A string containing the modes that this format can handle ('iIvV'), + “i” for an image, “I” for multiple images, “v” for a volume, + “V” for multiple volumes. + This attribute is used in the documentation and to select the + formats when reading/saving a file. + """ + + # Store name and description + self._name = name.upper() + self._description = description + + # Store extensions, do some effort to normalize them. + # They are stored as a list of lowercase strings without leading dots. + if extensions is None: + extensions = [] + elif isinstance(extensions, str): + extensions = extensions.replace(",", " ").split(" ") + # + if isinstance(extensions, (tuple, list)): + self._extensions = tuple( + ["." 
+ e.strip(".").lower() for e in extensions if e] + ) + else: + raise ValueError("Invalid value for extensions given.") + + # Store mode + self._modes = modes or "" + if not isinstance(self._modes, str): + raise ValueError("Invalid value for modes given.") + for m in self._modes: + if m not in "iIvV?": + raise ValueError("Invalid value for mode given.") + + def __repr__(self): + # Short description + return "" % (self.name, self.description) + + def __str__(self): + return self.doc + + @property + def doc(self): + """The documentation for this format (name + description + docstring).""" + # Our docsring is assumed to be indented by four spaces. The + # first line needs special attention. + return "%s - %s\n\n %s\n" % ( + self.name, + self.description, + self.__doc__.strip(), + ) + + @property + def name(self): + """The name of this format.""" + return self._name + + @property + def description(self): + """A short description of this format.""" + return self._description + + @property + def extensions(self): + """A list of file extensions supported by this plugin. + These are all lowercase with a leading dot. + """ + return self._extensions + + @property + def modes(self): + """A string specifying the modes that this format can handle.""" + return self._modes + + def get_reader(self, request): + """get_reader(request) + + Return a reader object that can be used to read data and info + from the given file. Users are encouraged to use + imageio.get_reader() instead. + """ + select_mode = request.mode[1] if request.mode[1] in "iIvV" else "" + if select_mode not in self.modes: + raise RuntimeError( + f"Format {self.name} cannot read in {request.mode.image_mode} mode" + ) + return self.Reader(self, request) + + def get_writer(self, request): + """get_writer(request) + + Return a writer object that can be used to write data and info + to the given file. Users are encouraged to use + imageio.get_writer() instead. 
+ """ + select_mode = request.mode[1] if request.mode[1] in "iIvV" else "" + if select_mode not in self.modes: + raise RuntimeError( + f"Format {self.name} cannot write in {request.mode.image_mode} mode" + ) + return self.Writer(self, request) + + def can_read(self, request): + """can_read(request) + + Get whether this format can read data from the specified uri. + """ + return self._can_read(request) + + def can_write(self, request): + """can_write(request) + + Get whether this format can write data to the speciefed uri. + """ + return self._can_write(request) + + def _can_read(self, request): # pragma: no cover + """Check if Plugin can read from ImageResource. + + This method is called when the format manager is searching for a format + to read a certain image. Return True if this format can do it. + + The format manager is aware of the extensions and the modes that each + format can handle. It will first ask all formats that *seem* to be able + to read it whether they can. If none can, it will ask the remaining + formats if they can: the extension might be missing, and this allows + formats to provide functionality for certain extensions, while giving + preference to other plugins. + + If a format says it can, it should live up to it. The format would + ideally check the request.firstbytes and look for a header of some kind. + + Parameters + ---------- + request : Request + A request that can be used to access the ImageResource and obtain + metadata about it. + + Returns + ------- + can_read : bool + True if the plugin can read from the ImageResource, False otherwise. + + """ + return None # Plugins must implement this + + def _can_write(self, request): # pragma: no cover + """Check if Plugin can write to ImageResource. + + Parameters + ---------- + request : Request + A request that can be used to access the ImageResource and obtain + metadata about it. + + Returns + ------- + can_read : bool + True if the plugin can write to the ImageResource, False otherwise. 
+ + """ + return None # Plugins must implement this + + # ----- + + class _BaseReaderWriter(object): + """Base class for the Reader and Writer class to implement common + functionality. It implements a similar approach for opening/closing + and context management as Python's file objects. + """ + + def __init__(self, format, request): + self.__closed = False + self._BaseReaderWriter_last_index = -1 + self._format = format + self._request = request + # Open the reader/writer + self._open(**self.request.kwargs.copy()) + + @property + def format(self): + """The :class:`.Format` object corresponding to the current + read/write operation. + """ + return self._format + + @property + def request(self): + """The :class:`.Request` object corresponding to the + current read/write operation. + """ + return self._request + + def __enter__(self): + self._checkClosed() + return self + + def __exit__(self, type, value, traceback): + if value is None: + # Otherwise error in close hide the real error. + self.close() + + def __del__(self): + try: + self.close() + except Exception: # pragma: no cover + pass # Suppress noise when called during interpreter shutdown + + def close(self): + """Flush and close the reader/writer. + This method has no effect if it is already closed. + """ + if self.__closed: + return + self.__closed = True + self._close() + # Process results and clean request object + self.request.finish() + + @property + def closed(self): + """Whether the reader/writer is closed.""" + return self.__closed + + def _checkClosed(self, msg=None): + """Internal: raise an ValueError if reader/writer is closed""" + if self.closed: + what = self.__class__.__name__ + msg = msg or ("I/O operation on closed %s." % what) + raise RuntimeError(msg) + + # To implement + + def _open(self, **kwargs): + """_open(**kwargs) + + Plugins should probably implement this. + + It is called when reader/writer is created. Here the + plugin can do its initialization. 
The given keyword arguments + are those that were given by the user at imageio.read() or + imageio.write(). + """ + raise NotImplementedError() + + def _close(self): + """_close() + + Plugins should probably implement this. + + It is called when the reader/writer is closed. Here the plugin + can do a cleanup, flush, etc. + + """ + raise NotImplementedError() + + # ----- + + class Reader(_BaseReaderWriter): + """ + The purpose of a reader object is to read data from an image + resource, and should be obtained by calling :func:`.get_reader`. + + A reader can be used as an iterator to read multiple images, + and (if the format permits) only reads data from the file when + new data is requested (i.e. streaming). A reader can also be + used as a context manager so that it is automatically closed. + + Plugins implement Reader's for different formats. Though rare, + plugins may provide additional functionality (beyond what is + provided by the base reader class). + """ + + def get_length(self): + """get_length() + + Get the number of images in the file. (Note: you can also + use ``len(reader_object)``.) + + The result can be: + * 0 for files that only have meta data + * 1 for singleton images (e.g. in PNG, JPEG, etc.) + * N for image series + * inf for streams (series of unknown length) + """ + return self._get_length() + + def get_data(self, index, **kwargs): + """get_data(index, **kwargs) + + Read image data from the file, using the image index. The + returned image has a 'meta' attribute with the meta data. + Raises IndexError if the index is out of range. + + Some formats may support additional keyword arguments. These are + listed in the documentation of those formats. 
+ """ + self._checkClosed() + self._BaseReaderWriter_last_index = index + try: + im, meta = self._get_data(index, **kwargs) + except StopIteration: + raise IndexError(index) + return Array(im, meta) # Array tests im and meta + + def get_next_data(self, **kwargs): + """get_next_data(**kwargs) + + Read the next image from the series. + + Some formats may support additional keyword arguments. These are + listed in the documentation of those formats. + """ + return self.get_data(self._BaseReaderWriter_last_index + 1, **kwargs) + + def set_image_index(self, index, **kwargs): + """set_image_index(index) + + Set the internal pointer such that the next call to + get_next_data() returns the image specified by the index + """ + self._checkClosed() + n = self.get_length() + self._BaseReaderWriter_last_index = min(max(index - 1, -1), n) + + def get_meta_data(self, index=None): + """get_meta_data(index=None) + + Read meta data from the file. using the image index. If the + index is omitted or None, return the file's (global) meta data. + + Note that ``get_data`` also provides the meta data for the returned + image as an attribute of that image. + + The meta data is a dict, which shape depends on the format. + E.g. for JPEG, the dict maps group names to subdicts and each + group is a dict with name-value pairs. The groups represent + the different metadata formats (EXIF, XMP, etc.). + """ + self._checkClosed() + meta = self._get_meta_data(index) + if not isinstance(meta, dict): + raise ValueError( + "Meta data must be a dict, not %r" % meta.__class__.__name__ + ) + return meta + + def iter_data(self): + """iter_data() + + Iterate over all images in the series. (Note: you can also + iterate over the reader object.) 
+ + """ + self._checkClosed() + n = self.get_length() + i = 0 + while i < n: + try: + im, meta = self._get_data(i) + except StopIteration: + return + except IndexError: + if n == float("inf"): + return + raise + yield Array(im, meta) + i += 1 + + # Compatibility + + def __iter__(self): + return self.iter_data() + + def __len__(self): + n = self.get_length() + if n == float("inf"): + n = sys.maxsize + return n + + # To implement + + def _get_length(self): + """_get_length() + + Plugins must implement this. + + The returned scalar specifies the number of images in the series. + See Reader.get_length for more information. + """ + raise NotImplementedError() + + def _get_data(self, index): + """_get_data() + + Plugins must implement this, but may raise an IndexError in + case the plugin does not support random access. + + It should return the image and meta data: (ndarray, dict). + """ + raise NotImplementedError() + + def _get_meta_data(self, index): + """_get_meta_data(index) + + Plugins must implement this. + + It should return the meta data as a dict, corresponding to the + given index, or to the file's (global) meta data if index is + None. + """ + raise NotImplementedError() + + # ----- + + class Writer(_BaseReaderWriter): + """ + The purpose of a writer object is to write data to an image + resource, and should be obtained by calling :func:`.get_writer`. + + A writer will (if the format permits) write data to the file + as soon as new data is provided (i.e. streaming). A writer can + also be used as a context manager so that it is automatically + closed. + + Plugins implement Writer's for different formats. Though rare, + plugins may provide additional functionality (beyond what is + provided by the base writer class). + """ + + def append_data(self, im, meta=None): + """append_data(im, meta={}) + + Append an image (and meta data) to the file. 
The final meta + data that is used consists of the meta data on the given + image (if applicable), updated with the given meta data. + """ + self._checkClosed() + # Check image data + if not isinstance(im, np.ndarray): + raise ValueError("append_data requires ndarray as first arg") + # Get total meta dict + total_meta = {} + if hasattr(im, "meta") and isinstance(im.meta, dict): + total_meta.update(im.meta) + if meta is None: + pass + elif not isinstance(meta, dict): + raise ValueError("Meta must be a dict.") + else: + total_meta.update(meta) + + # Decouple meta info + im = asarray(im) + # Call + return self._append_data(im, total_meta) + + def set_meta_data(self, meta): + """set_meta_data(meta) + + Sets the file's (global) meta data. The meta data is a dict which + shape depends on the format. E.g. for JPEG the dict maps + group names to subdicts, and each group is a dict with + name-value pairs. The groups represents the different + metadata formats (EXIF, XMP, etc.). + + Note that some meta formats may not be supported for + writing, and individual fields may be ignored without + warning if they are invalid. + """ + self._checkClosed() + if not isinstance(meta, dict): + raise ValueError("Meta must be a dict.") + else: + return self._set_meta_data(meta) + + # To implement + + def _append_data(self, im, meta): + # Plugins must implement this + raise NotImplementedError() + + def _set_meta_data(self, meta): + # Plugins must implement this + raise NotImplementedError() + + +class FormatManager(object): + """ + The FormatManager is a singleton plugin factory. + + The format manager supports getting a format object using indexing (by + format name or extension). When used as an iterator, this object + yields all registered format objects. + + See also :func:`.help`. 
+ """ + + @property + def _formats(self): + available_formats = list() + + for config in known_plugins.values(): + with contextlib.suppress(ImportError): + # if an exception is raised, then format not installed + if config.is_legacy and config.format is not None: + available_formats.append(config) + + return available_formats + + def __repr__(self): + return f"" + + def __iter__(self): + return iter(x.format for x in self._formats) + + def __len__(self): + return len(self._formats) + + def __str__(self): + ss = [] + for config in self._formats: + ext = config.legacy_args["extensions"] + desc = config.legacy_args["description"] + s = f"{config.name} - {desc} [{ext}]" + ss.append(s) + return "\n".join(ss) + + def __getitem__(self, name): + warnings.warn( + "The usage of `FormatManager` is deprecated and it will be " + "removed in Imageio v3. Use `iio.imopen` instead.", + DeprecationWarning, + stacklevel=2, + ) + + if not isinstance(name, str): + raise ValueError( + "Looking up a format should be done by name or by extension." + ) + + if name == "": + raise ValueError("No format matches the empty string.") + + # Test if name is existing file + if Path(name).is_file(): + # legacy compatibility - why test reading here?? + try: + return imopen(name, "r", legacy_mode=True)._format + except ValueError: + # no plugin can read the file + pass + + config = _get_config(name.upper()) + + try: + return config.format + except ImportError: + raise ImportError( + f"The `{config.name}` format is not installed. " + f"Use `pip install imageio[{config.install_name}]` to install it." + ) + + def sort(self, *names): + """sort(name1, name2, name3, ...) + + Sort the formats based on zero or more given names; a format with + a name that matches one of the given names will take precedence + over other formats. A match means an equal name, or ending with + that name (though the former counts higher). Case insensitive. 
+ + Format preference will match the order of the given names: using + ``sort('TIFF', '-FI', '-PIL')`` would prefer the FreeImage formats + over the Pillow formats, but prefer TIFF even more. Each time + this is called, the starting point is the default format order, + and calling ``sort()`` with no arguments will reset the order. + + Be aware that using the function can affect the behavior of + other code that makes use of imageio. + + Also see the ``IMAGEIO_FORMAT_ORDER`` environment variable. + """ + + warnings.warn( + "`FormatManager` is deprecated and it will be removed in ImageIO v3." + " Migrating `FormatManager.sort` depends on your use-case:\n" + "\t- modify `iio.config.known_plugins` to specify the search order for " + "unrecognized formats.\n" + "\t- modify `iio.config.known_extensions[].priority`" + " to control a specific extension.", + DeprecationWarning, + stacklevel=2, + ) + + # Check and sanitize input + for name in names: + if not isinstance(name, str): + raise TypeError("formats.sort() accepts only string names.") + if any(c in name for c in ".,"): + raise ValueError( + "Names given to formats.sort() should not " + "contain dots `.` or commas `,`." 
+ ) + + should_reset = len(names) == 0 + if should_reset: + names = _original_order + + sane_names = [name.strip().upper() for name in names if name != ""] + + # enforce order for every extension that uses it + flat_extensions = [ + ext for ext_list in known_extensions.values() for ext in ext_list + ] + for extension in flat_extensions: + if should_reset: + extension.reset() + continue + + for name in reversed(sane_names): + for plugin in [x for x in extension.default_priority]: + if plugin.endswith(name): + extension.priority.remove(plugin) + extension.priority.insert(0, plugin) + + old_order = known_plugins.copy() + known_plugins.clear() + + for name in sane_names: + plugin = old_order.pop(name, None) + if plugin is not None: + known_plugins[name] = plugin + + known_plugins.update(old_order) + + def add_format(self, iio_format, overwrite=False): + """add_format(format, overwrite=False) + + Register a format, so that imageio can use it. If a format with the + same name already exists, an error is raised, unless overwrite is True, + in which case the current format is replaced. + """ + + warnings.warn( + "`FormatManager` is deprecated and it will be removed in ImageIO v3." + "To migrate `FormatManager.add_format` add the plugin directly to " + "`iio.config.known_plugins`.", + DeprecationWarning, + stacklevel=2, + ) + + if not isinstance(iio_format, Format): + raise ValueError("add_format needs argument to be a Format object") + elif not overwrite and iio_format.name in self.get_format_names(): + raise ValueError( + f"A Format named {iio_format.name} is already registered, use" + " `overwrite=True` to replace." 
+ ) + + config = PluginConfig( + name=iio_format.name.upper(), + class_name=iio_format.__class__.__name__, + module_name=iio_format.__class__.__module__, + is_legacy=True, + install_name="unknown", + legacy_args={ + "name": iio_format.name, + "description": iio_format.description, + "extensions": " ".join(iio_format.extensions), + "modes": iio_format.modes, + }, + ) + + known_plugins[config.name] = config + + for extension in iio_format.extensions: + # be conservative and always treat it as a unique file format + ext = FileExtension( + extension=extension, + priority=[config.name], + name="Unique Format", + description="A format inserted at runtime." + f" It is being read by the `{config.name}` plugin.", + ) + known_extensions.setdefault(extension, list()).append(ext) + + def search_read_format(self, request): + """search_read_format(request) + + Search a format that can read a file according to the given request. + Returns None if no appropriate format was found. (used internally) + """ + + try: + # in legacy_mode imopen returns a LegacyPlugin + return imopen(request, request.mode.io_mode, legacy_mode=True)._format + except AttributeError: + warnings.warn( + "ImageIO now uses a v3 plugin when reading this format." + " Please migrate to the v3 API (preferred) or use imageio.v2.", + DeprecationWarning, + stacklevel=2, + ) + return None + except ValueError: + # no plugin can read this request + # but the legacy API doesn't raise + return None + + def search_write_format(self, request): + """search_write_format(request) + + Search a format that can write a file according to the given request. + Returns None if no appropriate format was found. (used internally) + """ + + try: + # in legacy_mode imopen returns a LegacyPlugin + return imopen(request, request.mode.io_mode, legacy_mode=True)._format + except AttributeError: + warnings.warn( + "ImageIO now uses a v3 plugin when writing this format." 
+ " Please migrate to the v3 API (preferred) or use imageio.v2.", + DeprecationWarning, + stacklevel=2, + ) + return None + except ValueError: + # no plugin can write this request + # but the legacy API doesn't raise + return None + + def get_format_names(self): + """Get the names of all registered formats.""" + + warnings.warn( + "`FormatManager` is deprecated and it will be removed in ImageIO v3." + "To migrate `FormatManager.get_format_names` use `iio.config.known_plugins.keys()` instead.", + DeprecationWarning, + stacklevel=2, + ) + + return [f.name for f in self._formats] + + def show(self): + """Show a nicely formatted list of available formats""" + print(self) diff --git a/.venv/Lib/site-packages/imageio/core/format.pyi b/.venv/Lib/site-packages/imageio/core/format.pyi new file mode 100644 index 00000000..c1c10b1d --- /dev/null +++ b/.venv/Lib/site-packages/imageio/core/format.pyi @@ -0,0 +1,87 @@ +from typing import Any, Dict, List, Optional, Union + +import numpy as np + +from ..typing import ArrayLike +from . import Array +from .request import Request +from ..config import PluginConfig + +def _get_config(plugin: str) -> PluginConfig: ... + +class Format(object): + @property + def doc(self) -> str: ... + @property + def name(self) -> str: ... + @property + def description(self) -> str: ... + @property + def extensions(self) -> List[str]: ... + @property + def modes(self) -> str: ... + def __init__( + self, + name: str, + description: str, + extensions: Union[str, list, tuple, None] = None, + modes: str = None, + ) -> None: ... + def __repr__(self) -> str: ... + def __str__(self) -> str: ... + def get_reader(self, request: Request) -> Reader: ... + def get_writer(self, request: Request) -> Writer: ... + def can_read(self, request: Request) -> bool: ... + def can_write(self, request: Request) -> bool: ... + def _can_read(self, request: Request) -> bool: ... + def _can_write(self, request: Request) -> bool: ... 
+ + class _BaseReaderWriter(object): + @property + def format(self) -> Format: ... + @property + def request(self) -> Request: ... + @property + def closed(self) -> bool: ... + def __init__(self, format: Format, request: Request) -> None: ... + def __enter__(self) -> Format._BaseReaderWriter: ... + def __exit__(self, type, value, traceback) -> None: ... + def __del__(self) -> None: ... + def close(self) -> None: ... + def _checkClosed(self, msg=None) -> None: ... + def _open(self, **kwargs) -> None: ... + def _close(self) -> None: ... + + class Reader(_BaseReaderWriter): + def get_length(self) -> int: ... + def get_data(self, index: int, **kwargs) -> Array: ... + def get_next_data(self, **kwargs) -> Dict[str, Any]: ... + def set_image_index(self, index: int, **kwargs) -> None: ... + def get_meta_data(self, index: int = None) -> Dict[str, Any]: ... + def iter_data(self) -> Array: ... + def __iter__(self) -> Array: ... + def __len__(self) -> int: ... + def _get_length(self) -> int: ... + def _get_data(self, index: int) -> Array: ... + def _get_meta_data(self, index: int) -> Dict[str, Any]: ... + + class Writer(_BaseReaderWriter): + def append_data(self, im: ArrayLike, meta: Dict[str, Any] = None) -> None: ... + def set_meta_data(self, meta: Dict[str, Any]) -> None: ... + def _append_data(self, im: ArrayLike, meta: Dict[str, Any]) -> None: ... + def _set_meta_data(self, meta: Dict[str, Any]) -> None: ... + +class FormatManager(object): + @property + def _formats(self) -> List[Format]: ... + def __repr__(self) -> str: ... + def __iter__(self) -> Format: ... + def __len__(self) -> int: ... + def __str__(self) -> str: ... + def __getitem__(self, name: str) -> Format: ... + def sort(self, *names: str) -> None: ... + def add_format(self, iio_format: Format, overwrite: bool = False) -> None: ... + def search_read_format(self, request: Request) -> Optional[Format]: ... + def search_write_format(self, request: Request) -> Optional[Format]: ... 
+ def get_format_names(self) -> List[str]: ... + def show(self) -> None: ... diff --git a/.venv/Lib/site-packages/imageio/core/imopen.py b/.venv/Lib/site-packages/imageio/core/imopen.py new file mode 100644 index 00000000..a84b2a90 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/core/imopen.py @@ -0,0 +1,281 @@ +from pathlib import Path +import warnings + +from ..config import known_plugins +from ..config.extensions import known_extensions +from .request import ( + SPECIAL_READ_URIS, + URI_FILENAME, + InitializationError, + IOMode, + Request, +) + + +def imopen( + uri, + io_mode, + *, + plugin=None, + extension=None, + format_hint=None, + legacy_mode=False, + **kwargs, +): + """Open an ImageResource. + + .. warning:: + This warning is for pypy users. If you are not using a context manager, + remember to deconstruct the returned plugin to avoid leaking the file + handle to an unclosed file. + + Parameters + ---------- + uri : str or pathlib.Path or bytes or file or Request + The :doc:`ImageResource <../../user_guide/requests>` to load the + image from. + io_mode : str + The mode in which the file is opened. Possible values are:: + + ``r`` - open the file for reading + ``w`` - open the file for writing + + Depreciated since v2.9: + A second character can be added to give the reader a hint on what + the user expects. This will be ignored by new plugins and will + only have an effect on legacy plugins. Possible values are:: + + ``i`` for a single image, + ``I`` for multiple images, + ``v`` for a single volume, + ``V`` for multiple volumes, + ``?`` for don't care + + plugin : str, Plugin, or None + The plugin to use. If set to None imopen will perform a + search for a matching plugin. If not None, this takes priority over + the provided format hint. + extension : str + If not None, treat the provided ImageResource as if it had the given + extension. 
This affects the order in which backends are considered, and + when writing this may also influence the format used when encoding. + format_hint : str + Deprecated. Use `extension` instead. + legacy_mode : bool + If true use the v2 behavior when searching for a suitable + plugin. This will ignore v3 plugins and will check ``plugin`` + against known extensions if no plugin with the given name can be found. + **kwargs : Any + Additional keyword arguments will be passed to the plugin upon + construction. + + Notes + ----- + Registered plugins are controlled via the ``known_plugins`` dict in + ``imageio.config``. + + Passing a ``Request`` as the uri is only supported if ``legacy_mode`` + is ``True``. In this case ``io_mode`` is ignored. + + Using the kwarg ``format_hint`` does not enforce the given format. It merely + provides a `hint` to the selection process and plugin. The selection + processes uses this hint for optimization; however, a plugin's decision how + to read a ImageResource will - typically - still be based on the content of + the resource. + + + Examples + -------- + + >>> import imageio.v3 as iio + >>> with iio.imopen("/path/to/image.png", "r") as file: + >>> im = file.read() + + >>> with iio.imopen("/path/to/output.jpg", "w") as file: + >>> file.write(im) + + """ + + if isinstance(uri, Request) and legacy_mode: + warnings.warn( + "`iio.core.Request` is a low-level object and using it" + " directly as input to `imopen` is discouraged. 
This will raise" + " an exception in ImageIO v3.", + DeprecationWarning, + stacklevel=2, + ) + + request = uri + uri = request.raw_uri + io_mode = request.mode.io_mode + request.format_hint = format_hint + else: + request = Request(uri, io_mode, format_hint=format_hint, extension=extension) + + source = "" if isinstance(uri, bytes) else uri + + # fast-path based on plugin + # (except in legacy mode) + if plugin is not None: + if isinstance(plugin, str): + try: + config = known_plugins[plugin] + except KeyError: + request.finish() + raise ValueError( + f"`{plugin}` is not a registered plugin name." + ) from None + + def loader(request, **kwargs): + return config.plugin_class(request, **kwargs) + + else: + + def loader(request, **kwargs): + return plugin(request, **kwargs) + + try: + return loader(request, **kwargs) + except InitializationError as class_specific: + err_from = class_specific + err_type = RuntimeError if legacy_mode else IOError + err_msg = f"`{plugin}` can not handle the given uri." + except ImportError: + err_from = None + err_type = ImportError + err_msg = ( + f"The `{config.name}` plugin is not installed. " + f"Use `pip install imageio[{config.install_name}]` to install it." + ) + except Exception as generic_error: + err_from = generic_error + err_type = IOError + err_msg = f"An unknown error occurred while initializing plugin `{plugin}`." 
+ + request.finish() + raise err_type(err_msg) from err_from + + # fast-path based on format_hint + if request.format_hint is not None: + for candidate_format in known_extensions[format_hint]: + for plugin_name in candidate_format.priority: + config = known_plugins[plugin_name] + + try: + candidate_plugin = config.plugin_class + except ImportError: + # not installed + continue + + try: + plugin_instance = candidate_plugin(request, **kwargs) + except InitializationError: + # file extension doesn't match file type + continue + + return plugin_instance + else: + resource = ( + "" if isinstance(request.raw_uri, bytes) else request.raw_uri + ) + warnings.warn(f"`{resource}` can not be opened as a `{format_hint}` file.") + + # fast-path based on file extension + if request.extension in known_extensions: + for candidate_format in known_extensions[request.extension]: + for plugin_name in candidate_format.priority: + config = known_plugins[plugin_name] + + try: + candidate_plugin = config.plugin_class + except ImportError: + # not installed + continue + + try: + plugin_instance = candidate_plugin(request, **kwargs) + except InitializationError: + # file extension doesn't match file type + continue + + return plugin_instance + + # error out for read-only special targets + # this is hacky; can we come up with a better solution for this? + if request.mode.io_mode == IOMode.write: + if isinstance(uri, str) and uri.startswith(SPECIAL_READ_URIS): + request.finish() + err_type = ValueError if legacy_mode else IOError + err_msg = f"`{source}` is read-only." + raise err_type(err_msg) + + # error out for directories + # this is a bit hacky and should be cleaned once we decide + # how to gracefully handle DICOM + if request._uri_type == URI_FILENAME and Path(request.raw_uri).is_dir(): + request.finish() + err_type = ValueError if legacy_mode else IOError + err_msg = ( + "ImageIO does not generally support reading folders. " + "Limited support may be available via specific plugins. 
" + "Specify the plugin explicitly using the `plugin` kwarg, e.g. `plugin='DICOM'`" + ) + raise err_type(err_msg) + + # close the current request here and use fresh/new ones while trying each + # plugin This is slow (means potentially reopening a resource several + # times), but should only happen rarely because this is the fallback if all + # else fails. + request.finish() + + # fallback option: try all plugins + for config in known_plugins.values(): + # each plugin gets its own request + request = Request(uri, io_mode, format_hint=format_hint) + + try: + plugin_instance = config.plugin_class(request, **kwargs) + except InitializationError: + continue + except ImportError: + continue + else: + return plugin_instance + + err_type = ValueError if legacy_mode else IOError + err_msg = f"Could not find a backend to open `{source}`` with iomode `{io_mode}`." + + # check if a missing plugin could help + if request.extension in known_extensions: + missing_plugins = list() + + formats = known_extensions[request.extension] + plugin_names = [ + plugin for file_format in formats for plugin in file_format.priority + ] + for name in plugin_names: + config = known_plugins[name] + + try: + config.plugin_class + continue + except ImportError: + missing_plugins.append(config) + + if len(missing_plugins) > 0: + install_candidates = "\n".join( + [ + ( + f" {config.name}: " + f"pip install imageio[{config.install_name}]" + ) + for config in missing_plugins + ] + ) + err_msg += ( + "\nBased on the extension, the following plugins might add capable backends:\n" + f"{install_candidates}" + ) + + request.finish() + raise err_type(err_msg) diff --git a/.venv/Lib/site-packages/imageio/core/imopen.pyi b/.venv/Lib/site-packages/imageio/core/imopen.pyi new file mode 100644 index 00000000..00659591 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/core/imopen.pyi @@ -0,0 +1,87 @@ +from typing import Literal, Type, TypeVar, overload + +from ..plugins.opencv import OpenCVPlugin +from 
..plugins.pillow import PillowPlugin +from ..plugins.pyav import PyAVPlugin +from ..plugins.tifffile_v3 import TifffilePlugin +from ..typing import ImageResource +from .legacy_plugin_wrapper import LegacyPlugin +from .v3_plugin_api import PluginV3 + +CustomPlugin = TypeVar("CustomPlugin", bound=PluginV3) + +@overload +def imopen( + uri: ImageResource, + io_mode: Literal["r", "w"], + *, + extension: str = None, + format_hint: str = None, +) -> PluginV3: ... +@overload +def imopen( + uri: ImageResource, + io_mode: Literal["r", "w"], + *, + plugin: str = None, + format_hint: str = None, + extension: str = None, + legacy_mode: Literal[True], + **kwargs, +) -> LegacyPlugin: ... +@overload +def imopen( + uri: ImageResource, + io_mode: Literal["r", "w"], + *, + format_hint: str = None, + extension: str = None, + legacy_mode: Literal[False] = False, +) -> PluginV3: ... +@overload +def imopen( + uri: ImageResource, + io_mode: Literal["r", "w"], + *, + plugin: Literal["pillow"], + extension: str = None, + format_hint: str = None, +) -> PillowPlugin: ... +@overload +def imopen( + uri: ImageResource, + io_mode: Literal["r", "w"], + *, + plugin: Literal["pyav"], + extension: str = None, + format_hint: str = None, + container: str = None, +) -> PyAVPlugin: ... +@overload +def imopen( + uri, + io_mode: Literal["r", "w"], + *, + plugin: Literal["opencv"], + extension: str = None, + format_hint: str = None, +) -> OpenCVPlugin: ... +@overload +def imopen( + uri, + io_mode: Literal["r", "w"], + *, + plugin: Literal["tifffile"], + extension: str = None, + format_hint: str = None, +) -> TifffilePlugin: ... +@overload +def imopen( + uri: ImageResource, + io_mode: Literal["r", "w"], + *, + plugin: Type[CustomPlugin], + extension: str = None, + format_hint: str = None, + **kwargs, +) -> CustomPlugin: ... 
diff --git a/.venv/Lib/site-packages/imageio/core/legacy_plugin_wrapper.py b/.venv/Lib/site-packages/imageio/core/legacy_plugin_wrapper.py new file mode 100644 index 00000000..71d2a0ac --- /dev/null +++ b/.venv/Lib/site-packages/imageio/core/legacy_plugin_wrapper.py @@ -0,0 +1,363 @@ +from pathlib import Path + +import numpy as np + +from ..config import known_extensions +from .request import InitializationError, IOMode +from .v3_plugin_api import ImageProperties, PluginV3 + + +def _legacy_default_index(format): + if format._name == "FFMPEG": + index = Ellipsis + elif format._name == "GIF-PIL": + index = Ellipsis + else: + index = 0 + + return index + + +class LegacyPlugin(PluginV3): + """A plugin to make old (v2.9) plugins compatible with v3.0 + + .. depreciated:: 2.9 + `legacy_get_reader` will be removed in a future version of imageio. + `legacy_get_writer` will be removed in a future version of imageio. + + This plugin is a wrapper around the old FormatManager class and exposes + all the old plugins via the new API. On top of this it has + ``legacy_get_reader`` and ``legacy_get_writer`` methods to allow using + it with the v2.9 API. + + Methods + ------- + read(index=None, **kwargs) + Read the image at position ``index``. + write(image, **kwargs) + Write image to the URI. + iter(**kwargs) + Iteratively yield images from the given URI. + get_meta(index=None) + Return the metadata for the image at position ``index``. + legacy_get_reader(**kwargs) + Returns the v2.9 image reader. (depreciated) + legacy_get_writer(**kwargs) + Returns the v2.9 image writer. 
(depreciated) + + Examples + -------- + + >>> import imageio.v3 as iio + >>> with iio.imopen("/path/to/image.tiff", "r", legacy_mode=True) as file: + >>> reader = file.legacy_get_reader() # depreciated + >>> for im in file.iter(): + >>> print(im.shape) + + """ + + def __init__(self, request, legacy_plugin): + """Instantiate a new Legacy Plugin + + Parameters + ---------- + uri : {str, pathlib.Path, bytes, file} + The resource to load the image from, e.g. a filename, pathlib.Path, + http address or file object, see the docs for more info. + legacy_plugin : Format + The (legacy) format to use to interface with the URI. + + """ + self._request = request + self._format = legacy_plugin + + source = ( + "" + if isinstance(self._request.raw_uri, bytes) + else self._request.raw_uri + ) + if self._request.mode.io_mode == IOMode.read: + if not self._format.can_read(request): + raise InitializationError( + f"`{self._format.name}`" f" can not read `{source}`." + ) + else: + if not self._format.can_write(request): + raise InitializationError( + f"`{self._format.name}`" f" can not write to `{source}`." + ) + + def legacy_get_reader(self, **kwargs): + """legacy_get_reader(**kwargs) + + a utility method to provide support vor the V2.9 API + + Parameters + ---------- + kwargs : ... + Further keyword arguments are passed to the reader. See :func:`.help` + to see what arguments are available for a particular format. + """ + + # Note: this will break thread-safety + self._request._kwargs = kwargs + + # safeguard for DICOM plugin reading from folders + try: + assert Path(self._request.filename).is_dir() + except OSError: + pass # not a valid path on this OS + except AssertionError: + pass # not a folder + else: + return self._format.get_reader(self._request) + + self._request.get_file().seek(0) + return self._format.get_reader(self._request) + + def read(self, *, index=None, **kwargs): + """ + Parses the given URI and creates a ndarray from it. 
+ + Parameters + ---------- + index : {integer, None} + If the URI contains a list of ndimages return the index-th + image. If None, stack all images into an ndimage along the + 0-th dimension (equivalent to np.stack(imgs, axis=0)). + kwargs : ... + Further keyword arguments are passed to the reader. See + :func:`.help` to see what arguments are available for a particular + format. + + Returns + ------- + ndimage : np.ndarray + A numpy array containing the decoded image data. + + """ + + if index is None: + index = _legacy_default_index(self._format) + + if index is Ellipsis: + img = np.stack([im for im in self.iter(**kwargs)]) + return img + + reader = self.legacy_get_reader(**kwargs) + return reader.get_data(index) + + def legacy_get_writer(self, **kwargs): + """legacy_get_writer(**kwargs) + + Returns a :class:`.Writer` object which can be used to write data + and meta data to the specified file. + + Parameters + ---------- + kwargs : ... + Further keyword arguments are passed to the writer. See :func:`.help` + to see what arguments are available for a particular format. + """ + + # Note: this will break thread-safety + self._request._kwargs = kwargs + return self._format.get_writer(self._request) + + def write(self, ndimage, *, is_batch=None, metadata=None, **kwargs): + """ + Write an ndimage to the URI specified in path. + + If the URI points to a file on the current host and the file does not + yet exist it will be created. If the file exists already, it will be + appended if possible; otherwise, it will be replaced. + + Parameters + ---------- + ndimage : numpy.ndarray + The ndimage or list of ndimages to write. + is_batch : bool + If True, treat the supplied ndimage as a batch of images. If False, + treat the supplied ndimage as a single image. If None, try to + determine ``is_batch`` from the ndimage's shape and ndim. + metadata : dict + The metadata passed to write alongside the image. + kwargs : ... + Further keyword arguments are passed to the writer. 
See + :func:`.help` to see what arguments are available for a + particular format. + + + Returns + ------- + buffer : bytes + When writing to the special target "", this function will + return the encoded image data as a bytes string. Otherwise it + returns None. + + Notes + ----- + Automatically determining ``is_batch`` may fail for some images due to + shape aliasing. For example, it may classify a channel-first color image + as a batch of gray images. In most cases this automatic deduction works + fine (it has for almost a decade), but if you do have one of those edge + cases (or are worried that you might) consider explicitly setting + ``is_batch``. + + """ + + if is_batch or isinstance(ndimage, (list, tuple)): + pass # ndimage is list of images + elif is_batch is False: + ndimage = [ndimage] + else: + # Write the largest possible block by guessing the meaning of each + # dimension from the shape/ndim and then checking if any batch + # dimensions are left. + ndimage = np.asanyarray(ndimage) + batch_dims = ndimage.ndim + + # two spatial dimensions + batch_dims = max(batch_dims - 2, 0) + + # packed (channel-last) image + if ndimage.ndim >= 3 and ndimage.shape[-1] < 5: + batch_dims = max(batch_dims - 1, 0) + + # format supports volumetric images + ext_infos = known_extensions.get(self._request.extension, list()) + for ext_info in ext_infos: + if self._format.name in ext_info.priority and ext_info.volume_support: + batch_dims = max(batch_dims - 1, 0) + break + + if batch_dims == 0: + ndimage = [ndimage] + + with self.legacy_get_writer(**kwargs) as writer: + for image in ndimage: + image = np.asanyarray(image) + + if image.ndim < 2: + raise ValueError( + "The image must have at least two spatial dimensions." + ) + + if not np.issubdtype(image.dtype, np.number) and not np.issubdtype( + image.dtype, bool + ): + raise ValueError( + f"All images have to be numeric, and not `{image.dtype}`." 
+ ) + + writer.append_data(image, metadata) + + return writer.request.get_result() + + def iter(self, **kwargs): + """Iterate over a list of ndimages given by the URI + + Parameters + ---------- + kwargs : ... + Further keyword arguments are passed to the reader. See + :func:`.help` to see what arguments are available for a particular + format. + """ + + reader = self.legacy_get_reader(**kwargs) + for image in reader: + yield image + + def properties(self, index=None): + """Standardized ndimage metadata. + + Parameters + ---------- + index : int + The index of the ndimage for which to return properties. If the + index is out of bounds a ``ValueError`` is raised. If ``None``, + return the properties for the ndimage stack. If this is impossible, + e.g., due to shape mismatch, an exception will be raised. + + Returns + ------- + properties : ImageProperties + A dataclass filled with standardized image metadata. + + """ + + if index is None: + index = _legacy_default_index(self._format) + + # for backwards compatibility ... actually reads pixel data :( + if index is Ellipsis: + image = self.read(index=0) + n_images = self.legacy_get_reader().get_length() + return ImageProperties( + shape=(n_images, *image.shape), + dtype=image.dtype, + n_images=n_images, + is_batch=True, + ) + + image = self.read(index=index) + return ImageProperties( + shape=image.shape, + dtype=image.dtype, + is_batch=False, + ) + + def get_meta(self, *, index=None): + """Read ndimage metadata from the URI + + Parameters + ---------- + index : {integer, None} + If the URI contains a list of ndimages return the metadata + corresponding to the index-th image. If None, behavior depends on + the used api + + Legacy-style API: return metadata of the first element (index=0) + New-style API: Behavior depends on the used Plugin. + + Returns + ------- + metadata : dict + A dictionary of metadata. 
+ + """ + + return self.metadata(index=index, exclude_applied=False) + + def metadata(self, index=None, exclude_applied: bool = True): + """Format-Specific ndimage metadata. + + Parameters + ---------- + index : int + The index of the ndimage to read. If the index is out of bounds a + ``ValueError`` is raised. If ``None``, global metadata is returned. + exclude_applied : bool + This parameter exists for compatibility and has no effect. Legacy + plugins always report all metadata they find. + + Returns + ------- + metadata : dict + A dictionary filled with format-specific metadata fields and their + values. + + """ + + if index is None: + index = _legacy_default_index(self._format) + + return self.legacy_get_reader().get_meta_data(index=index) + + def __del__(self) -> None: + pass + # turns out we can't close the file here for LegacyPlugin + # because it would break backwards compatibility + # with legacy_get_writer and legacy_get_reader + # self._request.finish() diff --git a/.venv/Lib/site-packages/imageio/core/legacy_plugin_wrapper.pyi b/.venv/Lib/site-packages/imageio/core/legacy_plugin_wrapper.pyi new file mode 100644 index 00000000..52e3ec52 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/core/legacy_plugin_wrapper.pyi @@ -0,0 +1,27 @@ +import numpy as np +from typing import Optional, Dict, Any, Union, List, Iterator + +from .request import Request +from .v3_plugin_api import PluginV3, ImageProperties +from .format import Format +from ..typing import ArrayLike + +class LegacyPlugin(PluginV3): + def __init__(self, request: Request, legacy_plugin: Format) -> None: ... + def legacy_get_reader(self, **kwargs) -> Format.Reader: ... + def read(self, *, index: Optional[int] = 0, **kwargs) -> np.ndarray: ... + def legacy_get_writer(self, **kwargs) -> Format.Writer: ... + def write( + self, + ndimage: Union[ArrayLike, List[ArrayLike]], + *, + is_batch: bool = None, + **kwargs + ) -> Optional[bytes]: ... + def iter(self, **kwargs) -> Iterator[np.ndarray]: ... 
+ def properties(self, index: Optional[int] = 0) -> ImageProperties: ... + def get_meta(self, *, index: Optional[int] = 0) -> Dict[str, Any]: ... + def metadata( + self, index: Optional[int] = 0, exclude_applied: bool = True + ) -> Dict[str, Any]: ... + def __del__(self) -> None: ... diff --git a/.venv/Lib/site-packages/imageio/core/request.py b/.venv/Lib/site-packages/imageio/core/request.py new file mode 100644 index 00000000..f42da196 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/core/request.py @@ -0,0 +1,751 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +""" +Definition of the Request object, which acts as a kind of bridge between +what the user wants and what the plugins can. +""" + +import os +from io import BytesIO +import zipfile +import tempfile +import shutil +import enum +import warnings + +from ..core import urlopen, get_remote_file + +from pathlib import Path +from urllib.parse import urlparse +from typing import Optional + +# URI types +URI_BYTES = 1 +URI_FILE = 2 +URI_FILENAME = 3 +URI_ZIPPED = 4 +URI_HTTP = 5 +URI_FTP = 6 + + +class IOMode(str, enum.Enum): + """Available Image modes + + This is a helper enum for ``Request.Mode`` which is a composite of a + ``Request.ImageMode`` and ``Request.IOMode``. The IOMode that tells the + plugin if the resource should be read from or written to. Available values are + + - read ("r"): Read from the specified resource + - write ("w"): Write to the specified resource + + """ + + read = "r" + write = "w" + + +class ImageMode(str, enum.Enum): + """Available Image modes + + This is a helper enum for ``Request.Mode`` which is a composite of a + ``Request.ImageMode`` and ``Request.IOMode``. The image mode that tells the + plugin the desired (and expected) image shape. 
@enum.unique
class Mode(str, enum.Enum):
    """Composite read/write + image-shape mode for a resource.

    A ``Mode`` pairs an :class:`IOMode` (first character, ``"r"``/``"w"``)
    with an :class:`ImageMode` (second character, one of ``"i"``, ``"I"``,
    ``"v"``, ``"V"``, ``"?"``).  See those two enums for what each
    character means.  Every combination of the two is a valid member.

    Examples
    --------
    >>> Request.Mode("rI")  # a list of simple images should be read from the resource
    >>> Request.Mode("wv")  # a single volume should be written to the resource

    """

    read_single_image = "ri"
    read_multi_image = "rI"
    read_single_volume = "rv"
    read_multi_volume = "rV"
    read_any = "r?"
    write_single_image = "wi"
    write_multi_image = "wI"
    write_single_volume = "wv"
    write_multi_volume = "wV"
    write_any = "w?"

    @classmethod
    def _missing_(cls, value):
        """Translate the v3-style modes "r"/"w" to their legacy wildcards.

        ``_missing_`` is the enum hook invoked when normal value lookup
        fails; we use it so that ``Mode("r")`` resolves to ``Mode("r?")``
        and ``Mode("w")`` to ``Mode("w?")``.

        More info on _missing_:
        https://docs.python.org/3/library/enum.html#supported-sunder-names
        """

        translated = {"r": "r?", "w": "w?"}.get(value)
        if translated is None:
            raise ValueError(f"{value} is no valid Mode.")
        return cls(translated)

    @property
    def io_mode(self) -> "IOMode":
        # First character encodes read/write.  (Annotation kept as a string
        # so this class can be defined before/without IOMode in scope.)
        return IOMode(self.value[0])

    @property
    def image_mode(self) -> "ImageMode":
        # Second character encodes the expected image shape.
        return ImageMode(self.value[1])

    def __getitem__(self, key):
        """For backwards compatibility with the old non-enum modes."""
        if key not in (0, 1):
            raise IndexError(f"Mode has no item {key}")
        return self.io_mode if key == 0 else self.image_mode


# NOTE(review): these sentinel strings appear to have been stripped by text
# sanitization (upstream imageio uses angle-bracket markers here, e.g.
# "<bytes>") — verify against the canonical source.
SPECIAL_READ_URIS = "", ""

# The user can use this string in a write call to get the data back as bytes.
+RETURN_BYTES = "" + +# Example images that will be auto-downloaded +EXAMPLE_IMAGES = { + "astronaut.png": "Image of the astronaut Eileen Collins", + "camera.png": "A grayscale image of a photographer", + "checkerboard.png": "Black and white image of a chekerboard", + "wood.jpg": "A (repeatable) texture of wooden planks", + "bricks.jpg": "A (repeatable) texture of stone bricks", + "clock.png": "Photo of a clock with motion blur (Stefan van der Walt)", + "coffee.png": "Image of a cup of coffee (Rachel Michetti)", + "chelsea.png": "Image of Stefan's cat", + "wikkie.png": "Image of Almar's cat", + "coins.png": "Image showing greek coins from Pompeii", + "horse.png": "Image showing the silhouette of a horse (Andreas Preuss)", + "hubble_deep_field.png": "Photograph taken by Hubble telescope (NASA)", + "immunohistochemistry.png": "Immunohistochemical (IHC) staining", + "moon.png": "Image showing a portion of the surface of the moon", + "page.png": "A scanned page of text", + "text.png": "A photograph of handdrawn text", + "chelsea.zip": "The chelsea.png in a zipfile (for testing)", + "chelsea.bsdf": "The chelsea.png in a BSDF file(for testing)", + "newtonscradle.gif": "Animated GIF of a newton's cradle", + "cockatoo.mp4": "Video file of a cockatoo", + "stent.npz": "Volumetric image showing a stented abdominal aorta", + "meadow_cube.jpg": "A cubemap image of a meadow, e.g. to render a skybox.", +} + + +class Request(object): + """ImageResource handling utility. + + Represents a request for reading or saving an image resource. This + object wraps information to that request and acts as an interface + for the plugins to several resources; it allows the user to read + from filenames, files, http, zipfiles, raw bytes, etc., but offer + a simple interface to the plugins via ``get_file()`` and + ``get_local_filename()``. 
+ + For each read/write operation a single Request instance is used and passed + to the can_read/can_write method of a format, and subsequently to + the Reader/Writer class. This allows rudimentary passing of + information between different formats and between a format and + associated reader/writer. + + Parameters + ---------- + uri : {str, bytes, file} + The resource to load the image from. + mode : str + The first character is "r" or "w", indicating a read or write + request. The second character is used to indicate the kind of data: + "i" for an image, "I" for multiple images, "v" for a volume, + "V" for multiple volumes, "?" for don't care. + + """ + + def __init__(self, uri, mode, *, extension=None, format_hint: str = None, **kwargs): + # General + self.raw_uri = uri + self._uri_type = None + self._filename = None + self._extension = None + self._format_hint = None + self._kwargs = kwargs + self._result = None # Some write actions may have a result + + # To handle the user-side + self._filename_zip = None # not None if a zipfile is used + self._bytes = None # Incoming bytes + self._zipfile = None # To store a zipfile instance (if used) + + # To handle the plugin side + self._file = None # To store the file instance + self._file_is_local = False # whether the data needs to be copied at end + self._filename_local = None # not None if using tempfile on this FS + self._firstbytes = None # For easy header parsing + + # To store formats that may be able to fulfil this request + # self._potential_formats = [] + + # Check mode + try: + self._mode = Mode(mode) + except ValueError: + raise ValueError(f"Invalid Request.Mode: {mode}") + + # Parse what was given + self._parse_uri(uri) + + # Set extension + if extension is not None: + if extension[0] != ".": + raise ValueError( + "`extension` should be a file extension starting with a `.`," + f" but is `{extension}`." 
+ ) + self._extension = extension + elif self._filename is not None: + if self._uri_type in (URI_FILENAME, URI_ZIPPED): + path = self._filename + else: + path = urlparse(self._filename).path + ext = Path(path).suffix.lower() + self._extension = ext if ext != "" else None + + if format_hint is not None: + warnings.warn( + "The usage of `format_hint` is deprecated and will be removed " + "in ImageIO v3. Use `extension` instead.", + DeprecationWarning, + ) + + if format_hint is not None and format_hint[0] != ".": + raise ValueError( + "`format_hint` should be a file extension starting with a `.`," + f" but is `{format_hint}`." + ) + + self.format_hint = format_hint + + def _parse_uri(self, uri): + """Try to figure our what we were given""" + is_read_request = self.mode.io_mode is IOMode.read + is_write_request = self.mode.io_mode is IOMode.write + + if isinstance(uri, str): + # Explicit + if uri.startswith("imageio:"): + if is_write_request: + raise RuntimeError("Cannot write to the standard images.") + fn = uri.split(":", 1)[-1].lower() + fn, _, zip_part = fn.partition(".zip/") + if zip_part: + fn += ".zip" + if fn not in EXAMPLE_IMAGES: + raise ValueError("Unknown standard image %r." 
% fn) + self._uri_type = URI_FILENAME + self._filename = get_remote_file("images/" + fn, auto=True) + if zip_part: + self._filename += "/" + zip_part + elif uri.startswith("http://") or uri.startswith("https://"): + self._uri_type = URI_HTTP + self._filename = uri + elif uri.startswith("ftp://") or uri.startswith("ftps://"): + self._uri_type = URI_FTP + self._filename = uri + elif uri.startswith("file://"): + self._uri_type = URI_FILENAME + self._filename = uri[7:] + elif uri.startswith(SPECIAL_READ_URIS) and is_read_request: + self._uri_type = URI_BYTES + self._filename = uri + elif uri.startswith(RETURN_BYTES) and is_write_request: + self._uri_type = URI_BYTES + self._filename = uri + else: + self._uri_type = URI_FILENAME + self._filename = uri + + elif isinstance(uri, memoryview) and is_read_request: + self._uri_type = URI_BYTES + self._filename = "" + self._bytes = uri.tobytes() + elif isinstance(uri, bytes) and is_read_request: + self._uri_type = URI_BYTES + self._filename = "" + self._bytes = uri + elif isinstance(uri, Path): + self._uri_type = URI_FILENAME + self._filename = str(uri) + # Files + elif is_read_request: + if hasattr(uri, "read") and hasattr(uri, "close"): + self._uri_type = URI_FILE + self._filename = "" + self._file = uri # Data must be read from here + elif is_write_request: + if hasattr(uri, "write") and hasattr(uri, "close"): + self._uri_type = URI_FILE + self._filename = "" + self._file = uri # Data must be written here + + # Expand user dir + if self._uri_type == URI_FILENAME and self._filename.startswith("~"): + self._filename = os.path.expanduser(self._filename) + + # Check if a zipfile + if self._uri_type == URI_FILENAME: + # Search for zip extension followed by a path separator + for needle in [".zip/", ".zip\\"]: + zip_i = self._filename.lower().find(needle) + if zip_i > 0: + zip_i += 4 + zip_path = self._filename[:zip_i] + if os.path.isdir(zip_path): + pass # is an existing dir (see #548) + elif is_write_request or 
os.path.isfile(zip_path): + self._uri_type = URI_ZIPPED + self._filename_zip = ( + zip_path, + self._filename[zip_i:].lstrip("/\\"), + ) + break + + # Check if we could read it + if self._uri_type is None: + uri_r = repr(uri) + if len(uri_r) > 60: + uri_r = uri_r[:57] + "..." + raise IOError("Cannot understand given URI: %s." % uri_r) + + # Check if this is supported + noWriting = [URI_HTTP, URI_FTP] + if is_write_request and self._uri_type in noWriting: + raise IOError("imageio does not support writing to http/ftp.") + + # Deprecated way to load standard images, give a sensible error message + if is_read_request and self._uri_type in [URI_FILENAME, URI_ZIPPED]: + fn = self._filename + if self._filename_zip: + fn = self._filename_zip[0] + if (not os.path.exists(fn)) and (fn in EXAMPLE_IMAGES): + raise IOError( + "No such file: %r. This file looks like one of " + "the standard images, but from imageio 2.1, " + "standard images have to be specified using " + '"imageio:%s".' % (fn, fn) + ) + + # Make filename absolute + if self._uri_type in [URI_FILENAME, URI_ZIPPED]: + if self._filename_zip: + self._filename_zip = ( + os.path.abspath(self._filename_zip[0]), + self._filename_zip[1], + ) + else: + self._filename = os.path.abspath(self._filename) + + # Check whether file name is valid + if self._uri_type in [URI_FILENAME, URI_ZIPPED]: + fn = self._filename + if self._filename_zip: + fn = self._filename_zip[0] + if is_read_request: + # Reading: check that the file exists (but is allowed a dir) + if not os.path.exists(fn): + raise FileNotFoundError("No such file: '%s'" % fn) + else: + # Writing: check that the directory to write to does exist + dn = os.path.dirname(fn) + if not os.path.exists(dn): + raise FileNotFoundError("The directory %r does not exist" % dn) + + @property + def filename(self): + """Name of the ImageResource. + + + The uri for which reading/saving was requested. This + can be a filename, an http address, or other resource + identifier. 
Do not rely on the filename to obtain the data, + but use ``get_file()`` or ``get_local_filename()`` instead. + """ + return self._filename + + @property + def extension(self) -> str: + """The (lowercase) extension of the requested filename. + Suffixes in url's are stripped. Can be None if the request is + not based on a filename. + """ + return self._extension + + @property + def format_hint(self) -> Optional[str]: + return self._format_hint + + @format_hint.setter + def format_hint(self, format: str) -> None: + self._format_hint = format + if self._extension is None: + self._extension = format + + @property + def mode(self): + """The mode of the request. The first character is "r" or "w", + indicating a read or write request. The second character is + used to indicate the kind of data: + "i" for an image, "I" for multiple images, "v" for a volume, + "V" for multiple volumes, "?" for don't care. + """ + return self._mode + + @property + def kwargs(self): + """The dict of keyword arguments supplied by the user.""" + return self._kwargs + + # For obtaining data + + def get_file(self): + """get_file() + Get a file object for the resource associated with this request. + If this is a reading request, the file is in read mode, + otherwise in write mode. This method is not thread safe. Plugins + should not close the file when done. + + This is the preferred way to read/write the data. But if a + format cannot handle file-like objects, they should use + ``get_local_filename()``. + """ + want_to_write = self.mode.io_mode is IOMode.write + + # Is there already a file? + # Either _uri_type == URI_FILE, or we already opened the file, + # e.g. 
by using firstbytes + if self._file is not None: + return self._file + + if self._uri_type == URI_BYTES: + if want_to_write: + # Create new file object, we catch the bytes in finish() + self._file = BytesIO() + self._file_is_local = True + else: + self._file = BytesIO(self._bytes) + + elif self._uri_type == URI_FILENAME: + if want_to_write: + self._file = open(self.filename, "wb") + else: + self._file = open(self.filename, "rb") + + elif self._uri_type == URI_ZIPPED: + # Get the correct filename + filename, name = self._filename_zip + if want_to_write: + # Create new file object, we catch the bytes in finish() + self._file = BytesIO() + self._file_is_local = True + else: + # Open zipfile and open new file object for specific file + self._zipfile = zipfile.ZipFile(filename, "r") + self._file = self._zipfile.open(name, "r") + self._file = SeekableFileObject(self._file) + + elif self._uri_type in [URI_HTTP or URI_FTP]: + assert not want_to_write # This should have been tested in init + timeout = os.getenv("IMAGEIO_REQUEST_TIMEOUT") + if timeout is None or not timeout.isdigit(): + timeout = 5 + self._file = urlopen(self.filename, timeout=float(timeout)) + self._file = SeekableFileObject(self._file) + + return self._file + + def get_local_filename(self): + """get_local_filename() + If the filename is an existing file on this filesystem, return + that. Otherwise a temporary file is created on the local file + system which can be used by the format to read from or write to. + """ + + if self._uri_type == URI_FILENAME: + return self._filename + else: + # Get filename + if self.extension is not None: + ext = self.extension + else: + ext = os.path.splitext(self._filename)[1] + fd, self._filename_local = tempfile.mkstemp(ext, "imageio_") + os.close(fd) + # Write stuff to it? 
+ if self.mode.io_mode == IOMode.read: + with open(self._filename_local, "wb") as file: + shutil.copyfileobj(self.get_file(), file) + return self._filename_local + + def finish(self) -> None: + """Wrap up this request. + + Finishes any pending reads or writes, closes any open files and frees + any resources allocated by this request. + """ + + if self.mode.io_mode == IOMode.write: + # See if we "own" the data and must put it somewhere + bytes = None + if self._filename_local: + bytes = Path(self._filename_local).read_bytes() + elif self._file_is_local: + self._file_is_local = False + bytes = self._file.getvalue() + + # Put the data in the right place + if bytes is not None: + if self._uri_type == URI_BYTES: + self._result = bytes # Picked up by imread function + elif self._uri_type == URI_FILE: + self._file.write(bytes) + elif self._uri_type == URI_ZIPPED: + zf = zipfile.ZipFile(self._filename_zip[0], "a") + zf.writestr(self._filename_zip[1], bytes) + zf.close() + # elif self._uri_type == URI_FILENAME: -> is always direct + # elif self._uri_type == URI_FTP/HTTP: -> write not supported + + # Close open files that we know of (and are responsible for) + if self._file and self._uri_type != URI_FILE: + self._file.close() + self._file = None + if self._zipfile: + self._zipfile.close() + self._zipfile = None + + # Remove temp file + if self._filename_local: + try: + os.remove(self._filename_local) + except Exception: # pragma: no cover + warnings.warn( + "Failed to delete the temporary file at " + f"`{self._filename_local}`. Please report this issue." + ) + self._filename_local = None + + # Detach so gc can clean even if a reference of self lingers + self._bytes = None + + def get_result(self): + """For internal use. In some situations a write action can have + a result (bytes data). That is obtained with this function. + """ + # Is there a reason to disallow reading multiple times? 
def read_n_bytes(f, N):
    """read_n_bytes(file, n)

    Read n bytes from the given file, or less if the file has fewer
    bytes. Returns zero bytes if the file is closed.
    """
    bb = bytes()
    # Loop because a single read() on a raw stream may return short.
    while len(bb) < N:
        extra_bytes = f.read(N - len(bb))
        if not extra_bytes:
            break
        bb += extra_bytes
    return bb


class SeekableFileObject:
    """A readonly wrapper file object that adds support for seeking, even
    if the wrapped file object does not. This allows us to stream from
    http and still use Pillow.

    Data read from the wrapped stream is accumulated in an internal
    buffer; seeking backwards replays from that buffer.
    """

    def __init__(self, f):
        self.f = f
        self._i = 0  # >=0 but can exceed buffer
        self._buffer = b""
        self._have_all = False
        self.closed = False

    def read(self, n=None):
        """Read ``n`` bytes; ``None`` or a negative value reads to EOF."""
        # Fix up n
        if n is None:
            pass
        else:
            n = int(n)
            if n < 0:
                n = None

        # Can and must we read more?
        if not self._have_all:
            more = b""
            if n is None:
                more = self.f.read()
                self._have_all = True
            else:
                want_i = self._i + n
                want_more = want_i - len(self._buffer)
                if want_more > 0:
                    more = self.f.read(want_more)
                    # A short read means the underlying stream is exhausted.
                    if len(more) < want_more:
                        self._have_all = True
            self._buffer += more

        # Read data from buffer and update pointer
        if n is None:
            res = self._buffer[self._i :]
        else:
            res = self._buffer[self._i : self._i + n]
        self._i += len(res)

        return res

    def tell(self):
        return self._i

    def seek(self, i, mode=0):
        # Mimic BytesIO behavior

        # Get the absolute new position
        i = int(i)
        if mode == 0:  # absolute
            if i < 0:
                raise ValueError("negative seek value " + str(i))
            real_i = i
        elif mode == 1:  # relative to current position
            real_i = max(0, self._i + i)  # negative ok here
        elif mode == 2:  # relative to end; requires knowing the full length
            if not self._have_all:
                self.read()
            real_i = max(0, len(self._buffer) + i)
        else:
            # BUG FIX: the message previously interpolated ``i`` (the seek
            # offset) instead of ``mode`` (the invalid whence value).
            raise ValueError("invalid whence (%s, should be 0, 1 or 2)" % mode)

        # Read some?
        if real_i <= len(self._buffer):
            pass  # no need to read
        elif not self._have_all:
            assert real_i > self._i  # if we don't have all, _i cannot be > _buffer
            self.read(real_i - self._i)  # sets self._i

        self._i = real_i
        return self._i

    def close(self):
        self.closed = True
        self.f.close()

    def isatty(self):
        return False

    def seekable(self):
        return True


class InitializationError(Exception):
    """The plugin could not initialize from the given request.

    This is a _internal_ error that is raised by plugins that fail to handle
    a given request. We use this to differentiate incompatibility between
    a plugin and a request from an actual error/bug inside a plugin.

    """

    pass
+ + """ + + pass diff --git a/.venv/Lib/site-packages/imageio/core/request.pyi b/.venv/Lib/site-packages/imageio/core/request.pyi new file mode 100644 index 00000000..8f73a0b4 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/core/request.pyi @@ -0,0 +1,90 @@ +from typing import BinaryIO, Optional, Dict, Any, Sequence, overload, Literal +from ..typing import ImageResource +import enum + +EXAMPLE_IMAGES: Dict[str, str] +RETURN_BYTES = "" +URI_BYTES = 1 +URI_FILE = 2 +URI_FILENAME = 3 +URI_ZIPPED = 4 +URI_HTTP = 5 +URI_FTP = 6 + +class IOMode(str, enum.Enum): + read = "r" + write = "w" + +class ImageMode(str, enum.Enum): + single_image = "i" + multi_image = "I" + single_volume = "v" + multi_volume = "V" + any_mode = "?" + +@enum.unique +class Mode(str, enum.Enum): + read_single_image = "ri" + read_multi_image = "rI" + read_single_volume = "rv" + read_multi_volume = "rV" + read_any = "r?" + write_single_image = "wi" + write_multi_image = "wI" + write_single_volume = "wv" + write_multi_volume = "wV" + write_any = "w?" + + @classmethod + def _missing_(cls, value: Any) -> Mode: ... + @property + def io_mode(self) -> IOMode: ... + @property + def image_mode(self) -> ImageMode: ... + +class InitializationError(Exception): ... + +class Request(object): + _uri_type: int + raw_uri: ImageResource + + @property + def filename(self) -> str: ... + @property + def extension(self) -> str: ... + @property + def format_hint(self) -> Optional[str]: ... + @format_hint.setter + def format_hint(self, format: str) -> None: ... + @property + def mode(self) -> Mode: ... + @property + def kwargs(self) -> Dict[str, Any]: ... + @property + def firstbytes(self) -> bytes: ... + def __init__( + self, + uri: ImageResource, + mode: str, + *, + extension: str = None, + format_hint: str = None, + **kwargs + ) -> None: ... + def _parse_uri(self, uri: ImageResource) -> None: ... + def get_file(self) -> BinaryIO: ... + def get_local_filename(self) -> str: ... + def finish(self) -> None: ... 
+ def get_result(self) -> Optional[bytes]: ... + def _read_first_bytes(self, N: int = 256) -> bytes: ... + +def read_n_bytes(f: BinaryIO, N: int) -> bytes: ... + +class SeekableFileObject: + def __init__(self, f: BinaryIO) -> None: ... + def read(self, n: int = None) -> bytes: ... + def tell(self) -> int: ... + def seek(self, i: int, mode: int = 0) -> int: ... + def close(self) -> None: ... + def isatty(self) -> bool: ... + def seekable(self) -> bool: ... diff --git a/.venv/Lib/site-packages/imageio/core/util.py b/.venv/Lib/site-packages/imageio/core/util.py new file mode 100644 index 00000000..6b283042 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/core/util.py @@ -0,0 +1,559 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +""" +Various utilities for imageio +""" + + +from collections import OrderedDict +import numpy as np +import os +import re +import struct +import sys +import time +import logging + + +logger = logging.getLogger("imageio") + +IS_PYPY = "__pypy__" in sys.builtin_module_names +THIS_DIR = os.path.abspath(os.path.dirname(__file__)) + + +def urlopen(*args, **kwargs): + """Compatibility function for the urlopen function. Raises an + RuntimeError if urlopen could not be imported (which can occur in + frozen applications. + """ + try: + from urllib.request import urlopen + except ImportError: + raise RuntimeError("Could not import urlopen.") + return urlopen(*args, **kwargs) + + +def _precision_warn(p1, p2, extra=""): + t = ( + "Lossy conversion from {} to {}. {} Convert image to {} prior to " + "saving to suppress this warning." + ) + logger.warning(t.format(p1, p2, extra, p2)) + + +def image_as_uint(im, bitdepth=None): + """Convert the given image to uint (default: uint8) + + If the dtype already matches the desired format, it is returned + as-is. If the image is float, and all values are between 0 and 1, + the values are multiplied by np.power(2.0, bitdepth). 
In all other + situations, the values are scaled such that the minimum value + becomes 0 and the maximum value becomes np.power(2.0, bitdepth)-1 + (255 for 8-bit and 65535 for 16-bit). + """ + if not bitdepth: + bitdepth = 8 + if not isinstance(im, np.ndarray): + raise ValueError("Image must be a numpy array") + if bitdepth == 8: + out_type = np.uint8 + elif bitdepth == 16: + out_type = np.uint16 + else: + raise ValueError("Bitdepth must be either 8 or 16") + dtype_str1 = str(im.dtype) + dtype_str2 = out_type.__name__ + if (im.dtype == np.uint8 and bitdepth == 8) or ( + im.dtype == np.uint16 and bitdepth == 16 + ): + # Already the correct format? Return as-is + return im + if dtype_str1.startswith("float") and np.nanmin(im) >= 0 and np.nanmax(im) <= 1: + _precision_warn(dtype_str1, dtype_str2, "Range [0, 1].") + im = im.astype(np.float64) * (np.power(2.0, bitdepth) - 1) + 0.499999999 + elif im.dtype == np.uint16 and bitdepth == 8: + _precision_warn(dtype_str1, dtype_str2, "Losing 8 bits of resolution.") + im = np.right_shift(im, 8) + elif im.dtype == np.uint32: + _precision_warn( + dtype_str1, + dtype_str2, + "Losing {} bits of resolution.".format(32 - bitdepth), + ) + im = np.right_shift(im, 32 - bitdepth) + elif im.dtype == np.uint64: + _precision_warn( + dtype_str1, + dtype_str2, + "Losing {} bits of resolution.".format(64 - bitdepth), + ) + im = np.right_shift(im, 64 - bitdepth) + else: + mi = np.nanmin(im) + ma = np.nanmax(im) + if not np.isfinite(mi): + raise ValueError("Minimum image value is not finite") + if not np.isfinite(ma): + raise ValueError("Maximum image value is not finite") + if ma == mi: + return im.astype(out_type) + _precision_warn(dtype_str1, dtype_str2, "Range [{}, {}].".format(mi, ma)) + # Now make float copy before we scale + im = im.astype("float64") + # Scale the values between 0 and 1 then multiply by the max value + im = (im - mi) / (ma - mi) * (np.power(2.0, bitdepth) - 1) + 0.499999999 + assert np.nanmin(im) >= 0 + assert 
np.nanmax(im) < np.power(2.0, bitdepth) + return im.astype(out_type) + + +class Array(np.ndarray): + """Array(array, meta=None) + + A subclass of np.ndarray that has a meta attribute. Get the dictionary + that contains the meta data using ``im.meta``. Convert to a plain numpy + array using ``np.asarray(im)``. + + """ + + def __new__(cls, array, meta=None): + # Check + if not isinstance(array, np.ndarray): + raise ValueError("Array expects a numpy array.") + if not (meta is None or isinstance(meta, dict)): + raise ValueError("Array expects meta data to be a dict.") + # Convert and return + meta = meta if meta is not None else getattr(array, "meta", {}) + try: + ob = array.view(cls) + except AttributeError: # pragma: no cover + # Just return the original; no metadata on the array in Pypy! + return array + ob._copy_meta(meta) + return ob + + def _copy_meta(self, meta): + """Make a 2-level deep copy of the meta dictionary.""" + self._meta = Dict() + for key, val in meta.items(): + if isinstance(val, dict): + val = Dict(val) # Copy this level + self._meta[key] = val + + @property + def meta(self): + """The dict with the meta data of this image.""" + return self._meta + + def __array_finalize__(self, ob): + """So the meta info is maintained when doing calculations with + the array. + """ + if isinstance(ob, Array): + self._copy_meta(ob.meta) + else: + self._copy_meta({}) + + def __array_wrap__(self, out, context=None): + """So that we return a native numpy array (or scalar) when a + reducting ufunc is applied (such as sum(), std(), etc.) + """ + if not out.shape: + return out.dtype.type(out) # Scalar + elif out.shape != self.shape: + return out.view(type=np.ndarray) + else: + return out # Type Array + + +Image = Array # Alias for backwards compatibility + + +def asarray(a): + """Pypy-safe version of np.asarray. Pypy's np.asarray consumes a + *lot* of memory if the given array is an ndarray subclass. This + function does not. 
+ """ + if isinstance(a, np.ndarray): + if IS_PYPY: # pragma: no cover + a = a.copy() # pypy has issues with base views + plain = a.view(type=np.ndarray) + return plain + return np.asarray(a) + + +class Dict(OrderedDict): + """A dict in which the keys can be get and set as if they were + attributes. Very convenient in combination with autocompletion. + + This Dict still behaves as much as possible as a normal dict, and + keys can be anything that are otherwise valid keys. However, + keys that are not valid identifiers or that are names of the dict + class (such as 'items' and 'copy') cannot be get/set as attributes. + """ + + __reserved_names__ = dir(OrderedDict()) # Also from OrderedDict + __pure_names__ = dir(dict()) + + def __getattribute__(self, key): + try: + return object.__getattribute__(self, key) + except AttributeError: + if key in self: + return self[key] + else: + raise + + def __setattr__(self, key, val): + if key in Dict.__reserved_names__: + # Either let OrderedDict do its work, or disallow + if key not in Dict.__pure_names__: + return OrderedDict.__setattr__(self, key, val) + else: + raise AttributeError( + "Reserved name, this key can only " + + "be set via ``d[%r] = X``" % key + ) + else: + # if isinstance(val, dict): val = Dict(val) -> no, makes a copy! + self[key] = val + + def __dir__(self): + def isidentifier(x): + return bool(re.match(r"[a-z_]\w*$", x, re.I)) + + names = [k for k in self.keys() if (isinstance(k, str) and isidentifier(k))] + return Dict.__reserved_names__ + names + + +class BaseProgressIndicator(object): + """BaseProgressIndicator(name) + + A progress indicator helps display the progress of a task to the + user. Progress can be pending, running, finished or failed. + + Each task has: + * a name - a short description of what needs to be done. + * an action - the current action in performing the task (e.g. a subtask) + * progress - how far the task is completed + * max - max number of progress units. 
If 0, the progress is indefinite + * unit - the units in which the progress is counted + * status - 0: pending, 1: in progress, 2: finished, 3: failed + + This class defines an abstract interface. Subclasses should implement + _start, _stop, _update_progress(progressText), _write(message). + """ + + def __init__(self, name): + self._name = name + self._action = "" + self._unit = "" + self._max = 0 + self._status = 0 + self._last_progress_update = 0 + + def start(self, action="", unit="", max=0): + """start(action='', unit='', max=0) + + Start the progress. Optionally specify an action, a unit, + and a maximum progress value. + """ + if self._status == 1: + self.finish() + self._action = action + self._unit = unit + self._max = max + # + self._progress = 0 + self._status = 1 + self._start() + + def status(self): + """status() + + Get the status of the progress - 0: pending, 1: in progress, + 2: finished, 3: failed + """ + return self._status + + def set_progress(self, progress=0, force=False): + """set_progress(progress=0, force=False) + + Set the current progress. To avoid unnecessary progress updates + this will only have a visual effect if the time since the last + update is > 0.1 seconds, or if force is True. + """ + self._progress = progress + # Update or not? 
+ if not (force or (time.time() - self._last_progress_update > 0.1)): + return + self._last_progress_update = time.time() + # Compose new string + unit = self._unit or "" + progressText = "" + if unit == "%": + progressText = "%2.1f%%" % progress + elif self._max > 0: + percent = 100 * float(progress) / self._max + progressText = "%i/%i %s (%2.1f%%)" % (progress, self._max, unit, percent) + elif progress > 0: + if isinstance(progress, float): + progressText = "%0.4g %s" % (progress, unit) + else: + progressText = "%i %s" % (progress, unit) + # Update + self._update_progress(progressText) + + def increase_progress(self, extra_progress): + """increase_progress(extra_progress) + + Increase the progress by a certain amount. + """ + self.set_progress(self._progress + extra_progress) + + def finish(self, message=None): + """finish(message=None) + + Finish the progress, optionally specifying a message. This will + not set the progress to the maximum. + """ + self.set_progress(self._progress, True) # fore update + self._status = 2 + self._stop() + if message is not None: + self._write(message) + + def fail(self, message=None): + """fail(message=None) + + Stop the progress with a failure, optionally specifying a message. + """ + self.set_progress(self._progress, True) # fore update + self._status = 3 + self._stop() + message = "FAIL " + (message or "") + self._write(message) + + def write(self, message): + """write(message) + + Write a message during progress (such as a warning). 
+ """ + if self.__class__ == BaseProgressIndicator: + # When this class is used as a dummy, print explicit message + print(message) + else: + return self._write(message) + + # Implementing classes should implement these + + def _start(self): + pass + + def _stop(self): + pass + + def _update_progress(self, progressText): + pass + + def _write(self, message): + pass + + +class StdoutProgressIndicator(BaseProgressIndicator): + """StdoutProgressIndicator(name) + + A progress indicator that shows the progress in stdout. It + assumes that the tty can appropriately deal with backspace + characters. + """ + + def _start(self): + self._chars_prefix, self._chars = "", "" + # Write message + if self._action: + self._chars_prefix = "%s (%s): " % (self._name, self._action) + else: + self._chars_prefix = "%s: " % self._name + sys.stdout.write(self._chars_prefix) + sys.stdout.flush() + + def _update_progress(self, progressText): + # If progress is unknown, at least make something move + if not progressText: + i1, i2, i3, i4 = "-\\|/" + M = {i1: i2, i2: i3, i3: i4, i4: i1} + progressText = M.get(self._chars, i1) + # Store new string and write + delChars = "\b" * len(self._chars) + self._chars = progressText + sys.stdout.write(delChars + self._chars) + sys.stdout.flush() + + def _stop(self): + self._chars = self._chars_prefix = "" + sys.stdout.write("\n") + sys.stdout.flush() + + def _write(self, message): + # Write message + delChars = "\b" * len(self._chars_prefix + self._chars) + sys.stdout.write(delChars + " " + message + "\n") + # Reprint progress text + sys.stdout.write(self._chars_prefix + self._chars) + sys.stdout.flush() + + +# From pyzolib/paths.py (https://bitbucket.org/pyzo/pyzolib/src/tip/paths.py) +def appdata_dir(appname=None, roaming=False): + """appdata_dir(appname=None, roaming=False) + + Get the path to the application directory, where applications are allowed + to write user specific files (e.g. configurations). 
For non-user specific + data, consider using common_appdata_dir(). + If appname is given, a subdir is appended (and created if necessary). + If roaming is True, will prefer a roaming directory (Windows Vista/7). + """ + + # Define default user directory + userDir = os.getenv("IMAGEIO_USERDIR", None) + if userDir is None: + userDir = os.path.expanduser("~") + if not os.path.isdir(userDir): # pragma: no cover + userDir = "/var/tmp" # issue #54 + + # Get system app data dir + path = None + if sys.platform.startswith("win"): + path1, path2 = os.getenv("LOCALAPPDATA"), os.getenv("APPDATA") + path = (path2 or path1) if roaming else (path1 or path2) + elif sys.platform.startswith("darwin"): + path = os.path.join(userDir, "Library", "Application Support") + # On Linux and as fallback + if not (path and os.path.isdir(path)): + path = userDir + + # Maybe we should store things local to the executable (in case of a + # portable distro or a frozen application that wants to be portable) + prefix = sys.prefix + if getattr(sys, "frozen", None): + prefix = os.path.abspath(os.path.dirname(sys.executable)) + for reldir in ("settings", "../settings"): + localpath = os.path.abspath(os.path.join(prefix, reldir)) + if os.path.isdir(localpath): # pragma: no cover + try: + open(os.path.join(localpath, "test.write"), "wb").close() + os.remove(os.path.join(localpath, "test.write")) + except IOError: + pass # We cannot write in this directory + else: + path = localpath + break + + # Get path specific for this app + if appname: + if path == userDir: + appname = "." + appname.lstrip(".") # Make it a hidden directory + path = os.path.join(path, appname) + if not os.path.isdir(path): # pragma: no cover + os.makedirs(path, exist_ok=True) + + # Done + return path + + +def resource_dirs(): + """resource_dirs() + + Get a list of directories where imageio resources may be located. + The first directory in this list is the "resources" directory in + the package itself. 
The second directory is the appdata directory + (~/.imageio on Linux). The list further contains the application + directory (for frozen apps), and may include additional directories + in the future. + """ + dirs = [resource_package_dir()] + # Resource dir baked in the package. + # Appdata directory + try: + dirs.append(appdata_dir("imageio")) + except Exception: # pragma: no cover + pass # The home dir may not be writable + # Directory where the app is located (mainly for frozen apps) + if getattr(sys, "frozen", None): + dirs.append(os.path.abspath(os.path.dirname(sys.executable))) + elif sys.path and sys.path[0]: + dirs.append(os.path.abspath(sys.path[0])) + return dirs + + +def resource_package_dir(): + """package_dir + + Get the resources directory in the imageio package installation + directory. + + Notes + ----- + This is a convenience method that is used by `resource_dirs` and + imageio entry point scripts. + """ + # Make pkg_resources optional if setuptools is not available + try: + # Avoid importing pkg_resources in the top level due to how slow it is + # https://github.com/pypa/setuptools/issues/510 + import pkg_resources + except ImportError: + pkg_resources = None + + if pkg_resources: + # The directory returned by `pkg_resources.resource_filename` + # also works with eggs. + pdir = pkg_resources.resource_filename("imageio", "resources") + else: + # If setuptools is not available, use fallback + pdir = os.path.abspath(os.path.join(THIS_DIR, "..", "resources")) + return pdir + + +def get_platform(): + """get_platform() + + Get a string that specifies the platform more specific than + sys.platform does. The result can be: linux32, linux64, win32, + win64, osx32, osx64. Other platforms may be added in the future. 
+ """ + # Get platform + if sys.platform.startswith("linux"): + plat = "linux%i" + elif sys.platform.startswith("win"): + plat = "win%i" + elif sys.platform.startswith("darwin"): + plat = "osx%i" + elif sys.platform.startswith("freebsd"): + plat = "freebsd%i" + else: # pragma: no cover + return None + + return plat % (struct.calcsize("P") * 8) # 32 or 64 bits + + +def has_module(module_name): + """Check to see if a python module is available.""" + if sys.version_info > (3, 4): + import importlib + + name_parts = module_name.split(".") + for i in range(len(name_parts)): + if importlib.util.find_spec(".".join(name_parts[: i + 1])) is None: + return False + return True + else: # pragma: no cover + import imp + + try: + imp.find_module(module_name) + except ImportError: + return False + return True diff --git a/.venv/Lib/site-packages/imageio/core/v3_plugin_api.py b/.venv/Lib/site-packages/imageio/core/v3_plugin_api.py new file mode 100644 index 00000000..871d1945 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/core/v3_plugin_api.py @@ -0,0 +1,370 @@ +from . import Request +from ..typing import ArrayLike +import numpy as np +from typing import Optional, Dict, Any, Tuple, Union, List, Iterator +from dataclasses import dataclass + + +@dataclass +class ImageProperties: + """Standardized Metadata + + ImageProperties represent a set of standardized metadata that is available + under the same name for every supported format. If the ImageResource (or + format) does not specify the value, a sensible default value is chosen + instead. + + Attributes + ---------- + shape : Tuple[int, ...] + The shape of the loaded ndimage. + dtype : np.dtype + The dtype of the loaded ndimage. + n_images : int + Number of images in the file if ``index=...``, `None` for single images. + is_batch : bool + If True, the first dimension of the ndimage represents a batch dimension + along which several images are stacked. 
+ spacing : Tuple + A tuple describing the spacing between pixels along each axis of the + ndimage. If the spacing is uniform along an axis the value corresponding + to that axis is a single float. If the spacing is non-uniform, the value + corresponding to that axis is a tuple in which the i-th element + indicates the spacing between the i-th and (i+1)-th pixel along that + axis. + + """ + + shape: Tuple[int, ...] + dtype: np.dtype + n_images: Optional[int] = None + is_batch: bool = False + spacing: Optional[tuple] = None + + +class PluginV3: + """A ImageIO Plugin. + + This is an abstract plugin that documents the v3 plugin API interface. A + plugin is an adapter/wrapper around a backend that converts a request from + iio.core (e.g., read an image from file) into a sequence of instructions for + the backend that fulfill the request. + + Plugin authors may choose to subclass this class when implementing a new + plugin, but aren't obliged to do so. As long as the plugin class implements + the interface (methods) described below the ImageIO core will treat it just + like any other plugin. + + + Parameters + ---------- + request : iio.Request + A request object that represents the users intent. It provides a + standard interface to access the various ImageResources and serves them + to the plugin as a file object (or file). Check the docs for details. + **kwargs : Any + Additional configuration arguments for the plugin or backend. Usually + these match the configuration arguments available on the backend and + are forwarded to it. + + + Raises + ------ + InitializationError + During ``__init__`` the plugin tests if it can fulfill the request. If + it can't, e.g., because the request points to a file in the wrong + format, then it should raise an ``InitializationError`` and provide a + reason for failure. This reason may be reported to the user. + ImportError + Plugins will be imported dynamically when listed in + ``iio.config.known_plugins`` to fulfill requests. 
This way, users only + have to load plugins/backends they actually use. If this plugin's backend + is not installed, it should raise an ``ImportError`` either during + module import or during class construction. + + Notes + ----- + Upon successful construction the plugin takes ownership of the provided + request. This means that it is the plugin's responsibility to call + request.finish() to close the resource when it is no longer needed. + + Plugins _must_ implement a context manager that closes and cleans any + resources held by the plugin upon exit. + + """ + + def __init__(self, request: Request) -> None: + """Initialize a new Plugin Instance. + + See Plugin's docstring for detailed documentation. + + Notes + ----- + The implementation here stores the request as a local variable that is + exposed using a @property below. If you inherit from PluginV3, remember + to call ``super().__init__(request)``. + + """ + + self._request = request + + def read(self, *, index: int = 0) -> np.ndarray: + """Read a ndimage. + + The ``read`` method loads a (single) ndimage, located at ``index`` from + the requested ImageResource. + + It is at the plugin's descretion to decide (and document) what + constitutes a single ndimage. A sensible way to make this decision is to + choose based on the ImageResource's format and on what users will expect + from such a format. For example, a sensible choice for a TIFF file + produced by an ImageJ hyperstack is to read it as a volumetric ndimage + (1 color dimension followed by 3 spatial dimensions). On the other hand, + a sensible choice for a MP4 file produced by Davinci Resolve is to treat + each frame as a ndimage (2 spatial dimensions followed by 1 color + dimension). + + The value ``index=None`` is special. It requests the plugin to load all + ndimages in the file and stack them along a new first axis. 
For example, + if a MP4 file is read with ``index=None`` and the plugin identifies + single frames as ndimages, then the plugin should read all frames and + stack them into a new ndimage which now contains a time axis as its + first axis. If a PNG file (single image format) is read with + ``index=None`` the plugin does a very similar thing: It loads all + ndimages in the file (here it's just one) and stacks them along a new + first axis, effectively prepending an axis with size 1 to the image. If + a plugin does not wish to support ``index=None`` it should set a more + sensible default and raise a ``ValueError`` when requested to read using + ``index=None``. + + Parameters + ---------- + index : int + If the ImageResource contains multiple ndimages, and index is an + integer, select the index-th ndimage from among them and return it. + If index is an ellipsis (...), read all ndimages in the file and + stack them along a new batch dimension. If index is None, let the + plugin decide. If the index is out of bounds a ``ValueError`` is + raised. + **kwargs : Any + The read method may accept any number of plugin-specific keyword + arguments to further customize the read behavior. Usually these + match the arguments available on the backend and are forwarded to + it. + + Returns + ------- + ndimage : np.ndarray + A ndimage containing decoded pixel data (sometimes called bitmap). + + Notes + ----- + The ImageResource from which the plugin should read is managed by the + provided request object. Directly accessing the managed ImageResource is + _not_ permitted. Instead, you can get FileLike access to the + ImageResource via request.get_file(). + + If the backend doesn't support reading from FileLike objects, you can + request a temporary file to pass to the backend via + ``request.get_local_filename()``. This is, however, not very performant + (involves copying the Request's content into a temporary file), so you + should avoid doing this whenever possible. 
Consider it a fallback method + in case all else fails. + + """ + raise NotImplementedError() + + def write(self, ndimage: Union[ArrayLike, List[ArrayLike]]) -> Optional[bytes]: + """Write a ndimage to a ImageResource. + + The ``write`` method encodes the given ndimage into the format handled + by the backend and writes it to the ImageResource. It overwrites + any content that may have been previously stored in the file. + + If the backend supports only a single format then it must check if + the ImageResource matches that format and raise an exception if not. + Typically, this should be done during initialization in the form of a + ``InitializationError``. + + If the backend supports more than one format it must determine the + requested/desired format. Usually this can be done by inspecting the + ImageResource (e.g., by checking ``request.extension``), or by providing + a mechanism to explicitly set the format (perhaps with a - sensible - + default value). If the plugin can not determine the desired format, it + **must not** write to the ImageResource, but raise an exception instead. + + If the backend supports at least one format that can hold multiple + ndimages it should be capable of handling ndimage batches and lists of + ndimages. If the ``ndimage`` input is a list of ndimages, the plugin + should not assume that the ndimages are not stackable, i.e., ndimages + may have different shapes. Otherwise, the ``ndimage`` may be a batch of + multiple ndimages stacked along the first axis of the array. The plugin + must be able to discover this, either automatically or via additional + `kwargs`. If there is ambiguity in the process, the plugin must clearly + document what happens in such cases and, if possible, describe how to + resolve this ambiguity. + + Parameters + ---------- + ndimage : ArrayLike + The ndimage to encode and write to the current ImageResource. 
+ **kwargs : Any + The write method may accept any number of plugin-specific keyword + arguments to customize the writing behavior. Usually these match the + arguments available on the backend and are forwarded to it. + + Returns + ------- + encoded_image : bytes or None + If the chosen ImageResource is the special target ``""`` then + write should return a byte string containing the encoded image data. + Otherwise, it returns None. + + Notes + ----- + The ImageResource to which the plugin should write to is managed by the + provided request object. Directly accessing the managed ImageResource is + _not_ permitted. Instead, you can get FileLike access to the + ImageResource via request.get_file(). + + If the backend doesn't support writing to FileLike objects, you can + request a temporary file to pass to the backend via + ``request.get_local_filename()``. This is, however, not very performant + (involves copying the Request's content from a temporary file), so you + should avoid doing this whenever possible. Consider it a fallback method + in case all else fails. + + """ + raise NotImplementedError() + + def iter(self) -> Iterator[np.ndarray]: + """Iterate the ImageResource. + + This method returns a generator that yields ndimages in the order in which + they appear in the file. This is roughly equivalent to:: + + idx = 0 + while True: + try: + yield self.read(index=idx) + except ValueError: + break + + It works very similar to ``read``, and you can consult the documentation + of that method for additional information on desired behavior. + + Parameters + ---------- + **kwargs : Any + The iter method may accept any number of plugin-specific keyword + arguments to further customize the reading/iteration behavior. + Usually these match the arguments available on the backend and are + forwarded to it. + + Yields + ------ + ndimage : np.ndarray + A ndimage containing decoded pixel data (sometimes called bitmap). 
+ + See Also + -------- + PluginV3.read + + """ + raise NotImplementedError() + + def properties(self, index: int = 0) -> ImageProperties: + """Standardized ndimage metadata. + + Parameters + ---------- + index : int + If the ImageResource contains multiple ndimages, and index is an + integer, select the index-th ndimage from among them and return its + properties. If index is an ellipsis (...), read all ndimages in the file + and stack them along a new batch dimension and return their properties. + If index is None, the plugin decides the default. + + Returns + ------- + properties : ImageProperties + A dataclass filled with standardized image metadata. + + """ + raise NotImplementedError() + + def metadata(self, index: int = 0, exclude_applied: bool = True) -> Dict[str, Any]: + """Format-Specific ndimage metadata. + + The method reads metadata stored in the ImageResource and returns it as + a python dict. The plugin is free to choose which name to give a piece + of metadata; however, if possible, it should match the name given by the + format. There is no requirement regarding the fields a plugin must + expose; however, if a plugin does expose any,``exclude_applied`` applies + to these fields. + + If the plugin does return metadata items, it must check the value of + ``exclude_applied`` before returning them. If ``exclude applied`` is + True, then any metadata item that would be applied to an ndimage + returned by ``read`` (or ``iter``) must not be returned. This is done to + avoid confusion; for example, if an ImageResource defines the ExIF + rotation tag, and the plugin applies the rotation to the data before + returning it, then ``exclude_applied`` prevents confusion on whether the + tag was already applied or not. + + The `kwarg` ``index`` behaves similar to its counterpart in ``read`` + with one exception: If the ``index`` is None, then global metadata is + returned instead of returning a combination of all metadata items. 
If + there is no global metadata, the Plugin should return an empty dict or + raise an exception. + + Parameters + ---------- + index : int + If the ImageResource contains multiple ndimages, and index is an + integer, select the index-th ndimage from among them and return its + metadata. If index is an ellipsis (...), return global metadata. If + index is None, the plugin decides the default. + exclude_applied : bool + If True (default), do not report metadata fields that the plugin + would apply/consume while reading the image. + + Returns + ------- + metadata : dict + A dictionary filled with format-specific metadata fields and their + values. + + """ + raise NotImplementedError() + + def close(self) -> None: + """Close the ImageResource. + + This method allows a plugin to behave similar to the python built-in ``open``:: + + image_file = my_plugin(Request, "r") + ... + image_file.close() + + It is used by the context manager and deconstructor below to avoid leaking + ImageResources. If the plugin has no other cleanup to do it doesn't have + to overwrite this method itself and can rely on the implementation + below. + + """ + + self.request.finish() + + @property + def request(self) -> Request: + return self._request + + def __enter__(self) -> "PluginV3": + return self + + def __exit__(self, type, value, traceback) -> None: + self.close() + + def __del__(self) -> None: + self.close() diff --git a/.venv/Lib/site-packages/imageio/freeze.py b/.venv/Lib/site-packages/imageio/freeze.py new file mode 100644 index 00000000..3753a29d --- /dev/null +++ b/.venv/Lib/site-packages/imageio/freeze.py @@ -0,0 +1,11 @@ +""" +Helper functions for freezing imageio. 
+""" + + +def get_includes(): + return ["email", "urllib.request", "numpy", "zipfile", "io"] + + +def get_excludes(): + return [] diff --git a/.venv/Lib/site-packages/imageio/plugins/__init__.py b/.venv/Lib/site-packages/imageio/plugins/__init__.py new file mode 100644 index 00000000..741415e9 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/__init__.py @@ -0,0 +1,103 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +# flake8: noqa + +""" +Here you can find documentation on how to write your own plugin to allow +ImageIO to access a new backend. Plugins are quite object oriented, and +the relevant classes and their interaction are documented here: + +.. currentmodule:: imageio + +.. autosummary:: + :toctree: ../_autosummary + :template: better_class.rst + + imageio.core.Format + imageio.core.Request + +.. note:: + You can always check existing plugins if you want to see examples. + +What methods to implement +------------------------- + +To implement a new plugin, create a new class that inherits from +:class:`imageio.core.Format`. and implement the following functions: + +.. autosummary:: + :toctree: ../_autosummary + + imageio.core.Format.__init__ + imageio.core.Format._can_read + imageio.core.Format._can_write + +Further, each format contains up to two nested classes; one for reading and +one for writing. To support reading and/or writing, the respective classes +need to be defined. + +For reading, create a nested class that inherits from +``imageio.core.Format.Reader`` and that implements the following functions: + + * Implement ``_open(**kwargs)`` to initialize the reader. Deal with the + user-provided keyword arguments here. + * Implement ``_close()`` to clean up. + * Implement ``_get_length()`` to provide a suitable length based on what + the user expects. Can be ``inf`` for streaming data. + * Implement ``_get_data(index)`` to return an array and a meta-data dict. 
+ * Implement ``_get_meta_data(index)`` to return a meta-data dict. If index + is None, it should return the 'global' meta-data. + +For writing, create a nested class that inherits from +``imageio.core.Format.Writer`` and implement the following functions: + + * Implement ``_open(**kwargs)`` to initialize the writer. Deal with the + user-provided keyword arguments here. + * Implement ``_close()`` to clean up. + * Implement ``_append_data(im, meta)`` to add data (and meta-data). + * Implement ``_set_meta_data(meta)`` to set the global meta-data. + +""" + +import importlib +import os +import warnings + + +# v2 imports remove in v3 +from .. import formats + +# v2 allows formatting plugins by environment variable +# this is done here. +env_plugin_order = os.getenv("IMAGEIO_FORMAT_ORDER", None) +if env_plugin_order is not None: # pragma: no cover + warnings.warn( + "Setting plugin priority through an environment variable is" + " deprecated and will be removed in ImageIO v3. There is no" + " replacement planned for this feature. If you have an" + " active use-case for it, please reach out to us on GitHub.", + DeprecationWarning, + ) + + formats.sort(*os.getenv("IMAGEIO_FORMAT_ORDER", "").split(",")) + + +# this class replaces plugin module. For details +# see https://stackoverflow.com/questions/2447353/getattr-on-a-module +def __getattr__(name): + """Lazy-Import Plugins + + This function dynamically loads plugins into the imageio.plugin + namespace upon first access. 
For example, the following snippet will + delay importing freeimage until the second line: + + >>> import imageio + >>> imageio.plugins.freeimage.download() + + """ + + try: + return importlib.import_module(f"imageio.plugins.{name}") + except ImportError: + raise AttributeError(f"module '{__name__}' has no attribute '{name}'") from None diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..8bc2e26f Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/_bsdf.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/_bsdf.cpython-311.pyc new file mode 100644 index 00000000..78eb5e63 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/_bsdf.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/_dicom.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/_dicom.cpython-311.pyc new file mode 100644 index 00000000..000085c2 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/_dicom.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/_freeimage.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/_freeimage.cpython-311.pyc new file mode 100644 index 00000000..e4c0cd74 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/_freeimage.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/_swf.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/_swf.cpython-311.pyc new file mode 100644 index 00000000..c4fc6457 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/_swf.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/imageio/plugins/__pycache__/_tifffile.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/_tifffile.cpython-311.pyc new file mode 100644 index 00000000..57fbd65e Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/_tifffile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/bsdf.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/bsdf.cpython-311.pyc new file mode 100644 index 00000000..bf2eab41 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/bsdf.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/dicom.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/dicom.cpython-311.pyc new file mode 100644 index 00000000..1b4de83c Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/dicom.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/example.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/example.cpython-311.pyc new file mode 100644 index 00000000..eeb5f07f Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/example.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/feisem.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/feisem.cpython-311.pyc new file mode 100644 index 00000000..7fb4f631 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/feisem.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/ffmpeg.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/ffmpeg.cpython-311.pyc new file mode 100644 index 00000000..6b83e9a2 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/ffmpeg.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/imageio/plugins/__pycache__/fits.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/fits.cpython-311.pyc new file mode 100644 index 00000000..0a8079ba Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/fits.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/freeimage.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/freeimage.cpython-311.pyc new file mode 100644 index 00000000..921d4a63 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/freeimage.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/freeimagemulti.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/freeimagemulti.cpython-311.pyc new file mode 100644 index 00000000..5fe6bcf5 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/freeimagemulti.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/gdal.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/gdal.cpython-311.pyc new file mode 100644 index 00000000..1bdb9944 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/gdal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/grab.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/grab.cpython-311.pyc new file mode 100644 index 00000000..a327910b Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/grab.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/lytro.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/lytro.cpython-311.pyc new file mode 100644 index 00000000..6726096b Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/lytro.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/imageio/plugins/__pycache__/npz.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/npz.cpython-311.pyc new file mode 100644 index 00000000..be72df74 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/npz.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/opencv.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/opencv.cpython-311.pyc new file mode 100644 index 00000000..a44a1a27 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/opencv.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/pillow.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/pillow.cpython-311.pyc new file mode 100644 index 00000000..58f59e3e Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/pillow.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/pillow_info.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/pillow_info.cpython-311.pyc new file mode 100644 index 00000000..5a4c67f0 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/pillow_info.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/pillow_legacy.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/pillow_legacy.cpython-311.pyc new file mode 100644 index 00000000..61a14afc Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/pillow_legacy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/pillowmulti.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/pillowmulti.cpython-311.pyc new file mode 100644 index 00000000..4c19eb32 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/pillowmulti.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/imageio/plugins/__pycache__/pyav.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/pyav.cpython-311.pyc new file mode 100644 index 00000000..6fb02316 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/pyav.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/simpleitk.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/simpleitk.cpython-311.pyc new file mode 100644 index 00000000..463a7dec Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/simpleitk.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/spe.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/spe.cpython-311.pyc new file mode 100644 index 00000000..247b76c3 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/spe.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/swf.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/swf.cpython-311.pyc new file mode 100644 index 00000000..686634b4 Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/swf.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/tifffile.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/tifffile.cpython-311.pyc new file mode 100644 index 00000000..4f00f3ee Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/tifffile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/__pycache__/tifffile_v3.cpython-311.pyc b/.venv/Lib/site-packages/imageio/plugins/__pycache__/tifffile_v3.cpython-311.pyc new file mode 100644 index 00000000..41820f3c Binary files /dev/null and b/.venv/Lib/site-packages/imageio/plugins/__pycache__/tifffile_v3.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio/plugins/_bsdf.py 
b/.venv/Lib/site-packages/imageio/plugins/_bsdf.py new file mode 100644 index 00000000..d6f56ce0 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/_bsdf.py @@ -0,0 +1,915 @@ +#!/usr/bin/env python +# This file is distributed under the terms of the 2-clause BSD License. +# Copyright (c) 2017-2018, Almar Klein + +""" +Python implementation of the Binary Structured Data Format (BSDF). + +BSDF is a binary format for serializing structured (scientific) data. +See http://bsdf.io for more information. + +This is the reference implementation, which is relatively relatively +sophisticated, providing e.g. lazy loading of blobs and streamed +reading/writing. A simpler Python implementation is available as +``bsdf_lite.py``. + +This module has no dependencies and works on Python 2.7 and 3.4+. + +Note: on Legacy Python (Python 2.7), non-Unicode strings are encoded as bytes. +""" + +# todo: in 2020, remove six stuff, __future__ and _isidentifier +# todo: in 2020, remove 'utf-8' args to encode/decode; it's faster + +from __future__ import absolute_import, division, print_function + +import bz2 +import hashlib +import logging +import os +import re +import struct +import sys +import types +import zlib +from io import BytesIO + +logger = logging.getLogger(__name__) + +# Notes on versioning: the major and minor numbers correspond to the +# BSDF format version. The major number if increased when backward +# incompatible changes are introduced. An implementation must raise an +# exception when the file being read has a higher major version. The +# minor number is increased when new backward compatible features are +# introduced. An implementation must display a warning when the file +# being read has a higher minor version. The patch version is increased +# for subsequent releases of the implementation. 
+VERSION = 2, 1, 2 +__version__ = ".".join(str(i) for i in VERSION) + + +# %% The encoder and decoder implementation + +# From six.py +PY3 = sys.version_info[0] >= 3 +if PY3: + text_type = str + string_types = str + unicode_types = str + integer_types = int + classtypes = type +else: # pragma: no cover + logging.basicConfig() # avoid "no handlers found" error + text_type = unicode # noqa + string_types = basestring # noqa + unicode_types = unicode # noqa + integer_types = (int, long) # noqa + classtypes = type, types.ClassType + +# Shorthands +spack = struct.pack +strunpack = struct.unpack + + +def lencode(x): + """Encode an unsigned integer into a variable sized blob of bytes.""" + # We could support 16 bit and 32 bit as well, but the gain is low, since + # 9 bytes for collections with over 250 elements is marginal anyway. + if x <= 250: + return spack(" extension + self._extensions_by_cls = {} # cls -> (name, extension.encode) + if extensions is None: + extensions = standard_extensions + for extension in extensions: + self.add_extension(extension) + self._parse_options(**options) + + def _parse_options( + self, + compression=0, + use_checksum=False, + float64=True, + load_streaming=False, + lazy_blob=False, + ): + # Validate compression + if isinstance(compression, string_types): + m = {"no": 0, "zlib": 1, "bz2": 2} + compression = m.get(compression.lower(), compression) + if compression not in (0, 1, 2): + raise TypeError("Compression must be 0, 1, 2, " '"no", "zlib", or "bz2"') + self._compression = compression + + # Other encoding args + self._use_checksum = bool(use_checksum) + self._float64 = bool(float64) + + # Decoding args + self._load_streaming = bool(load_streaming) + self._lazy_blob = bool(lazy_blob) + + def add_extension(self, extension_class): + """Add an extension to this serializer instance, which must be + a subclass of Extension. Can be used as a decorator. 
+ """ + # Check class + if not ( + isinstance(extension_class, type) and issubclass(extension_class, Extension) + ): + raise TypeError("add_extension() expects a Extension class.") + extension = extension_class() + + # Get name + name = extension.name + if not isinstance(name, str): + raise TypeError("Extension name must be str.") + if len(name) == 0 or len(name) > 250: + raise NameError( + "Extension names must be nonempty and shorter " "than 251 chars." + ) + if name in self._extensions: + logger.warning( + 'BSDF warning: overwriting extension "%s", ' + "consider removing first" % name + ) + + # Get classes + cls = extension.cls + if not cls: + clss = [] + elif isinstance(cls, (tuple, list)): + clss = cls + else: + clss = [cls] + for cls in clss: + if not isinstance(cls, classtypes): + raise TypeError("Extension classes must be types.") + + # Store + for cls in clss: + self._extensions_by_cls[cls] = name, extension.encode + self._extensions[name] = extension + return extension_class + + def remove_extension(self, name): + """Remove a converted by its unique name.""" + if not isinstance(name, str): + raise TypeError("Extension name must be str.") + if name in self._extensions: + self._extensions.pop(name) + for cls in list(self._extensions_by_cls.keys()): + if self._extensions_by_cls[cls][0] == name: + self._extensions_by_cls.pop(cls) + + def _encode(self, f, value, streams, ext_id): + """Main encoder function.""" + x = encode_type_id + + if value is None: + f.write(x(b"v", ext_id)) # V for void + elif value is True: + f.write(x(b"y", ext_id)) # Y for yes + elif value is False: + f.write(x(b"n", ext_id)) # N for no + elif isinstance(value, integer_types): + if -32768 <= value <= 32767: + f.write(x(b"h", ext_id) + spack("h", value)) # H for ... 
+ else: + f.write(x(b"i", ext_id) + spack(" 0: + raise ValueError("Can only have one stream per file.") + streams.append(value) + value._activate(f, self._encode, self._decode) # noqa + else: + if ext_id is not None: + raise ValueError( + "Extension %s wronfully encodes object to another " + "extension object (though it may encode to a list/dict " + "that contains other extension objects)." % ext_id + ) + # Try if the value is of a type we know + ex = self._extensions_by_cls.get(value.__class__, None) + # Maybe its a subclass of a type we know + if ex is None: + for name, c in self._extensions.items(): + if c.match(self, value): + ex = name, c.encode + break + else: + ex = None + # Success or fail + if ex is not None: + ext_id2, extension_encode = ex + self._encode(f, extension_encode(self, value), streams, ext_id2) + else: + t = ( + "Class %r is not a valid base BSDF type, nor is it " + "handled by an extension." + ) + raise TypeError(t % value.__class__.__name__) + + def _decode(self, f): + """Main decoder function.""" + + # Get value + char = f.read(1) + c = char.lower() + + # Conversion (uppercase value identifiers signify converted values) + if not char: + raise EOFError() + elif char != c: + n = strunpack("= 254: + # Streaming + closed = n == 254 + n = strunpack(" 0 + name = f.read(n_name).decode("UTF-8") + value[name] = self._decode(f) + elif c == b"b": + if self._lazy_blob: + value = Blob((f, True)) + else: + blob = Blob((f, False)) + value = blob.get_bytes() + else: + raise RuntimeError("Parse error %r" % char) + + # Convert value if we have an extension for it + if ext_id is not None: + extension = self._extensions.get(ext_id, None) + if extension is not None: + value = extension.decode(self, value) + else: + logger.warning("BSDF warning: no extension found for %r" % ext_id) + + return value + + def encode(self, ob): + """Save the given object to bytes.""" + f = BytesIO() + self.save(f, ob) + return f.getvalue() + + def save(self, f, ob): + """Write the 
given object to the given file object.""" + f.write(b"BSDF") + f.write(struct.pack(" 0: + stream = streams[0] + if stream._start_pos != f.tell(): + raise ValueError( + "The stream object must be " "the last object to be encoded." + ) + + def decode(self, bb): + """Load the data structure that is BSDF-encoded in the given bytes.""" + f = BytesIO(bb) + return self.load(f) + + def load(self, f): + """Load a BSDF-encoded object from the given file object.""" + # Check magic string + f4 = f.read(4) + if f4 != b"BSDF": + raise RuntimeError("This does not look like a BSDF file: %r" % f4) + # Check version + major_version = strunpack(" VERSION[1]: # minor should be < ours + t = ( + "BSDF warning: reading file with higher minor version (%s) " + "than the implementation (%s)." + ) + logger.warning(t % (__version__, file_version)) + + return self._decode(f) + + +# %% Streaming and blob-files + + +class BaseStream(object): + """Base class for streams.""" + + def __init__(self, mode="w"): + self._i = 0 + self._count = -1 + if isinstance(mode, int): + self._count = mode + mode = "r" + elif mode == "w": + self._count = 0 + assert mode in ("r", "w") + self._mode = mode + self._f = None + self._start_pos = 0 + + def _activate(self, file, encode_func, decode_func): + if self._f is not None: # Associated with another write + raise IOError("Stream object cannot be activated twice?") + self._f = file + self._start_pos = self._f.tell() + self._encode = encode_func + self._decode = decode_func + + @property + def mode(self): + """The mode of this stream: 'r' or 'w'.""" + return self._mode + + +class ListStream(BaseStream): + """A streamable list object used for writing or reading. + In read mode, it can also be iterated over. + """ + + @property + def count(self): + """The number of elements in the stream (can be -1 for unclosed + streams in read-mode). 
+ """ + return self._count + + @property + def index(self): + """The current index of the element to read/write.""" + return self._i + + def append(self, item): + """Append an item to the streaming list. The object is immediately + serialized and written to the underlying file. + """ + # if self._mode != 'w': + # raise IOError('This ListStream is not in write mode.') + if self._count != self._i: + raise IOError("Can only append items to the end of the stream.") + if self._f is None: + raise IOError("List stream is not associated with a file yet.") + if self._f.closed: + raise IOError("Cannot stream to a close file.") + self._encode(self._f, item, [self], None) + self._i += 1 + self._count += 1 + + def close(self, unstream=False): + """Close the stream, marking the number of written elements. New + elements may still be appended, but they won't be read during decoding. + If ``unstream`` is False, the stream is turned into a regular list + (not streaming). + """ + # if self._mode != 'w': + # raise IOError('This ListStream is not in write mode.') + if self._count != self._i: + raise IOError("Can only close when at the end of the stream.") + if self._f is None: + raise IOError("ListStream is not associated with a file yet.") + if self._f.closed: + raise IOError("Cannot close a stream on a close file.") + i = self._f.tell() + self._f.seek(self._start_pos - 8 - 1) + self._f.write(spack("= 0: + if self._i >= self._count: + raise StopIteration() + self._i += 1 + return self._decode(self._f) + else: + # This raises EOFError at some point. + try: + res = self._decode(self._f) + self._i += 1 + return res + except EOFError: + self._count = self._i + raise StopIteration() + + def __iter__(self): + if self._mode != "r": + raise IOError("Cannot iterate: ListStream in not in read mode.") + return self + + def __next__(self): + return self.next() + + +class Blob(object): + """Object to represent a blob of bytes. 
When used to write a BSDF file, + it's a wrapper for bytes plus properties such as what compression to apply. + When used to read a BSDF file, it can be used to read the data lazily, and + also modify the data if reading in 'r+' mode and the blob isn't compressed. + """ + + # For now, this does not allow re-sizing blobs (within the allocated size) + # but this can be added later. + + def __init__(self, bb, compression=0, extra_size=0, use_checksum=False): + if isinstance(bb, bytes): + self._f = None + self.compressed = self._from_bytes(bb, compression) + self.compression = compression + self.allocated_size = self.used_size + extra_size + self.use_checksum = use_checksum + elif isinstance(bb, tuple) and len(bb) == 2 and hasattr(bb[0], "read"): + self._f, allow_seek = bb + self.compressed = None + self._from_file(self._f, allow_seek) + self._modified = False + else: + raise TypeError("Wrong argument to create Blob.") + + def _from_bytes(self, value, compression): + """When used to wrap bytes in a blob.""" + if compression == 0: + compressed = value + elif compression == 1: + compressed = zlib.compress(value, 9) + elif compression == 2: + compressed = bz2.compress(value, 9) + else: # pragma: no cover + assert False, "Unknown compression identifier" + + self.data_size = len(value) + self.used_size = len(compressed) + return compressed + + def _to_file(self, f): + """Private friend method called by encoder to write a blob to a file.""" + # Write sizes - write at least in a size that allows resizing + if self.allocated_size <= 250 and self.compression == 0: + f.write(spack(" self.allocated_size: + raise IOError("Seek beyond blob boundaries.") + self._f.seek(self.start_pos + p) + + def tell(self): + """Get the current file pointer position (relative to the blob start).""" + if self._f is None: + raise RuntimeError( + "Cannot tell in a blob " "that is not created by the BSDF decoder." 
+ ) + return self._f.tell() - self.start_pos + + def write(self, bb): + """Write bytes to the blob.""" + if self._f is None: + raise RuntimeError( + "Cannot write in a blob " "that is not created by the BSDF decoder." + ) + if self.compression: + raise IOError("Cannot arbitrarily write in compressed blob.") + if self._f.tell() + len(bb) > self.end_pos: + raise IOError("Write beyond blob boundaries.") + self._modified = True + return self._f.write(bb) + + def read(self, n): + """Read n bytes from the blob.""" + if self._f is None: + raise RuntimeError( + "Cannot read in a blob " "that is not created by the BSDF decoder." + ) + if self.compression: + raise IOError("Cannot arbitrarily read in compressed blob.") + if self._f.tell() + n > self.end_pos: + raise IOError("Read beyond blob boundaries.") + return self._f.read(n) + + def get_bytes(self): + """Get the contents of the blob as bytes.""" + if self.compressed is not None: + compressed = self.compressed + else: + i = self._f.tell() + self.seek(0) + compressed = self._f.read(self.used_size) + self._f.seek(i) + if self.compression == 0: + value = compressed + elif self.compression == 1: + value = zlib.decompress(compressed) + elif self.compression == 2: + value = bz2.decompress(compressed) + else: # pragma: no cover + raise RuntimeError("Invalid compression %i" % self.compression) + return value + + def update_checksum(self): + """Reset the blob's checksum if present. Call this after modifying + the data. + """ + # or ... should the presence of a checksum mean that data is proteced? + if self.use_checksum and self._modified: + self.seek(0) + compressed = self._f.read(self.used_size) + self._f.seek(self.start_pos - self.alignment - 1 - 16) + self._f.write(hashlib.md5(compressed).digest()) + + +# %% High-level functions + + +def encode(ob, extensions=None, **options): + """Save (BSDF-encode) the given object to bytes. + See `BSDFSerializer` for details on extensions and options. 
+ """ + s = BsdfSerializer(extensions, **options) + return s.encode(ob) + + +def save(f, ob, extensions=None, **options): + """Save (BSDF-encode) the given object to the given filename or + file object. See` BSDFSerializer` for details on extensions and options. + """ + s = BsdfSerializer(extensions, **options) + if isinstance(f, string_types): + with open(f, "wb") as fp: + return s.save(fp, ob) + else: + return s.save(f, ob) + + +def decode(bb, extensions=None, **options): + """Load a (BSDF-encoded) structure from bytes. + See `BSDFSerializer` for details on extensions and options. + """ + s = BsdfSerializer(extensions, **options) + return s.decode(bb) + + +def load(f, extensions=None, **options): + """Load a (BSDF-encoded) structure from the given filename or file object. + See `BSDFSerializer` for details on extensions and options. + """ + s = BsdfSerializer(extensions, **options) + if isinstance(f, string_types): + if f.startswith(("~/", "~\\")): # pragma: no cover + f = os.path.expanduser(f) + with open(f, "rb") as fp: + return s.load(fp) + else: + return s.load(f) + + +# Aliases for json compat +loads = decode +dumps = encode + + +# %% Standard extensions + +# Defining extensions as a dict would be more compact and feel lighter, but +# that would only allow lambdas, which is too limiting, e.g. for ndarray +# extension. + + +class Extension(object): + """Base class to implement BSDF extensions for special data types. + + Extension classes are provided to the BSDF serializer, which + instantiates the class. That way, the extension can be somewhat dynamic: + e.g. the NDArrayExtension exposes the ndarray class only when numpy + is imported. + + A extension instance must have two attributes. These can be attributes of + the class, or of the instance set in ``__init__()``: + + * name (str): the name by which encoded values will be identified. + * cls (type): the type (or list of types) to match values with. 
+ This is optional, but it makes the encoder select extensions faster. + + Further, it needs 3 methods: + + * `match(serializer, value) -> bool`: return whether the extension can + convert the given value. The default is ``isinstance(value, self.cls)``. + * `encode(serializer, value) -> encoded_value`: the function to encode a + value to more basic data types. + * `decode(serializer, encoded_value) -> value`: the function to decode an + encoded value back to its intended representation. + + """ + + name = "" + cls = () + + def __repr__(self): + return "" % (self.name, hex(id(self))) + + def match(self, s, v): + return isinstance(v, self.cls) + + def encode(self, s, v): + raise NotImplementedError() + + def decode(self, s, v): + raise NotImplementedError() + + +class ComplexExtension(Extension): + name = "c" + cls = complex + + def encode(self, s, v): + return (v.real, v.imag) + + def decode(self, s, v): + return complex(v[0], v[1]) + + +class NDArrayExtension(Extension): + name = "ndarray" + + def __init__(self): + if "numpy" in sys.modules: + import numpy as np + + self.cls = np.ndarray + + def match(self, s, v): # pragma: no cover - e.g. 
work for nd arrays in JS + return hasattr(v, "shape") and hasattr(v, "dtype") and hasattr(v, "tobytes") + + def encode(self, s, v): + return dict(shape=v.shape, dtype=text_type(v.dtype), data=v.tobytes()) + + def decode(self, s, v): + try: + import numpy as np + except ImportError: # pragma: no cover + return v + a = np.frombuffer(v["data"], dtype=v["dtype"]) + a.shape = v["shape"] + return a + + +standard_extensions = [ComplexExtension, NDArrayExtension] + + +if __name__ == "__main__": + # Invoke CLI + import bsdf_cli + + bsdf_cli.main() diff --git a/.venv/Lib/site-packages/imageio/plugins/_dicom.py b/.venv/Lib/site-packages/imageio/plugins/_dicom.py new file mode 100644 index 00000000..96fb6fcd --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/_dicom.py @@ -0,0 +1,932 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +""" Plugin for reading DICOM files. +""" + +# todo: Use pydicom: +# * Note: is not py3k ready yet +# * Allow reading the full meta info +# I think we can more or less replace the SimpleDicomReader with a +# pydicom.Dataset For series, only ned to read the full info from one +# file: speed still high +# * Perhaps allow writing? + +import sys +import os +import struct +import logging + +import numpy as np + + +logger = logging.getLogger(__name__) + +# Determine endianity of system +sys_is_little_endian = sys.byteorder == "little" + +# Define a dictionary that contains the tags that we would like to know +MINIDICT = { + (0x7FE0, 0x0010): ("PixelData", "OB"), + # Date and time + (0x0008, 0x0020): ("StudyDate", "DA"), + (0x0008, 0x0021): ("SeriesDate", "DA"), + (0x0008, 0x0022): ("AcquisitionDate", "DA"), + (0x0008, 0x0023): ("ContentDate", "DA"), + (0x0008, 0x0030): ("StudyTime", "TM"), + (0x0008, 0x0031): ("SeriesTime", "TM"), + (0x0008, 0x0032): ("AcquisitionTime", "TM"), + (0x0008, 0x0033): ("ContentTime", "TM"), + # With what, where, by whom? 
+ (0x0008, 0x0060): ("Modality", "CS"), + (0x0008, 0x0070): ("Manufacturer", "LO"), + (0x0008, 0x0080): ("InstitutionName", "LO"), + # Descriptions + (0x0008, 0x1030): ("StudyDescription", "LO"), + (0x0008, 0x103E): ("SeriesDescription", "LO"), + # UID's + (0x0008, 0x0016): ("SOPClassUID", "UI"), + (0x0008, 0x0018): ("SOPInstanceUID", "UI"), + (0x0020, 0x000D): ("StudyInstanceUID", "UI"), + (0x0020, 0x000E): ("SeriesInstanceUID", "UI"), + (0x0008, 0x0117): ("ContextUID", "UI"), + # Numbers + (0x0020, 0x0011): ("SeriesNumber", "IS"), + (0x0020, 0x0012): ("AcquisitionNumber", "IS"), + (0x0020, 0x0013): ("InstanceNumber", "IS"), + (0x0020, 0x0014): ("IsotopeNumber", "IS"), + (0x0020, 0x0015): ("PhaseNumber", "IS"), + (0x0020, 0x0016): ("IntervalNumber", "IS"), + (0x0020, 0x0017): ("TimeSlotNumber", "IS"), + (0x0020, 0x0018): ("AngleNumber", "IS"), + (0x0020, 0x0019): ("ItemNumber", "IS"), + (0x0020, 0x0020): ("PatientOrientation", "CS"), + (0x0020, 0x0030): ("ImagePosition", "CS"), + (0x0020, 0x0032): ("ImagePositionPatient", "CS"), + (0x0020, 0x0035): ("ImageOrientation", "CS"), + (0x0020, 0x0037): ("ImageOrientationPatient", "CS"), + # Patient information + (0x0010, 0x0010): ("PatientName", "PN"), + (0x0010, 0x0020): ("PatientID", "LO"), + (0x0010, 0x0030): ("PatientBirthDate", "DA"), + (0x0010, 0x0040): ("PatientSex", "CS"), + (0x0010, 0x1010): ("PatientAge", "AS"), + (0x0010, 0x1020): ("PatientSize", "DS"), + (0x0010, 0x1030): ("PatientWeight", "DS"), + # Image specific (required to construct numpy array) + (0x0028, 0x0002): ("SamplesPerPixel", "US"), + (0x0028, 0x0008): ("NumberOfFrames", "IS"), + (0x0028, 0x0100): ("BitsAllocated", "US"), + (0x0028, 0x0101): ("BitsStored", "US"), + (0x0028, 0x0102): ("HighBit", "US"), + (0x0028, 0x0103): ("PixelRepresentation", "US"), + (0x0028, 0x0010): ("Rows", "US"), + (0x0028, 0x0011): ("Columns", "US"), + (0x0028, 0x1052): ("RescaleIntercept", "DS"), + (0x0028, 0x1053): ("RescaleSlope", "DS"), + # Image specific (for the 
user) + (0x0028, 0x0030): ("PixelSpacing", "DS"), + (0x0018, 0x0088): ("SliceSpacing", "DS"), +} + +# Define some special tags: +# See PS 3.5-2008 section 7.5 (p.40) +ItemTag = (0xFFFE, 0xE000) # start of Sequence Item +ItemDelimiterTag = (0xFFFE, 0xE00D) # end of Sequence Item +SequenceDelimiterTag = (0xFFFE, 0xE0DD) # end of Sequence of undefined length + +# Define set of groups that we're interested in (so we can quickly skip others) +GROUPS = set([key[0] for key in MINIDICT.keys()]) +VRS = set([val[1] for val in MINIDICT.values()]) + + +class NotADicomFile(Exception): + pass + + +class CompressedDicom(RuntimeError): + pass + + +class SimpleDicomReader(object): + """ + This class provides reading of pixel data from DICOM files. It is + focussed on getting the pixel data, not the meta info. + + To use, first create an instance of this class (giving it + a file object or filename). Next use the info attribute to + get a dict of the meta data. The loading of pixel data is + deferred until get_numpy_array() is called. + + Comparison with Pydicom + ----------------------- + + This code focusses on getting the pixel data out, which allows some + shortcuts, resulting in the code being much smaller. + + Since the processing of data elements is much cheaper (it skips a lot + of tags), this code is about 3x faster than pydicom (except for the + deflated DICOM files). + + This class does borrow some code (and ideas) from the pydicom + project, and (to the best of our knowledge) has the same limitations + as pydicom with regard to the type of files that it can handle. + + Limitations + ----------- + + For more advanced DICOM processing, please check out pydicom. + + * Only a predefined subset of data elements (meta information) is read. + * This is a reader; it can not write DICOM files. + * (just like pydicom) it can handle none of the compressed DICOM + formats except for "Deflated Explicit VR Little Endian" + (1.2.840.10008.1.2.1.99). 
+ + """ + + def __init__(self, file): + # Open file if filename given + if isinstance(file, str): + self._filename = file + self._file = open(file, "rb") + else: + self._filename = "" + self._file = file + # Init variable to store position and size of pixel data + self._pixel_data_loc = None + # The meta header is always explicit and little endian + self.is_implicit_VR = False + self.is_little_endian = True + self._unpackPrefix = "<" + # Dict to store data elements of interest in + self._info = {} + # VR Conversion + self._converters = { + # Numbers + "US": lambda x: self._unpack("H", x), + "UL": lambda x: self._unpack("L", x), + # Numbers encoded as strings + "DS": lambda x: self._splitValues(x, float, "\\"), + "IS": lambda x: self._splitValues(x, int, "\\"), + # strings + "AS": lambda x: x.decode("ascii", "ignore").strip("\x00"), + "DA": lambda x: x.decode("ascii", "ignore").strip("\x00"), + "TM": lambda x: x.decode("ascii", "ignore").strip("\x00"), + "UI": lambda x: x.decode("ascii", "ignore").strip("\x00"), + "LO": lambda x: x.decode("utf-8", "ignore").strip("\x00").rstrip(), + "CS": lambda x: self._splitValues(x, float, "\\"), + "PN": lambda x: x.decode("utf-8", "ignore").strip("\x00").rstrip(), + } + + # Initiate reading + self._read() + + @property + def info(self): + return self._info + + def _splitValues(self, x, type, splitter): + s = x.decode("ascii").strip("\x00") + try: + if splitter in s: + return tuple([type(v) for v in s.split(splitter) if v.strip()]) + else: + return type(s) + except ValueError: + return s + + def _unpack(self, fmt, value): + return struct.unpack(self._unpackPrefix + fmt, value)[0] + + # Really only so we need minimal changes to _pixel_data_numpy + def __iter__(self): + return iter(self._info.keys()) + + def __getattr__(self, key): + info = object.__getattribute__(self, "_info") + if key in info: + return info[key] + return object.__getattribute__(self, key) # pragma: no cover + + def _read(self): + f = self._file + # Check prefix 
after peamble + f.seek(128) + if f.read(4) != b"DICM": + raise NotADicomFile("Not a valid DICOM file.") + # Read + self._read_header() + self._read_data_elements() + self._get_shape_and_sampling() + # Close if done, reopen if necessary to read pixel data + if os.path.isfile(self._filename): + self._file.close() + self._file = None + + def _readDataElement(self): + f = self._file + # Get group and element + group = self._unpack("H", f.read(2)) + element = self._unpack("H", f.read(2)) + # Get value length + if self.is_implicit_VR: + vl = self._unpack("I", f.read(4)) + else: + vr = f.read(2) + if vr in (b"OB", b"OW", b"SQ", b"UN"): + reserved = f.read(2) # noqa + vl = self._unpack("I", f.read(4)) + else: + vl = self._unpack("H", f.read(2)) + # Get value + if group == 0x7FE0 and element == 0x0010: + here = f.tell() + self._pixel_data_loc = here, vl + f.seek(here + vl) + return group, element, b"Deferred loading of pixel data" + else: + if vl == 0xFFFFFFFF: + value = self._read_undefined_length_value() + else: + value = f.read(vl) + return group, element, value + + def _read_undefined_length_value(self, read_size=128): + """Copied (in compacted form) from PyDicom + Copyright Darcy Mason. + """ + fp = self._file + # data_start = fp.tell() + search_rewind = 3 + bytes_to_find = struct.pack( + self._unpackPrefix + "HH", SequenceDelimiterTag[0], SequenceDelimiterTag[1] + ) + + found = False + value_chunks = [] + while not found: + chunk_start = fp.tell() + bytes_read = fp.read(read_size) + if len(bytes_read) < read_size: + # try again, + # if still don't get required amount, this is last block + new_bytes = fp.read(read_size - len(bytes_read)) + bytes_read += new_bytes + if len(bytes_read) < read_size: + raise EOFError( + "End of file reached before sequence " "delimiter found." 
+ ) + index = bytes_read.find(bytes_to_find) + if index != -1: + found = True + value_chunks.append(bytes_read[:index]) + fp.seek(chunk_start + index + 4) # rewind to end of delimiter + length = fp.read(4) + if length != b"\0\0\0\0": + logger.warning( + "Expected 4 zero bytes after undefined length " "delimiter" + ) + else: + fp.seek(fp.tell() - search_rewind) # rewind a bit + # accumulate the bytes read (not including the rewind) + value_chunks.append(bytes_read[:-search_rewind]) + + # if get here then have found the byte string + return b"".join(value_chunks) + + def _read_header(self): + f = self._file + TransferSyntaxUID = None + + # Read all elements, store transferSyntax when we encounter it + try: + while True: + fp_save = f.tell() + # Get element + group, element, value = self._readDataElement() + if group == 0x02: + if group == 0x02 and element == 0x10: + TransferSyntaxUID = value.decode("ascii").strip("\x00") + else: + # No more group 2: rewind and break + # (don't trust group length) + f.seek(fp_save) + break + except (EOFError, struct.error): # pragma: no cover + raise RuntimeError("End of file reached while still in header.") + + # Handle transfer syntax + self._info["TransferSyntaxUID"] = TransferSyntaxUID + # + if TransferSyntaxUID is None: + # Assume ExplicitVRLittleEndian + is_implicit_VR, is_little_endian = False, True + elif TransferSyntaxUID == "1.2.840.10008.1.2.1": + # ExplicitVRLittleEndian + is_implicit_VR, is_little_endian = False, True + elif TransferSyntaxUID == "1.2.840.10008.1.2.2": + # ExplicitVRBigEndian + is_implicit_VR, is_little_endian = False, False + elif TransferSyntaxUID == "1.2.840.10008.1.2": + # implicit VR little endian + is_implicit_VR, is_little_endian = True, True + elif TransferSyntaxUID == "1.2.840.10008.1.2.1.99": + # DeflatedExplicitVRLittleEndian: + is_implicit_VR, is_little_endian = False, True + self._inflate() + else: + # http://www.dicomlibrary.com/dicom/transfer-syntax/ + t, extra_info = TransferSyntaxUID, "" + 
if "1.2.840.10008.1.2.4.50" <= t < "1.2.840.10008.1.2.4.99": + extra_info = " (JPEG)" + if "1.2.840.10008.1.2.4.90" <= t < "1.2.840.10008.1.2.4.99": + extra_info = " (JPEG 2000)" + if t == "1.2.840.10008.1.2.5": + extra_info = " (RLE)" + if t == "1.2.840.10008.1.2.6.1": + extra_info = " (RFC 2557)" + raise CompressedDicom( + "The dicom reader can only read files with " + "uncompressed image data - not %r%s. You " + "can try using dcmtk or gdcm to convert the " + "image." % (t, extra_info) + ) + + # From hereon, use implicit/explicit big/little endian + self.is_implicit_VR = is_implicit_VR + self.is_little_endian = is_little_endian + self._unpackPrefix = "><"[is_little_endian] + + def _read_data_elements(self): + info = self._info + try: + while True: + # Get element + group, element, value = self._readDataElement() + # Is it a group we are interested in? + if group in GROUPS: + key = (group, element) + name, vr = MINIDICT.get(key, (None, None)) + # Is it an element we are interested in? + if name: + # Store value + converter = self._converters.get(vr, lambda x: x) + info[name] = converter(value) + except (EOFError, struct.error): + pass # end of file ... + + def get_numpy_array(self): + """Get numpy arra for this DICOM file, with the correct shape, + and pixel values scaled appropriately. + """ + # Is there pixel data at all? + if "PixelData" not in self: + raise TypeError("No pixel data found in this dataset.") + + # Load it now if it was not already loaded + if self._pixel_data_loc and len(self.PixelData) < 100: + # Reopen file? 
+ close_file = False + if self._file is None: + close_file = True + self._file = open(self._filename, "rb") + # Read data + self._file.seek(self._pixel_data_loc[0]) + if self._pixel_data_loc[1] == 0xFFFFFFFF: + value = self._read_undefined_length_value() + else: + value = self._file.read(self._pixel_data_loc[1]) + # Close file + if close_file: + self._file.close() + self._file = None + # Overwrite + self._info["PixelData"] = value + + # Get data + data = self._pixel_data_numpy() + data = self._apply_slope_and_offset(data) + + # Remove data again to preserve memory + # Note that the data for the original file is loaded twice ... + self._info["PixelData"] = ( + b"Data converted to numpy array, " + b"raw data removed to preserve memory" + ) + return data + + def _get_shape_and_sampling(self): + """Get shape and sampling without actuall using the pixel data. + In this way, the user can get an idea what's inside without having + to load it. + """ + # Get shape (in the same way that pydicom does) + if "NumberOfFrames" in self and self.NumberOfFrames > 1: + if self.SamplesPerPixel > 1: + shape = ( + self.SamplesPerPixel, + self.NumberOfFrames, + self.Rows, + self.Columns, + ) + else: + shape = self.NumberOfFrames, self.Rows, self.Columns + elif "SamplesPerPixel" in self: + if self.SamplesPerPixel > 1: + if self.BitsAllocated == 8: + shape = self.SamplesPerPixel, self.Rows, self.Columns + else: + raise NotImplementedError( + "DICOM plugin only handles " + "SamplesPerPixel > 1 if Bits " + "Allocated = 8" + ) + else: + shape = self.Rows, self.Columns + else: + raise RuntimeError( + "DICOM file has no SamplesPerPixel " "(perhaps this is a report?)" + ) + + # Try getting sampling between pixels + if "PixelSpacing" in self: + sampling = float(self.PixelSpacing[0]), float(self.PixelSpacing[1]) + else: + sampling = 1.0, 1.0 + if "SliceSpacing" in self: + sampling = (abs(self.SliceSpacing),) + sampling + + # Ensure that sampling has as many elements as shape + sampling = (1.0,) * 
(len(shape) - len(sampling)) + sampling[-len(shape) :] + + # Set shape and sampling + self._info["shape"] = shape + self._info["sampling"] = sampling + + def _pixel_data_numpy(self): + """Return a NumPy array of the pixel data.""" + # Taken from pydicom + # Copyright (c) 2008-2012 Darcy Mason + + if "PixelData" not in self: + raise TypeError("No pixel data found in this dataset.") + + # determine the type used for the array + need_byteswap = self.is_little_endian != sys_is_little_endian + + # Make NumPy format code, e.g. "uint16", "int32" etc + # from two pieces of info: + # self.PixelRepresentation -- 0 for unsigned, 1 for signed; + # self.BitsAllocated -- 8, 16, or 32 + format_str = "%sint%d" % ( + ("u", "")[self.PixelRepresentation], + self.BitsAllocated, + ) + try: + numpy_format = np.dtype(format_str) + except TypeError: # pragma: no cover + raise TypeError( + "Data type not understood by NumPy: format='%s', " + " PixelRepresentation=%d, BitsAllocated=%d" + % (numpy_format, self.PixelRepresentation, self.BitsAllocated) + ) + + # Have correct Numpy format, so create the NumPy array + arr = np.frombuffer(self.PixelData, numpy_format).copy() + + # XXX byte swap - may later handle this in read_file!!? + if need_byteswap: + arr.byteswap(True) # True means swap in-place, don't make new copy + + # Note the following reshape operations return a new *view* onto arr, + # but don't copy the data + arr = arr.reshape(*self._info["shape"]) + return arr + + def _apply_slope_and_offset(self, data): + """ + If RescaleSlope and RescaleIntercept are present in the data, + apply them. The data type of the data is changed if necessary. 
+ """ + # Obtain slope and offset + slope, offset = 1, 0 + needFloats, needApplySlopeOffset = False, False + if "RescaleSlope" in self: + needApplySlopeOffset = True + slope = self.RescaleSlope + if "RescaleIntercept" in self: + needApplySlopeOffset = True + offset = self.RescaleIntercept + if int(slope) != slope or int(offset) != offset: + needFloats = True + if not needFloats: + slope, offset = int(slope), int(offset) + + # Apply slope and offset + if needApplySlopeOffset: + # Maybe we need to change the datatype? + if data.dtype in [np.float32, np.float64]: + pass + elif needFloats: + data = data.astype(np.float32) + else: + # Determine required range + minReq, maxReq = data.min(), data.max() + minReq = min([minReq, minReq * slope + offset, maxReq * slope + offset]) + maxReq = max([maxReq, minReq * slope + offset, maxReq * slope + offset]) + + # Determine required datatype from that + dtype = None + if minReq < 0: + # Signed integer type + maxReq = max([-minReq, maxReq]) + if maxReq < 2**7: + dtype = np.int8 + elif maxReq < 2**15: + dtype = np.int16 + elif maxReq < 2**31: + dtype = np.int32 + else: + dtype = np.float32 + else: + # Unsigned integer type + if maxReq < 2**8: + dtype = np.int8 + elif maxReq < 2**16: + dtype = np.int16 + elif maxReq < 2**32: + dtype = np.int32 + else: + dtype = np.float32 + # Change datatype + if dtype != data.dtype: + data = data.astype(dtype) + + # Apply slope and offset + data *= slope + data += offset + + # Done + return data + + def _inflate(self): + # Taken from pydicom + # Copyright (c) 2008-2012 Darcy Mason + import zlib + from io import BytesIO + + # See PS3.6-2008 A.5 (p 71) -- when written, the entire dataset + # following the file metadata was prepared the normal way, + # then "deflate" compression applied. 
class DicomSeries(object):
    """DicomSeries
    This class represents a series of dicom files (SimpleDicomReader
    objects) that belong together. If these are multiple files, they
    represent the slices of a volume (like for CT or MRI).
    """

    def __init__(self, suid, progressIndicator):
        # The readers (one per file/slice) belonging to this series
        self._entries = []
        # The SeriesInstanceUID shared by all entries
        self._suid = suid
        # Meta info of the series, filled by _finish()
        self._info = {}
        # Callback object used to report progress
        self._progressIndicator = progressIndicator

    def __len__(self):
        return len(self._entries)

    def __iter__(self):
        return iter(self._entries)

    def __getitem__(self, index):
        return self._entries[index]

    @property
    def suid(self):
        """The SeriesInstanceUID of this series."""
        return self._suid

    @property
    def shape(self):
        """The shape of the data (nz, ny, nx)."""
        return self._info["shape"]

    @property
    def sampling(self):
        """The sampling (voxel distances) of the data (dz, dy, dx)."""
        return self._info["sampling"]

    @property
    def info(self):
        """A dictionary containing the information as present in the
        first dicomfile of this serie. None if there are no entries."""
        return self._info

    @property
    def description(self):
        """A description of the dicom series. Used fields are
        PatientName, shape of the data, SeriesDescription, and
        ImageComments.
        """
        info = self.info

        # If no info available, return simple description
        if not info:  # pragma: no cover
            return "DicomSeries containing %i images" % len(self)

        fields = []
        # Give patient name
        if "PatientName" in info:
            fields.append("" + info["PatientName"])
        # Also add dimensions
        if self.shape:
            fields.append("x".join(str(d) for d in self.shape))
        # Try adding more fields
        if "SeriesDescription" in info:
            fields.append("'" + info["SeriesDescription"] + "'")
        if "ImageComments" in info:
            fields.append("'" + info["ImageComments"] + "'")

        # Combine
        return " ".join(fields)

    def __repr__(self):
        adr = hex(id(self)).upper()
        # Bugfix: the format string was empty (""), which makes the
        # %-formatting below raise TypeError; restore an informative repr.
        return "<DicomSeries with %i images at %s>" % (len(self), adr)

    def get_numpy_array(self):
        """Get (load) the data that this DicomSeries represents, and return
        it as a numpy array. If this serie contains multiple images, the
        resulting array is 3D, otherwise it's 2D.
        """
        # It's easy if no file or if just a single file
        if len(self) == 0:
            raise ValueError("Serie does not contain any files.")
        elif len(self) == 1:
            return self[0].get_numpy_array()

        # Check info
        if self.info is None:
            raise RuntimeError("Cannot return volume if series not finished.")

        # Init data; the first slice determines the dtype
        # (renamed from ``slice``, which shadowed the builtin)
        first_slice = self[0].get_numpy_array()
        vol = np.zeros(self.shape, dtype=first_slice.dtype)
        vol[0] = first_slice

        # Fill volume, reporting progress along the way
        self._progressIndicator.start("loading data", "", len(self))
        for z in range(1, len(self)):
            vol[z] = self[z].get_numpy_array()
            self._progressIndicator.set_progress(z + 1)
        self._progressIndicator.finish()

        # Each slice was a temporary copy; reclaim that memory now
        import gc

        gc.collect()
        return vol

    def _append(self, dcm):
        """Register one SimpleDicomReader with this series."""
        self._entries.append(dcm)

    def _sort(self):
        """Sort entries by InstanceNumber, then by z-position.

        NOTE(review): entries without ImagePositionPatient get a None
        sort key; if two entries share an InstanceNumber and only one
        has a position, comparing None with a float raises TypeError —
        confirm against real inputs.
        """
        self._entries.sort(
            key=lambda k: (
                k.InstanceNumber,
                (
                    k.ImagePositionPatient[2]
                    if hasattr(k, "ImagePositionPatient")
                    else None
                ),
            )
        )

    def _finish(self):
        """
        Evaluate the series of dicom files. Together they should make up
        a volumetric dataset. This means the files should meet certain
        conditions. Also some additional information has to be calculated,
        such as the distance between the slices. This method sets the
        attributes for "shape", "sampling" and "info".

        This method checks:
        * that there are no missing files
        * that the dimensions of all images match
        * that the pixel spacing of all images match
        """
        # The datasets list should be sorted by instance number
        L = self._entries
        if len(L) == 0:
            return
        elif len(L) == 1:
            self._info = L[0].info
            return

        # Get previous
        ds1 = L[0]
        # Init measures to calculate average of
        distance_sum = 0.0
        # Init measures to check (these are in 2D)
        dimensions = ds1.Rows, ds1.Columns
        sampling = ds1.info["sampling"][:2]  # row, column

        for index in range(len(L)):
            # The first round ds1 and ds2 will be the same; the distance
            # contribution is then zero, which does not hurt the average
            ds2 = L[index]
            # Positions along the z axis
            pos1 = float(ds1.ImagePositionPatient[2])
            pos2 = float(ds2.ImagePositionPatient[2])
            distance_sum += abs(pos1 - pos2)
            # Test measures
            dimensions2 = ds2.Rows, ds2.Columns
            sampling2 = ds2.info["sampling"][:2]  # row, column
            if dimensions != dimensions2:
                # We cannot produce a volume if the dimensions differ
                raise ValueError("Dimensions of slices does not match.")
            if sampling != sampling2:
                # We can still produce a volume, but we should notify the user
                self._progressIndicator.write("Warn: sampling does not match.")
            # Store previous
            ds1 = ds2

        # Finish calculating average distance
        # (Note that there are len(L)-1 distances)
        distance_mean = distance_sum / (len(L) - 1)

        # Set info dict from the first file, then series-specific fields
        self._info = L[0].info.copy()
        self._info["shape"] = (len(L),) + ds2.info["shape"]
        self._info["sampling"] = (distance_mean,) + ds2.info["sampling"]


def list_files(files, path):
    """Append all files below *path* (recursively) to the list *files*."""
    for item in os.listdir(path):
        item = os.path.join(path, item)
        if os.path.isdir(item):
            list_files(files, item)
        elif os.path.isfile(item):
            files.append(item)
def splitSerieIfRequired(serie, series, progressIndicator):
    """
    Split the serie in multiple series if this is required. The choice
    is based on examing the image position relative to the previous
    image. If it differs too much, it is assumed that there is a new
    dataset. This can happen for example in unspitted gated CT data.
    """
    # Work on the sorted entries
    serie._sort()
    entries = serie._entries
    previous = entries[0]
    # Without position info there is nothing to decide on
    if "ImagePositionPatient" not in previous:
        return

    # Partition the entries: start a new group whenever the slice
    # distance jumps to more than 2.1x the running estimate
    groups = [[previous]]
    distance = 0
    for current in entries[1:]:
        # Distance between this slice and the previous one (z axis)
        step = abs(
            float(previous.ImagePositionPatient[2])
            - float(current.ImagePositionPatient[2])
        )
        if distance and step > 2.1 * distance:
            # Deviates too much: assume a new dataset starts here
            groups.append([])
            distance = 0
        else:
            # A smaller (but still large) gap suggests a missing file
            if distance and step > 1.5 * distance:
                progressIndicator.write(
                    "Warning: missing file after %r" % previous._filename
                )
            distance = step
        # Add to the current group
        groups[-1].append(current)
        previous = current

    # Replace the serie by its groups if it was actually split
    if len(groups) > 1:
        # At what position are we now?
        position = series.index(serie)
        # Create new series
        replacements = []
        for group in groups:
            newSerie = DicomSeries(serie.suid, progressIndicator)
            newSerie._entries = group
            replacements.append(newSerie)
        # Insert series and remove self
        for newSerie in reversed(replacements):
            series.insert(position, newSerie)
        series.remove(serie)
logger = logging.getLogger(__name__)

TEST_NUMPY_NO_STRIDES = False  # To test pypy fallback

# Bundled FreeImage binary per platform
FNAME_PER_PLATFORM = {
    "osx32": "libfreeimage-3.16.0-osx10.6.dylib",  # universal library
    "osx64": "libfreeimage-3.16.0-osx10.6.dylib",
    "win32": "FreeImage-3.18.0-win32.dll",
    "win64": "FreeImage-3.18.0-win64.dll",
    "linux32": "libfreeimage-3.16.0-linux32.so",
    "linux64": "libfreeimage-3.16.0-linux64.so",
}


def download(directory=None, force_download=False):
    """Download the FreeImage library to your computer.

    Parameters
    ----------
    directory : str | None
        The directory where the file will be cached if a download was
        required to obtain the file. By default, the appdata directory
        is used. This is also the first directory that is checked for
        a local version of the file.
    force_download : bool | str
        If True, the file will be downloaded even if a local copy exists
        (and this copy will be overwritten). Can also be a YYYY-MM-DD date
        to ensure a file is up-to-date (modified date of a file on disk,
        if present, is checked).
    """
    plat = get_platform()
    if plat and plat in FNAME_PER_PLATFORM:
        fname = "freeimage/" + FNAME_PER_PLATFORM[plat]
        get_remote_file(fname=fname, directory=directory, force_download=force_download)
        # Reset the cached lib so loading is retried
        # (needed to make tests work)
        fi._lib = None


def get_freeimage_lib():
    """Ensure we have our version of the binary freeimage lib."""
    # An explicit override wins
    lib = os.getenv("IMAGEIO_FREEIMAGE_LIB", None)
    if lib:  # pragma: no cover
        return lib

    # Get filename to load; if we do not provide a binary, the system
    # may still do ...
    plat = get_platform()
    if plat and plat in FNAME_PER_PLATFORM:
        try:
            return get_remote_file("freeimage/" + FNAME_PER_PLATFORM[plat], auto=False)
        except InternetNotAllowedError:
            pass
        except NeedDownloadError:
            raise NeedDownloadError(
                "Need FreeImage library. "
                "You can obtain it with either:\n"
                " - download using the command: "
                "imageio_download_bin freeimage\n"
                " - download by calling (in Python): "
                "imageio.plugins.freeimage.download()\n"
            )
        except RuntimeError as e:  # pragma: no cover
            logger.warning(str(e))


def efn(x):
    """Encode filename *x* to bytes for the current file system."""
    return x.encode(sys.getfilesystemencoding())


# 4-byte quads of 0,v,v,v from 0,0,0,0 to 0,255,255,255
GREY_PALETTE = numpy.arange(0, 0x01000000, 0x00010101, dtype=numpy.uint32)
        # NOTE(review): tail of FI_TYPES.fi_types — the class header and the
        # FIT_* constants are defined before this chunk.
        # Map (numpy scalar type, channel count) -> FreeImage image type.
        (numpy.uint8, 1): FIT_BITMAP,
        (numpy.uint8, 3): FIT_BITMAP,
        (numpy.uint8, 4): FIT_BITMAP,
        (numpy.uint16, 1): FIT_UINT16,
        (numpy.int16, 1): FIT_INT16,
        (numpy.uint32, 1): FIT_UINT32,
        (numpy.int32, 1): FIT_INT32,
        (numpy.float32, 1): FIT_FLOAT,
        (numpy.float64, 1): FIT_DOUBLE,
        (numpy.complex128, 1): FIT_COMPLEX,
        (numpy.uint16, 3): FIT_RGB16,
        (numpy.uint16, 4): FIT_RGBA16,
        (numpy.float32, 3): FIT_RGBF,
        (numpy.float32, 4): FIT_RGBAF,
    }

    # Extra (leading) shape dimensions per FreeImage type: the channel
    # count for multi-channel types, empty for scalar types.
    extra_dims = {
        FIT_UINT16: [],
        FIT_INT16: [],
        FIT_UINT32: [],
        FIT_INT32: [],
        FIT_FLOAT: [],
        FIT_DOUBLE: [],
        FIT_COMPLEX: [],
        FIT_RGB16: [3],
        FIT_RGBA16: [4],
        FIT_RGBF: [3],
        FIT_RGBAF: [4],
    }


class IO_FLAGS(object):
    """Load/save flag constants for the FreeImage plugin formats."""

    FIF_LOAD_NOPIXELS = 0x8000  # loading: load the image header only
    #                           # (not supported by all plugins)
    BMP_DEFAULT = 0
    BMP_SAVE_RLE = 1
    CUT_DEFAULT = 0
    DDS_DEFAULT = 0
    EXR_DEFAULT = 0  # save data as half with piz-based wavelet compression
    EXR_FLOAT = 0x0001  # save data as float instead of half (not recommended)
    EXR_NONE = 0x0002  # save with no compression
    EXR_ZIP = 0x0004  # save with zlib compression, in blocks of 16 scan lines
    EXR_PIZ = 0x0008  # save with piz-based wavelet compression
    EXR_PXR24 = 0x0010  # save with lossy 24-bit float compression
    EXR_B44 = 0x0020  # save with lossy 44% float compression
    #                 # - goes to 22% when combined with EXR_LC
    EXR_LC = 0x0040  # save images with one luminance and two chroma channels,
    #                # rather than as RGB (lossy compression)
    FAXG3_DEFAULT = 0
    GIF_DEFAULT = 0
    GIF_LOAD256 = 1  # Load the image as a 256 color image with unused
    #                # palette entries, if it's 16 or 2 color
    GIF_PLAYBACK = 2  # 'Play' the GIF to generate each frame (as 32bpp)
    #                 # instead of returning raw frame data when loading
    HDR_DEFAULT = 0
    ICO_DEFAULT = 0
    ICO_MAKEALPHA = 1  # convert to 32bpp and create an alpha channel from the
    #                  # AND-mask when loading
    IFF_DEFAULT = 0
    J2K_DEFAULT = 0  # save with a 16:1 rate
    JP2_DEFAULT = 0  # save with a 16:1 rate
    JPEG_DEFAULT = 0  # loading (see JPEG_FAST);
    #                 # saving (see JPEG_QUALITYGOOD|JPEG_SUBSAMPLING_420)
    JPEG_FAST = 0x0001  # load the file as fast as possible,
    #                   # sacrificing some quality
    JPEG_ACCURATE = 0x0002  # load the file with the best quality,
    #                       # sacrificing some speed
    JPEG_CMYK = 0x0004  # load separated CMYK "as is"
    #                   # (use | to combine with other load flags)
    JPEG_EXIFROTATE = 0x0008  # load and rotate according to
    #                         # Exif 'Orientation' tag if available
    JPEG_QUALITYSUPERB = 0x80  # save with superb quality (100:1)
    JPEG_QUALITYGOOD = 0x0100  # save with good quality (75:1)
    JPEG_QUALITYNORMAL = 0x0200  # save with normal quality (50:1)
    JPEG_QUALITYAVERAGE = 0x0400  # save with average quality (25:1)
    JPEG_QUALITYBAD = 0x0800  # save with bad quality (10:1)
    JPEG_PROGRESSIVE = 0x2000  # save as a progressive-JPEG
    #                          # (use | to combine with other save flags)
    JPEG_SUBSAMPLING_411 = 0x1000  # save with high 4x1 chroma
    #                              # subsampling (4:1:1)
    JPEG_SUBSAMPLING_420 = 0x4000  # save with medium 2x2 medium chroma
    #                              # subsampling (4:2:0) - default value
    JPEG_SUBSAMPLING_422 = 0x8000  # save /w low 2x1 chroma subsampling (4:2:2)
    JPEG_SUBSAMPLING_444 = 0x10000  # save with no chroma subsampling (4:4:4)
    JPEG_OPTIMIZE = 0x20000  # on saving, compute optimal Huffman coding tables
    #                        # (can reduce a few percent of file size)
    JPEG_BASELINE = 0x40000  # save basic JPEG, without metadata or any markers
    KOALA_DEFAULT = 0
    LBM_DEFAULT = 0
    MNG_DEFAULT = 0
    PCD_DEFAULT = 0
    PCD_BASE = 1  # load the bitmap sized 768 x 512
    PCD_BASEDIV4 = 2  # load the bitmap sized 384 x 256
    PCD_BASEDIV16 = 3  # load the bitmap sized 192 x 128
    PCX_DEFAULT = 0
    PFM_DEFAULT = 0
    PICT_DEFAULT = 0
    PNG_DEFAULT = 0
    PNG_IGNOREGAMMA = 1  # loading: avoid gamma correction
    PNG_Z_BEST_SPEED = 0x0001  # save using ZLib level 1 compression flag
    #                          # (default value is 6)
    PNG_Z_DEFAULT_COMPRESSION = 0x0006  # save using ZLib level 6 compression
    #                                   # flag (default recommended value)
    PNG_Z_BEST_COMPRESSION = 0x0009  # save using ZLib level 9 compression flag
    #                                # (default value is 6)
    PNG_Z_NO_COMPRESSION = 0x0100  # save without ZLib compression
    PNG_INTERLACED = 0x0200  # save using Adam7 interlacing (use | to combine
    #                        # with other save flags)
    PNM_DEFAULT = 0
    PNM_SAVE_RAW = 0  # Writer saves in RAW format (i.e. P4, P5 or P6)
    PNM_SAVE_ASCII = 1  # Writer saves in ASCII format (i.e. P1, P2 or P3)
    PSD_DEFAULT = 0
    PSD_CMYK = 1  # reads tags for separated CMYK (default is conversion to RGB)
    PSD_LAB = 2  # reads tags for CIELab (default is conversion to RGB)
    RAS_DEFAULT = 0
    RAW_DEFAULT = 0  # load the file as linear RGB 48-bit
    RAW_PREVIEW = 1  # try to load the embedded JPEG preview with included
    #                # Exif Data or default to RGB 24-bit
    RAW_DISPLAY = 2  # load the file as RGB 24-bit
    SGI_DEFAULT = 0
    TARGA_DEFAULT = 0
    TARGA_LOAD_RGB888 = 1  # Convert RGB555 and ARGB8888 -> RGB888.
    TARGA_SAVE_RLE = 2  # Save with RLE compression
    TIFF_DEFAULT = 0
    TIFF_CMYK = 0x0001  # reads/stores tags for separated CMYK
    #                   # (use | to combine with compression flags)
    TIFF_PACKBITS = 0x0100  # save using PACKBITS compression
    TIFF_DEFLATE = 0x0200  # save using DEFLATE (a.k.a. ZLIB) compression
    TIFF_ADOBE_DEFLATE = 0x0400  # save using ADOBE DEFLATE compression
    TIFF_NONE = 0x0800  # save without any compression
    TIFF_CCITTFAX3 = 0x1000  # save using CCITT Group 3 fax encoding
    TIFF_CCITTFAX4 = 0x2000  # save using CCITT Group 4 fax encoding
    TIFF_LZW = 0x4000  # save using LZW compression
    TIFF_JPEG = 0x8000  # save using JPEG compression
    TIFF_LOGLUV = 0x10000  # save using LogLuv compression
    WBMP_DEFAULT = 0
    XBM_DEFAULT = 0
    XPM_DEFAULT = 0


class METADATA_MODELS(object):
    """FreeImage metadata model identifiers (FIMD_*)."""

    FIMD_COMMENTS = 0
    FIMD_EXIF_MAIN = 1
    FIMD_EXIF_EXIF = 2
    FIMD_EXIF_GPS = 3
    FIMD_EXIF_MAKERNOTE = 4
    FIMD_EXIF_INTEROP = 5
    FIMD_IPTC = 6
    FIMD_XMP = 7
    FIMD_GEOTIFF = 8
    FIMD_ANIMATION = 9


class METADATA_DATATYPE(object):
    """FreeImage metadata tag data types (FIDT_*) and the numpy dtypes
    used to decode their raw byte values.
    """

    FIDT_BYTE = 1  # 8-bit unsigned integer
    FIDT_ASCII = 2  # 8-bit bytes w/ last byte null
    FIDT_SHORT = 3  # 16-bit unsigned integer
    FIDT_LONG = 4  # 32-bit unsigned integer
    FIDT_RATIONAL = 5  # 64-bit unsigned fraction
    FIDT_SBYTE = 6  # 8-bit signed integer
    FIDT_UNDEFINED = 7  # 8-bit untyped data
    FIDT_SSHORT = 8  # 16-bit signed integer
    FIDT_SLONG = 9  # 32-bit signed integer
    FIDT_SRATIONAL = 10  # 64-bit signed fraction
    FIDT_FLOAT = 11  # 32-bit IEEE floating point
    FIDT_DOUBLE = 12  # 64-bit IEEE floating point
    FIDT_IFD = 13  # 32-bit unsigned integer (offset)
    FIDT_PALETTE = 14  # 32-bit RGBQUAD
    FIDT_LONG8 = 16  # 64-bit unsigned integer
    FIDT_SLONG8 = 17  # 64-bit signed integer
    FIDT_IFD8 = 18  # 64-bit unsigned integer (offset)

    # Tag type -> numpy dtype (or structured dtype for fractions/palette)
    dtypes = {
        FIDT_BYTE: numpy.uint8,
        FIDT_SHORT: numpy.uint16,
        FIDT_LONG: numpy.uint32,
        FIDT_RATIONAL: [("numerator", numpy.uint32), ("denominator", numpy.uint32)],
        FIDT_LONG8: numpy.uint64,
        FIDT_SLONG8: numpy.int64,
        FIDT_IFD8: numpy.uint64,
        FIDT_SBYTE: numpy.int8,
        FIDT_UNDEFINED: numpy.uint8,
        FIDT_SSHORT: numpy.int16,
        FIDT_SLONG: numpy.int32,
        FIDT_SRATIONAL: [("numerator", numpy.int32), ("denominator", numpy.int32)],
        FIDT_FLOAT: numpy.float32,
        FIDT_DOUBLE: numpy.float64,
        FIDT_IFD: numpy.uint32,
        FIDT_PALETTE: [
            ("R", numpy.uint8),
            ("G", numpy.uint8),
            ("B", numpy.uint8),
            ("A", numpy.uint8),
        ],
    }
class Freeimage(object):
    """Class to represent an interface to the FreeImage library.
    This class is relatively thin. It provides a Pythonic API that converts
    Freeimage objects to Python objects, but that's about it.
    The actual implementation should be provided by the plugins.

    The recommended way to call into the Freeimage library (so that
    errors and warnings show up in the right moment) is to use this
    object as a context manager:
    with imageio.fi as lib:
        lib.FreeImage_GetPalette()

    """

    _API = {
        # All we're doing here is telling ctypes that some of the
        # FreeImage functions return pointers instead of integers. (On
        # 64-bit systems, without this information the pointers get
        # truncated and crashes result). There's no need to list
        # functions that return ints, or the types of the parameters
        # to these or other functions -- that's fine to do implicitly.
        # Note that the ctypes immediately converts the returned void_p
        # back to a python int again! This is really not helpful,
        # because then passing it back to another library call will
        # cause truncation-to-32-bits on 64-bit systems. Thanks, ctypes!
        # So after these calls one must immediately re-wrap the int as
        # a c_void_p if it is to be passed back into FreeImage.
        "FreeImage_AllocateT": (ctypes.c_void_p, None),
        "FreeImage_FindFirstMetadata": (ctypes.c_void_p, None),
        "FreeImage_GetBits": (ctypes.c_void_p, None),
        "FreeImage_GetPalette": (ctypes.c_void_p, None),
        "FreeImage_GetTagKey": (ctypes.c_char_p, None),
        "FreeImage_GetTagValue": (ctypes.c_void_p, None),
        "FreeImage_CreateTag": (ctypes.c_void_p, None),
        "FreeImage_Save": (ctypes.c_void_p, None),
        "FreeImage_Load": (ctypes.c_void_p, None),
        "FreeImage_LoadFromMemory": (ctypes.c_void_p, None),
        "FreeImage_OpenMultiBitmap": (ctypes.c_void_p, None),
        "FreeImage_LoadMultiBitmapFromMemory": (ctypes.c_void_p, None),
        "FreeImage_LockPage": (ctypes.c_void_p, None),
        "FreeImage_OpenMemory": (ctypes.c_void_p, None),
        # 'FreeImage_ReadMemory': (ctypes.c_void_p, None),
        # 'FreeImage_CloseMemory': (ctypes.c_void_p, None),
        "FreeImage_GetVersion": (ctypes.c_char_p, None),
        "FreeImage_GetFIFExtensionList": (ctypes.c_char_p, None),
        "FreeImage_GetFormatFromFIF": (ctypes.c_char_p, None),
        "FreeImage_GetFIFDescription": (ctypes.c_char_p, None),
        "FreeImage_ColorQuantizeEx": (ctypes.c_void_p, None),
        # Pypy wants some extra definitions, so here we go ...
        "FreeImage_IsLittleEndian": (ctypes.c_int, None),
        "FreeImage_SetOutputMessage": (ctypes.c_void_p, None),
        "FreeImage_GetFIFCount": (ctypes.c_int, None),
        "FreeImage_IsPluginEnabled": (ctypes.c_int, None),
        "FreeImage_GetFileType": (ctypes.c_int, None),
        #
        "FreeImage_GetTagType": (ctypes.c_int, None),
        "FreeImage_GetTagLength": (ctypes.c_int, None),
        "FreeImage_FindNextMetadata": (ctypes.c_int, None),
        "FreeImage_FindCloseMetadata": (ctypes.c_void_p, None),
        #
        "FreeImage_GetFIFFromFilename": (ctypes.c_int, None),
        "FreeImage_FIFSupportsReading": (ctypes.c_int, None),
        "FreeImage_FIFSupportsWriting": (ctypes.c_int, None),
        "FreeImage_FIFSupportsExportType": (ctypes.c_int, None),
        "FreeImage_FIFSupportsExportBPP": (ctypes.c_int, None),
        "FreeImage_GetHeight": (ctypes.c_int, None),
        "FreeImage_GetWidth": (ctypes.c_int, None),
        "FreeImage_GetImageType": (ctypes.c_int, None),
        "FreeImage_GetBPP": (ctypes.c_int, None),
        "FreeImage_GetColorsUsed": (ctypes.c_int, None),
        "FreeImage_ConvertTo32Bits": (ctypes.c_void_p, None),
        "FreeImage_GetPitch": (ctypes.c_int, None),
        "FreeImage_Unload": (ctypes.c_void_p, None),
    }

    def __init__(self):
        # Initialize freeimage lib as None; it is loaded lazily via the
        # `lib` property.
        self._lib = None

        # A lock to create thread-safety
        self._lock = threading.RLock()

        # Init log messages lists
        self._messages = []

        # Select functype for error handler (calling convention differs
        # on Windows)
        if sys.platform.startswith("win"):
            functype = ctypes.WINFUNCTYPE
        else:
            functype = ctypes.CFUNCTYPE

        # Create output message handler
        @functype(None, ctypes.c_int, ctypes.c_char_p)
        def error_handler(fif, message):
            message = message.decode("utf-8")
            self._messages.append(message)
            # Keep at most the last 256 messages
            while (len(self._messages)) > 256:
                self._messages.pop(0)

        # Make sure to keep a ref to function, or ctypes will free the
        # callback while FreeImage still holds a pointer to it
        self._error_handler = error_handler

    @property
    def lib(self):
        # Lazily load the library on first access. On failure, the error
        # string is stored in self._lib so every later access re-raises.
        if self._lib is None:
            try:
                self.load_freeimage()
            except OSError as err:
                self._lib = "The freeimage library could not be loaded: "
                self._lib += str(err)
        if isinstance(self._lib, str):
            raise RuntimeError(self._lib)
        return self._lib

    def has_lib(self):
        # True if the library can be loaded (swallows the RuntimeError)
        try:
            self.lib
        except Exception:
            return False
        return True

    def load_freeimage(self):
        """Try to load the freeimage lib from the system. If not successful,
        try to download the imageio version and try again.
        """
        # Load library and register API
        success = False
        try:
            # Try without forcing a download, but giving preference
            # to the imageio-provided lib (if previously downloaded)
            self._load_freeimage()
            self._register_api()
            # NOTE(review): lexicographic version compare — "3.9" also
            # passes ">= 3.15"; adequate here since only 3.x is shipped.
            if self.lib.FreeImage_GetVersion().decode("utf-8") >= "3.15":
                success = True
        except OSError:
            pass

        if not success:
            # Ensure we have our own lib, try again
            get_freeimage_lib()
            self._load_freeimage()
            self._register_api()

        # Wrap up
        self.lib.FreeImage_SetOutputMessage(self._error_handler)
        self.lib_version = self.lib.FreeImage_GetVersion().decode("utf-8")

    def _load_freeimage(self):
        # Resolve the shared-library file and load it via ctypes.
        # Define names
        lib_names = ["freeimage", "libfreeimage"]
        exact_lib_names = [
            "FreeImage",
            "libfreeimage.dylib",
            "libfreeimage.so",
            "libfreeimage.so.3",
        ]
        # Add names of libraries that we provide (that file may not exist)
        res_dirs = resource_dirs()
        plat = get_platform()
        if plat:  # Can be None on e.g. FreeBSD
            fname = FNAME_PER_PLATFORM[plat]
            for dir in res_dirs:
                exact_lib_names.insert(0, os.path.join(dir, "freeimage", fname))

        # Add the path specified with IMAGEIO_FREEIMAGE_LIB:
        lib = os.getenv("IMAGEIO_FREEIMAGE_LIB", None)
        if lib is not None:
            exact_lib_names.insert(0, lib)

        # Load
        try:
            lib, fname = load_lib(exact_lib_names, lib_names, res_dirs)
        except OSError as err:  # pragma: no cover
            err_msg = str(err) + "\nPlease install the FreeImage library."
            raise OSError(err_msg)

        # Store
        self._lib = lib
        self.lib_fname = fname

    def _register_api(self):
        # Albert's ctypes pattern: set restype/argtypes per _API so
        # pointer returns are not truncated on 64-bit systems.
        for f, (restype, argtypes) in self._API.items():
            func = getattr(self.lib, f)
            func.restype = restype
            func.argtypes = argtypes

    # Handling of output messages

    def __enter__(self):
        # Serialize library access; warnings are flushed on exit
        self._lock.acquire()
        return self.lib

    def __exit__(self, *args):
        self._show_any_warnings()
        self._lock.release()

    def _reset_log(self):
        """Reset the list of output messages. Call this before
        loading or saving an image with the FreeImage API.
        """
        self._messages = []

    def _get_error_message(self):
        """Get the output messages produced since the last reset as
        one string. Returns 'No known reason.' if there are no messages.
        Also resets the log.
        """
        if self._messages:
            res = " ".join(self._messages)
            self._reset_log()
            return res
        else:
            return "No known reason."

    def _show_any_warnings(self):
        """If there were any messages since the last reset, show them
        as a warning. Otherwise do nothing. Also resets the messages.
        """
        if self._messages:
            logger.warning("imageio.freeimage warning: " + self._get_error_message())
            self._reset_log()

    def get_output_log(self):
        """Return a list of the last 256 output messages
        (warnings and errors) produced by the FreeImage library.
        """
        # This message log is not cleared/reset, but kept to 256 elements.
        return [m for m in self._messages]

    def getFIF(self, filename, mode, bb=None):
        """Get the freeimage Format (FIF) from a given filename.
        If mode is 'r', will try to determine the format by reading
        the file, otherwise only the filename is used.

        This function also tests whether the format supports reading/writing.
        """
        with self as lib:
            # Init
            ftype = -1
            if mode not in "rw":
                raise ValueError('Invalid mode (must be "r" or "w").')

            # Try getting format from the content. Note that some files
            # do not have a header that allows reading the format from
            # the file.
            if mode == "r":
                if bb is not None:
                    fimemory = lib.FreeImage_OpenMemory(ctypes.c_char_p(bb), len(bb))
                    ftype = lib.FreeImage_GetFileTypeFromMemory(
                        ctypes.c_void_p(fimemory), len(bb)
                    )
                    lib.FreeImage_CloseMemory(ctypes.c_void_p(fimemory))
                if (ftype == -1) and os.path.isfile(filename):
                    ftype = lib.FreeImage_GetFileType(efn(filename), 0)
            # Try getting the format from the extension
            if ftype == -1:
                ftype = lib.FreeImage_GetFIFFromFilename(efn(filename))

            # Test if ok
            if ftype == -1:
                raise ValueError('Cannot determine format of file "%s"' % filename)
            elif mode == "w" and not lib.FreeImage_FIFSupportsWriting(ftype):
                raise ValueError('Cannot write the format of file "%s"' % filename)
            elif mode == "r" and not lib.FreeImage_FIFSupportsReading(ftype):
                raise ValueError('Cannot read the format of file "%s"' % filename)
            return ftype

    def create_bitmap(self, filename, ftype, flags=0):
        """create_bitmap(filename, ftype, flags=0)
        Create a wrapped bitmap object.
        """
        return FIBitmap(self, filename, ftype, flags)

    def create_multipage_bitmap(self, filename, ftype, flags=0):
        """create_multipage_bitmap(filename, ftype, flags=0)
        Create a wrapped multipage bitmap object.
        """
        return FIMultipageBitmap(self, filename, ftype, flags)
class FIBaseBitmap(object):
    """Base class wrapping a FreeImage bitmap handle.

    Stores the Freeimage interface object, the filename, format (FIF)
    and flags used to create the handle, plus the (func, *args) tuples
    that release the handle in close().
    """

    def __init__(self, fi, filename, ftype, flags):
        self._fi = fi  # the Freeimage interface (lock + lib access)
        self._filename = filename
        self._ftype = ftype  # FreeImage format id
        self._flags = flags
        self._bitmap = None  # handle, assigned via _set_bitmap()
        self._close_funcs = []

    def __del__(self):
        self.close()

    def close(self):
        """Release the wrapped bitmap via the registered close functions.

        Safe to call multiple times; errors during release are ignored.
        """
        if (self._bitmap is not None) and self._close_funcs:
            for close_func in self._close_funcs:
                try:
                    with self._fi:
                        fun = close_func[0]
                        fun(*close_func[1:])
                except Exception:  # pragma: no cover
                    pass
            self._close_funcs = []
            self._bitmap = None

    def _set_bitmap(self, bitmap, close_func=None):
        """Set the bitmap handle and register the function to unload it.

        With close_func=None, FreeImage_Unload is registered; pass a
        falsy close_func (e.g. False) to register nothing.
        """
        # (removed a dead `if self._bitmap is not None: pass` branch)
        if close_func is None:
            close_func = self._fi.lib.FreeImage_Unload, bitmap

        self._bitmap = bitmap
        if close_func:
            self._close_funcs.append(close_func)

    def get_meta_data(self):
        """Read all metadata from the bitmap.

        Returns a nested Dict of the form {model_name: {tag_name: value}};
        ASCII tags become str, known numeric tags become numpy values,
        everything else stays raw bytes.
        """
        # todo: there is also FreeImage_TagToString, is that useful?
        # and would that work well when reading and then saving?

        # Create a list of (model_name, number) tuples
        models = [
            (name[5:], number)
            for name, number in METADATA_MODELS.__dict__.items()
            if name.startswith("FIMD_")
        ]

        # Prepare
        metadata = Dict()
        tag = ctypes.c_void_p()

        with self._fi as lib:
            # Iterate over all FreeImage meta models
            for model_name, number in models:
                # Find beginning, get search handle
                mdhandle = lib.FreeImage_FindFirstMetadata(
                    number, self._bitmap, ctypes.byref(tag)
                )
                mdhandle = ctypes.c_void_p(mdhandle)
                if mdhandle:
                    # Iterate over all tags in this model
                    more = True
                    while more:
                        # Get info about tag
                        tag_name = lib.FreeImage_GetTagKey(tag).decode("utf-8")
                        tag_type = lib.FreeImage_GetTagType(tag)
                        byte_size = lib.FreeImage_GetTagLength(tag)
                        char_ptr = ctypes.c_char * byte_size
                        data = char_ptr.from_address(lib.FreeImage_GetTagValue(tag))
                        # Convert in a way compatible with Pypy
                        tag_bytes = bytes(bytearray(data))
                        # The default value is the raw bytes
                        tag_val = tag_bytes
                        # Convert to a Python value in the metadata dict
                        if tag_type == METADATA_DATATYPE.FIDT_ASCII:
                            tag_val = tag_bytes.decode("utf-8", "replace")
                        elif tag_type in METADATA_DATATYPE.dtypes:
                            dtype = METADATA_DATATYPE.dtypes[tag_type]
                            if IS_PYPY and isinstance(dtype, (list, tuple)):
                                pass  # pragma: no cover - or we get a segfault
                            else:
                                try:
                                    tag_val = numpy.frombuffer(
                                        tag_bytes, dtype=dtype
                                    ).copy()
                                    if len(tag_val) == 1:
                                        tag_val = tag_val[0]
                                except Exception:  # pragma: no cover
                                    pass
                        # Store data in dict
                        subdict = metadata.setdefault(model_name, Dict())
                        subdict[tag_name] = tag_val
                        # Next
                        more = lib.FreeImage_FindNextMetadata(
                            mdhandle, ctypes.byref(tag)
                        )

                    # Close search handle for current meta model
                    lib.FreeImage_FindCloseMetadata(mdhandle)

        # Done
        return metadata

    def set_meta_data(self, metadata):
        """Write the given metadata ({model: {tag: value}}) to the bitmap.

        Unknown models are silently ignored. Values are stored as ASCII
        when possible and as numpy-typed data otherwise; values whose
        dtype has no FreeImage tag type are skipped with a warning.
        """
        # Create a dict mapping model_name to number
        models = {}
        for name, number in METADATA_MODELS.__dict__.items():
            if name.startswith("FIMD_"):
                models[name[5:]] = number

        # Create a mapping from numpy.dtype to METADATA_DATATYPE
        def get_tag_type_number(dtype):
            for number, numpy_dtype in METADATA_DATATYPE.dtypes.items():
                if dtype == numpy_dtype:
                    return number
            else:
                return None

        with self._fi as lib:
            for model_name, subdict in metadata.items():
                # Get model number
                number = models.get(model_name, None)
                if number is None:
                    continue  # Unknown model, silent ignore

                for tag_name, tag_val in subdict.items():
                    # Create new tag
                    tag = lib.FreeImage_CreateTag()
                    tag = ctypes.c_void_p(tag)

                    try:
                        # Convert Python value to FI type, val
                        is_ascii = False
                        if isinstance(tag_val, str):
                            try:
                                tag_bytes = tag_val.encode("ascii")
                                is_ascii = True
                            except UnicodeError:
                                pass
                        if is_ascii:
                            tag_type = METADATA_DATATYPE.FIDT_ASCII
                            tag_count = len(tag_bytes)
                        else:
                            if not hasattr(tag_val, "dtype"):
                                tag_val = numpy.array([tag_val])
                            tag_type = get_tag_type_number(tag_val.dtype)
                            if tag_type is None:
                                logger.warning(
                                    "imageio.freeimage warning: Could not "
                                    "determine tag type of %r." % tag_name
                                )
                                continue
                            tag_bytes = tag_val.tobytes()
                            tag_count = tag_val.size
                        # Set properties
                        lib.FreeImage_SetTagKey(tag, tag_name.encode("utf-8"))
                        lib.FreeImage_SetTagType(tag, tag_type)
                        lib.FreeImage_SetTagCount(tag, tag_count)
                        lib.FreeImage_SetTagLength(tag, len(tag_bytes))
                        lib.FreeImage_SetTagValue(tag, tag_bytes)
                        # Store tag
                        tag_key = lib.FreeImage_GetTagKey(tag)
                        lib.FreeImage_SetMetadata(number, self._bitmap, tag_key, tag)

                    except Exception as err:  # pragma: no cover
                        # Bugfix: message previously read "imagio.freeimage"
                        logger.warning(
                            "imageio.freeimage warning: Could not set tag "
                            "%r: %s, %s"
                            % (tag_name, self._fi._get_error_message(), str(err))
                        )
                    finally:
                        lib.FreeImage_DeleteTag(tag)
class FIBitmap(FIBaseBitmap):
    """Wrapper for the FI bitmap object."""

    def allocate(self, array):
        """Allocate a FreeImage bitmap matching the given numpy array's
        shape and dtype (via FI_TYPES.fi_types).
        """
        # Prepare array
        assert isinstance(array, numpy.ndarray)
        shape = array.shape
        dtype = array.dtype

        # Get shape and channel info
        r, c = shape[:2]
        if len(shape) == 2:
            n_channels = 1
        elif len(shape) == 3:
            n_channels = shape[2]
        else:
            n_channels = shape[0]

        # Get fi_type
        try:
            fi_type = FI_TYPES.fi_types[(dtype.type, n_channels)]
            self._fi_type = fi_type
        except KeyError:
            raise ValueError("Cannot write arrays of given type and shape.")

        # Allocate bitmap
        with self._fi as lib:
            bpp = 8 * dtype.itemsize * n_channels
            bitmap = lib.FreeImage_AllocateT(fi_type, c, r, bpp, 0, 0, 0)
            bitmap = ctypes.c_void_p(bitmap)

            # Check and store
            if not bitmap:  # pragma: no cover
                raise RuntimeError(
                    "Could not allocate bitmap for storage: %s"
                    % self._fi._get_error_message()
                )
            self._set_bitmap(bitmap, (lib.FreeImage_Unload, bitmap))

    def load_from_filename(self, filename=None):
        """Load the bitmap from the given file (default: the filename
        this object was created with).
        """
        if filename is None:
            filename = self._filename

        with self._fi as lib:
            # Create bitmap
            bitmap = lib.FreeImage_Load(self._ftype, efn(filename), self._flags)
            bitmap = ctypes.c_void_p(bitmap)

            # Check and store
            if not bitmap:  # pragma: no cover
                raise ValueError(
                    'Could not load bitmap "%s": %s'
                    % (self._filename, self._fi._get_error_message())
                )
            self._set_bitmap(bitmap, (lib.FreeImage_Unload, bitmap))

    def save_to_filename(self, filename=None):
        """Save the bitmap to the given file (default: the filename this
        object was created with). Requires allocate()/set_image_data()
        to have been called first (uses self._fi_type).
        """
        if filename is None:
            filename = self._filename

        ftype = self._ftype
        bitmap = self._bitmap
        fi_type = self._fi_type  # element type

        with self._fi as lib:
            # Check if can write
            if fi_type == FI_TYPES.FIT_BITMAP:
                can_write = lib.FreeImage_FIFSupportsExportBPP(
                    ftype, lib.FreeImage_GetBPP(bitmap)
                )
            else:
                can_write = lib.FreeImage_FIFSupportsExportType(ftype, fi_type)
            if not can_write:
                raise TypeError("Cannot save image of this format to this file type")

            # Save to file
            res = lib.FreeImage_Save(ftype, bitmap, efn(filename), self._flags)
            # Check
            if res is None:  # pragma: no cover, we do so many checks, this is rare
                raise RuntimeError(
                    f"Could not save file `{self._filename}`: {self._fi._get_error_message()}"
                )

    def get_image_data(self):
        """Return the bitmap's pixels as a numpy array (copied), converted
        from FreeImage's BGR[A]/bottom-up layout to RGB[A]/top-down.
        """
        dtype, shape, bpp = self._get_type_and_shape()
        array = self._wrap_bitmap_bits_in_array(shape, dtype, False)
        with self._fi as lib:
            isle = lib.FreeImage_IsLittleEndian()

        # swizzle the color components and flip the scanlines to go from
        # FreeImage's BGR[A] and upside-down internal memory format to
        # something more normal
        def n(arr):
            # return arr[..., ::-1].T  # Does not work on numpypy yet
            if arr.ndim == 1:  # pragma: no cover
                return arr[::-1].T
            elif arr.ndim == 2:  # Always the case here ...
                return arr[:, ::-1].T
            elif arr.ndim == 3:  # pragma: no cover
                return arr[:, :, ::-1].T
            elif arr.ndim == 4:  # pragma: no cover
                return arr[:, :, :, ::-1].T

        # The BGR[A] storage order only applies to 8-bit channels on
        # little-endian machines; everything else is already RGB[A].
        if len(shape) == 3 and isle and dtype.type == numpy.uint8:
            b = n(array[0])
            g = n(array[1])
            r = n(array[2])
            if shape[0] == 3:
                return numpy.dstack((r, g, b))
            elif shape[0] == 4:
                a = n(array[3])
                return numpy.dstack((r, g, b, a))
            else:  # pragma: no cover - we check this earlier
                raise ValueError("Cannot handle images of shape %s" % shape)

        # We need to copy because array does *not* own its memory
        # after bitmap is freed.
        a = n(array).copy()
        return a

    def set_image_data(self, array):
        """Copy the given numpy array's pixels into the bitmap, converting
        to FreeImage's BGR[A]/bottom-up layout where applicable. For 8-bit
        grayscale images, also installs a grayscale palette.
        """
        # Prepare array
        assert isinstance(array, numpy.ndarray)
        shape = array.shape
        dtype = array.dtype
        with self._fi as lib:
            isle = lib.FreeImage_IsLittleEndian()

        # Calculate shape and channels
        # NOTE(review): for ndim > 3 w_shape is never assigned, which would
        # raise NameError below — presumably that path is unreachable after
        # allocate(); confirm against callers.
        r, c = shape[:2]
        if len(shape) == 2:
            n_channels = 1
            w_shape = (c, r)
        elif len(shape) == 3:
            n_channels = shape[2]
            w_shape = (n_channels, c, r)
        else:
            n_channels = shape[0]

        def n(arr):  # normalise to freeimage's in-memory format
            return arr[::-1].T

        wrapped_array = self._wrap_bitmap_bits_in_array(w_shape, dtype, True)
        # swizzle the color components and flip the scanlines to go to
        # FreeImage's BGR[A] and upside-down internal memory format
        # The BGR[A] order is only used for 8bits per channel images
        # on little endian machines. For everything else RGB[A] is
        # used.
        if len(shape) == 3 and isle and dtype.type == numpy.uint8:
            R = array[:, :, 0]
            G = array[:, :, 1]
            B = array[:, :, 2]
            wrapped_array[0] = n(B)
            wrapped_array[1] = n(G)
            wrapped_array[2] = n(R)
            if shape[2] == 4:
                A = array[:, :, 3]
                wrapped_array[3] = n(A)
        else:
            wrapped_array[:] = n(array)
        if self._need_finish:
            self._finish_wrapped_array(wrapped_array)

        if len(shape) == 2 and dtype.type == numpy.uint8:
            # Install a grayscale palette (256 RGBQUADs = 1024 bytes)
            with self._fi as lib:
                palette = lib.FreeImage_GetPalette(self._bitmap)
                palette = ctypes.c_void_p(palette)
                if not palette:
                    raise RuntimeError("Could not get image palette")
                try:
                    palette_data = GREY_PALETTE.ctypes.data
                except Exception:  # pragma: no cover - IS_PYPY
                    palette_data = GREY_PALETTE.__array_interface__["data"][0]
                ctypes.memmove(palette, palette_data, 1024)

    def _wrap_bitmap_bits_in_array(self, shape, dtype, save):
        """Return an ndarray view on the data in a FreeImage bitmap. Only
        valid for as long as the bitmap is loaded (if single page) / locked
        in memory (if multipage). This is used in loading data, but
        also during saving, to prepare a strided numpy array buffer.

        """
        # Get bitmap info
        with self._fi as lib:
            pitch = lib.FreeImage_GetPitch(self._bitmap)
            bits = lib.FreeImage_GetBits(self._bitmap)

        # Get more info
        height = shape[-1]
        byte_size = height * pitch
        itemsize = dtype.itemsize

        # Get strides (Fortran-ish layout; last axis strides by scanline
        # pitch, which may include padding)
        if len(shape) == 3:
            strides = (itemsize, shape[0] * itemsize, pitch)
        else:
            strides = (itemsize, pitch)

        # Create numpy array and return
        data = (ctypes.c_char * byte_size).from_address(bits)
        try:
            self._need_finish = False
            if TEST_NUMPY_NO_STRIDES:
                raise NotImplementedError()
            return numpy.ndarray(shape, dtype=dtype, buffer=data, strides=strides)
        except NotImplementedError:
            # IS_PYPY - not very efficient. We create a C-contiguous
            # numpy array (because pypy does not support Fortran-order)
            # and shape it such that the rest of the code can remain.
            if save:
                self._need_finish = True  # Flag to use _finish_wrapped_array
                return numpy.zeros(shape, dtype=dtype)
            else:
                bb = bytes(bytearray(data))
                array = numpy.frombuffer(bb, dtype=dtype).copy()
                # Deal with strides
                if len(shape) == 3:
                    array.shape = shape[2], strides[-1] // shape[0], shape[0]
                    array2 = array[: shape[2], : shape[1], : shape[0]]
                    array = numpy.zeros(shape, dtype=array.dtype)
                    for i in range(shape[0]):
                        array[i] = array2[:, :, i].T
                else:
                    array.shape = shape[1], strides[-1]
                    array = array[: shape[1], : shape[0]].T
                return array

    def _finish_wrapped_array(self, array):  # IS_PYPY
        """Hardcore way to inject numpy array in bitmap."""
        # Get bitmap info
        with self._fi as lib:
            pitch = lib.FreeImage_GetPitch(self._bitmap)
            bits = lib.FreeImage_GetBits(self._bitmap)
            bpp = lib.FreeImage_GetBPP(self._bitmap)
        # Get channels and realwidth
        nchannels = bpp // 8 // array.itemsize
        realwidth = pitch // nchannels
        # Apply padding for pitch if necessary
        extra = realwidth - array.shape[-2]
        assert 0 <= extra < 10
        # Make sort of Fortran, also take padding (i.e. pitch) into account
        newshape = array.shape[-1], realwidth, nchannels
        array2 = numpy.zeros(newshape, array.dtype)
        if nchannels == 1:
            array2[:, : array.shape[-2], 0] = array.T
        else:
            for i in range(nchannels):
                array2[:, : array.shape[-2], i] = array[i, :, :].T
        # copy data
        data_ptr = array2.__array_interface__["data"][0]
        ctypes.memmove(bits, data_ptr, array2.nbytes)
        del array2

    def _get_type_and_shape(self):
        """Return (dtype, shape, bpp) for the loaded bitmap. Palette-based
        and unusual-BPP bitmaps are converted to 32 bits and retried.
        """
        bitmap = self._bitmap

        # Get info on bitmap
        with self._fi as lib:
            w = lib.FreeImage_GetWidth(bitmap)
            h = lib.FreeImage_GetHeight(bitmap)
            self._fi_type = fi_type = lib.FreeImage_GetImageType(bitmap)
            if not fi_type:
                raise ValueError("Unknown image pixel type")

        # Determine required props for numpy array
        bpp = None
        dtype = FI_TYPES.dtypes[fi_type]

        if fi_type == FI_TYPES.FIT_BITMAP:
            with self._fi as lib:
                bpp = lib.FreeImage_GetBPP(bitmap)
                has_pallette = lib.FreeImage_GetColorsUsed(bitmap)
            if has_pallette:
                # Examine the palette. If it is grayscale, we return as such
                if has_pallette == 256:
                    palette = lib.FreeImage_GetPalette(bitmap)
                    palette = ctypes.c_void_p(palette)
                    p = (ctypes.c_uint8 * (256 * 4)).from_address(palette.value)
                    p = numpy.frombuffer(p, numpy.uint32).copy()
                    if (GREY_PALETTE == p).all():
                        extra_dims = []
                        return numpy.dtype(dtype), extra_dims + [w, h], bpp
                # Convert bitmap and call this method again
                newbitmap = lib.FreeImage_ConvertTo32Bits(bitmap)
                newbitmap = ctypes.c_void_p(newbitmap)
                self._set_bitmap(newbitmap)
                return self._get_type_and_shape()
            elif bpp == 8:
                extra_dims = []
            elif bpp == 24:
                extra_dims = [3]
            elif bpp == 32:
                extra_dims = [4]
            else:  # pragma: no cover
                # raise ValueError('Cannot convert %d BPP bitmap' % bpp)
                # Convert bitmap and call this method again
                newbitmap = lib.FreeImage_ConvertTo32Bits(bitmap)
                newbitmap = ctypes.c_void_p(newbitmap)
                self._set_bitmap(newbitmap)
                return self._get_type_and_shape()
        else:
            extra_dims = FI_TYPES.extra_dims[fi_type]

        # Return dtype and shape
        return numpy.dtype(dtype), extra_dims + [w, h], bpp

    def quantize(self, quantizer=0, palettesize=256):
        """Quantize the bitmap to make it 8-bit (paletted). Returns a new
        FIBitmap object.
        Only for 24 bit images.
        """
        with self._fi as lib:
            # New bitmap
            bitmap = lib.FreeImage_ColorQuantizeEx(
                self._bitmap, quantizer, palettesize, 0, None
            )
            bitmap = ctypes.c_void_p(bitmap)

            # Check and return
            if not bitmap:
                raise ValueError(
                    'Could not quantize bitmap "%s": %s'
                    % (self._filename, self._fi._get_error_message())
                )

            new = FIBitmap(self._fi, self._filename, self._ftype, self._flags)
            new._set_bitmap(bitmap, (lib.FreeImage_Unload, bitmap))
            new._fi_type = self._fi_type
            return new
class FIMultipageBitmap(FIBaseBitmap):
    """Wrapper for the multipage FI bitmap object."""

    def load_from_filename(self, filename=None):
        """Open the file as a multipage bitmap for reading."""
        if filename is None:  # pragma: no cover
            filename = self._filename

        # Prepare
        create_new = False
        read_only = True
        keep_cache_in_memory = False

        # Try opening
        with self._fi as lib:
            # Create bitmap
            multibitmap = lib.FreeImage_OpenMultiBitmap(
                self._ftype,
                efn(filename),
                create_new,
                read_only,
                keep_cache_in_memory,
                self._flags,
            )
            multibitmap = ctypes.c_void_p(multibitmap)

            # Check
            if not multibitmap:  # pragma: no cover
                err = self._fi._get_error_message()
                raise ValueError(
                    'Could not open file "%s" as multi-image: %s'
                    % (self._filename, err)
                )
            self._set_bitmap(multibitmap, (lib.FreeImage_CloseMultiBitmap, multibitmap))

    def save_to_filename(self, filename=None):
        """Open the file as a new multipage bitmap for writing; pages are
        added via append_bitmap() and flushed on close.
        """
        if filename is None:  # pragma: no cover
            filename = self._filename

        # Prepare
        create_new = True
        read_only = False
        keep_cache_in_memory = False

        # Open the file
        # todo: Set flags at close func
        with self._fi as lib:
            multibitmap = lib.FreeImage_OpenMultiBitmap(
                self._ftype,
                efn(filename),
                create_new,
                read_only,
                keep_cache_in_memory,
                0,
            )
            multibitmap = ctypes.c_void_p(multibitmap)

            # Check
            if not multibitmap:  # pragma: no cover
                msg = 'Could not open file "%s" for writing multi-image: %s' % (
                    self._filename,
                    self._fi._get_error_message(),
                )
                raise ValueError(msg)
            self._set_bitmap(multibitmap, (lib.FreeImage_CloseMultiBitmap, multibitmap))

    def __len__(self):
        # Number of pages in the multipage bitmap
        with self._fi as lib:
            return lib.FreeImage_GetPageCount(self._bitmap)

    def get_page(self, index):
        """Return the sub-bitmap for the given page index.
        Please close the returned bitmap when done.
        """
        with self._fi as lib:
            # Create low-level bitmap in freeimage
            bitmap = lib.FreeImage_LockPage(self._bitmap, index)
            bitmap = ctypes.c_void_p(bitmap)
            if not bitmap:  # pragma: no cover
                raise ValueError(
                    "Could not open sub-image %i in %r: %s"
                    % (index, self._filename, self._fi._get_error_message())
                )

            # Get bitmap object to wrap this bitmap
            bm = FIBitmap(self._fi, self._filename, self._ftype, self._flags)
            # UnlockPage(..., False) discards changes when the page closes
            bm._set_bitmap(
                bitmap, (lib.FreeImage_UnlockPage, self._bitmap, bitmap, False)
            )
            return bm

    def append_bitmap(self, bitmap):
        """Add a sub-bitmap to the multi-page bitmap."""
        with self._fi as lib:
            # no return value
            lib.FreeImage_AppendPage(self._bitmap, bitmap._bitmap)


# Create instance (module-level singleton used by the freeimage plugins)
fi = Freeimage()
imageio is distributed under the terms of the (new) BSD License. +# This code was taken from https://github.com/almarklein/visvis/blob/master/vvmovie/images2swf.py + +# styletest: ignore E261 + +""" +Provides a function (write_swf) to store a series of numpy arrays in an +SWF movie, that can be played on a wide range of OS's. + +In desperation of wanting to share animated images, and then lacking a good +writer for animated gif or .avi, I decided to look into SWF. This format +is very well documented. + +This is a pure python module to create an SWF file that shows a series +of images. The images are stored using the DEFLATE algorithm (same as +PNG and ZIP and which is included in the standard Python distribution). +As this compression algorithm is much more effective than that used in +GIF images, we obtain better quality (24 bit colors + alpha channel) +while still producesing smaller files (a test showed ~75%). Although +SWF also allows for JPEG compression, doing so would probably require +a third party library for the JPEG encoding/decoding, we could +perhaps do this via Pillow or freeimage. + +sources and tools: + +- SWF on wikipedia +- Adobes "SWF File Format Specification" version 10 + (http://www.adobe.com/devnet/swf/pdf/swf_file_format_spec_v10.pdf) +- swftools (swfdump in specific) for debugging +- iwisoft swf2avi can be used to convert swf to avi/mpg/flv with really + good quality, while file size is reduced with factors 20-100. + A good program in my opinion. The free version has the limitation + of a watermark in the upper left corner. + +""" + +import os +import zlib +import time # noqa +import logging + +import numpy as np + + +logger = logging.getLogger(__name__) + +# todo: use Pillow to support reading JPEG images from SWF? + + +# Base functions and classes + + +class BitArray: + """Dynamic array of bits that automatically resizes + with factors of two. 
+ Append bits using .append() or += + You can reverse bits using .reverse() + """ + + def __init__(self, initvalue=None): + self.data = np.zeros((16,), dtype=np.uint8) + self._len = 0 + if initvalue is not None: + self.append(initvalue) + + def __len__(self): + return self._len # self.data.shape[0] + + def __repr__(self): + return self.data[: self._len].tobytes().decode("ascii") + + def _checkSize(self): + # check length... grow if necessary + arraylen = self.data.shape[0] + if self._len >= arraylen: + tmp = np.zeros((arraylen * 2,), dtype=np.uint8) + tmp[: self._len] = self.data[: self._len] + self.data = tmp + + def __add__(self, value): + self.append(value) + return self + + def append(self, bits): + # check input + if isinstance(bits, BitArray): + bits = str(bits) + if isinstance(bits, int): # pragma: no cover - we dont use it + bits = str(bits) + if not isinstance(bits, str): # pragma: no cover + raise ValueError("Append bits as strings or integers!") + + # add bits + for bit in bits: + self.data[self._len] = ord(bit) + self._len += 1 + self._checkSize() + + def reverse(self): + """In-place reverse.""" + tmp = self.data[: self._len].copy() + self.data[: self._len] = tmp[::-1] + + def tobytes(self): + """Convert to bytes. If necessary, + zeros are padded to the end (right side). + """ + bits = str(self) + + # determine number of bytes + nbytes = 0 + while nbytes * 8 < len(bits): + nbytes += 1 + # pad + bits = bits.ljust(nbytes * 8, "0") + + # go from bits to bytes + bb = bytes() + for i in range(nbytes): + tmp = int(bits[i * 8 : (i + 1) * 8], 2) + bb += int2uint8(tmp) + + # done + return bb + + +def int2uint32(i): + return int(i).to_bytes(4, "little") + + +def int2uint16(i): + return int(i).to_bytes(2, "little") + + +def int2uint8(i): + return int(i).to_bytes(1, "little") + + +def int2bits(i, n=None): + """convert int to a string of bits (0's and 1's in a string), + pad to n elements. 
Convert back using int(ss,2).""" + ii = i + + # make bits + bb = BitArray() + while ii > 0: + bb += str(ii % 2) + ii = ii >> 1 + bb.reverse() + + # justify + if n is not None: + if len(bb) > n: # pragma: no cover + raise ValueError("int2bits fail: len larger than padlength.") + bb = str(bb).rjust(n, "0") + + # done + return BitArray(bb) + + +def bits2int(bb, n=8): + # Init + value = "" + + # Get value in bits + for i in range(len(bb)): + b = bb[i : i + 1] + tmp = bin(ord(b))[2:] + # value += tmp.rjust(8,'0') + value = tmp.rjust(8, "0") + value + + # Make decimal + return int(value[:n], 2) + + +def get_type_and_len(bb): + """bb should be 6 bytes at least + Return (type, length, length_of_full_tag) + """ + # Init + value = "" + + # Get first 16 bits + for i in range(2): + b = bb[i : i + 1] + tmp = bin(ord(b))[2:] + # value += tmp.rjust(8,'0') + value = tmp.rjust(8, "0") + value + + # Get type and length + type = int(value[:10], 2) + L = int(value[10:], 2) + L2 = L + 2 + + # Long tag header? + if L == 63: # '111111' + value = "" + for i in range(2, 6): + b = bb[i : i + 1] # becomes a single-byte bytes() + tmp = bin(ord(b))[2:] + # value += tmp.rjust(8,'0') + value = tmp.rjust(8, "0") + value + L = int(value, 2) + L2 = L + 6 + + # Done + return type, L, L2 + + +def signedint2bits(i, n=None): + """convert signed int to a string of bits (0's and 1's in a string), + pad to n elements. Negative numbers are stored in 2's complement bit + patterns, thus positive numbers always start with a 0. + """ + + # negative number? + ii = i + if i < 0: + # A negative number, -n, is represented as the bitwise opposite of + ii = abs(ii) - 1 # the positive-zero number n-1. 
+ + # make bits + bb = BitArray() + while ii > 0: + bb += str(ii % 2) + ii = ii >> 1 + bb.reverse() + + # justify + bb = "0" + str(bb) # always need the sign bit in front + if n is not None: + if len(bb) > n: # pragma: no cover + raise ValueError("signedint2bits fail: len larger than padlength.") + bb = bb.rjust(n, "0") + + # was it negative? (then opposite bits) + if i < 0: + bb = bb.replace("0", "x").replace("1", "0").replace("x", "1") + + # done + return BitArray(bb) + + +def twits2bits(arr): + """Given a few (signed) numbers, store them + as compactly as possible in the wat specifief by the swf format. + The numbers are multiplied by 20, assuming they + are twits. + Can be used to make the RECT record. + """ + + # first determine length using non justified bit strings + maxlen = 1 + for i in arr: + tmp = len(signedint2bits(i * 20)) + if tmp > maxlen: + maxlen = tmp + + # build array + bits = int2bits(maxlen, 5) + for i in arr: + bits += signedint2bits(i * 20, maxlen) + + return bits + + +def floats2bits(arr): + """Given a few (signed) numbers, convert them to bits, + stored as FB (float bit values). We always use 16.16. + Negative numbers are not (yet) possible, because I don't + know how the're implemented (ambiguity). + """ + bits = int2bits(31, 5) # 32 does not fit in 5 bits! 
+ for i in arr: + if i < 0: # pragma: no cover + raise ValueError("Dit not implement negative floats!") + i1 = int(i) + i2 = i - i1 + bits += int2bits(i1, 15) + bits += int2bits(i2 * 2**16, 16) + return bits + + +# Base Tag + + +class Tag: + def __init__(self): + self.bytes = bytes() + self.tagtype = -1 + + def process_tag(self): + """Implement this to create the tag.""" + raise NotImplementedError() + + def get_tag(self): + """Calls processTag and attaches the header.""" + self.process_tag() + + # tag to binary + bits = int2bits(self.tagtype, 10) + + # complete header uint16 thing + bits += "1" * 6 # = 63 = 0x3f + # make uint16 + bb = int2uint16(int(str(bits), 2)) + + # now add 32bit length descriptor + bb += int2uint32(len(self.bytes)) + + # done, attach and return + bb += self.bytes + return bb + + def make_rect_record(self, xmin, xmax, ymin, ymax): + """Simply uses makeCompactArray to produce + a RECT Record.""" + return twits2bits([xmin, xmax, ymin, ymax]) + + def make_matrix_record(self, scale_xy=None, rot_xy=None, trans_xy=None): + # empty matrix? 
+ if scale_xy is None and rot_xy is None and trans_xy is None: + return "0" * 8 + + # init + bits = BitArray() + + # scale + if scale_xy: + bits += "1" + bits += floats2bits([scale_xy[0], scale_xy[1]]) + else: + bits += "0" + + # rotation + if rot_xy: + bits += "1" + bits += floats2bits([rot_xy[0], rot_xy[1]]) + else: + bits += "0" + + # translation (no flag here) + if trans_xy: + bits += twits2bits([trans_xy[0], trans_xy[1]]) + else: + bits += twits2bits([0, 0]) + + # done + return bits + + +# Control tags + + +class ControlTag(Tag): + def __init__(self): + Tag.__init__(self) + + +class FileAttributesTag(ControlTag): + def __init__(self): + ControlTag.__init__(self) + self.tagtype = 69 + + def process_tag(self): + self.bytes = "\x00".encode("ascii") * (1 + 3) + + +class ShowFrameTag(ControlTag): + def __init__(self): + ControlTag.__init__(self) + self.tagtype = 1 + + def process_tag(self): + self.bytes = bytes() + + +class SetBackgroundTag(ControlTag): + """Set the color in 0-255, or 0-1 (if floats given).""" + + def __init__(self, *rgb): + self.tagtype = 9 + if len(rgb) == 1: + rgb = rgb[0] + self.rgb = rgb + + def process_tag(self): + bb = bytes() + for i in range(3): + clr = self.rgb[i] + if isinstance(clr, float): # pragma: no cover - not used + clr = clr * 255 + bb += int2uint8(clr) + self.bytes = bb + + +class DoActionTag(Tag): + def __init__(self, action="stop"): + Tag.__init__(self) + self.tagtype = 12 + self.actions = [action] + + def append(self, action): # pragma: no cover - not used + self.actions.append(action) + + def process_tag(self): + bb = bytes() + + for action in self.actions: + action = action.lower() + if action == "stop": + bb += "\x07".encode("ascii") + elif action == "play": # pragma: no cover - not used + bb += "\x06".encode("ascii") + else: # pragma: no cover + logger.warning("unknown action: %s" % action) + + bb += int2uint8(0) + self.bytes = bb + + +# Definition tags +class DefinitionTag(Tag): + counter = 0 # to give automatically id's 
+ + def __init__(self): + Tag.__init__(self) + DefinitionTag.counter += 1 + self.id = DefinitionTag.counter # id in dictionary + + +class BitmapTag(DefinitionTag): + def __init__(self, im): + DefinitionTag.__init__(self) + self.tagtype = 36 # DefineBitsLossless2 + + # convert image (note that format is ARGB) + # even a grayscale image is stored in ARGB, nevertheless, + # the fabilous deflate compression will make it that not much + # more data is required for storing (25% or so, and less than 10% + # when storing RGB as ARGB). + + if len(im.shape) == 3: + if im.shape[2] in [3, 4]: + tmp = np.ones((im.shape[0], im.shape[1], 4), dtype=np.uint8) * 255 + for i in range(3): + tmp[:, :, i + 1] = im[:, :, i] + if im.shape[2] == 4: + tmp[:, :, 0] = im[:, :, 3] # swap channel where alpha is + else: # pragma: no cover + raise ValueError("Invalid shape to be an image.") + + elif len(im.shape) == 2: + tmp = np.ones((im.shape[0], im.shape[1], 4), dtype=np.uint8) * 255 + for i in range(3): + tmp[:, :, i + 1] = im[:, :] + else: # pragma: no cover + raise ValueError("Invalid shape to be an image.") + + # we changed the image to uint8 4 channels. + # now compress! 
+ self._data = zlib.compress(tmp.tobytes(), zlib.DEFLATED) + self.imshape = im.shape + + def process_tag(self): + # build tag + bb = bytes() + bb += int2uint16(self.id) # CharacterID + bb += int2uint8(5) # BitmapFormat + bb += int2uint16(self.imshape[1]) # BitmapWidth + bb += int2uint16(self.imshape[0]) # BitmapHeight + bb += self._data # ZlibBitmapData + + self.bytes = bb + + +class PlaceObjectTag(ControlTag): + def __init__(self, depth, idToPlace=None, xy=(0, 0), move=False): + ControlTag.__init__(self) + self.tagtype = 26 + self.depth = depth + self.idToPlace = idToPlace + self.xy = xy + self.move = move + + def process_tag(self): + # retrieve stuff + depth = self.depth + xy = self.xy + id = self.idToPlace + + # build PlaceObject2 + bb = bytes() + if self.move: + bb += "\x07".encode("ascii") + else: + # (8 bit flags): 4:matrix, 2:character, 1:move + bb += "\x06".encode("ascii") + bb += int2uint16(depth) # Depth + bb += int2uint16(id) # character id + bb += self.make_matrix_record(trans_xy=xy).tobytes() # MATRIX record + self.bytes = bb + + +class ShapeTag(DefinitionTag): + def __init__(self, bitmapId, xy, wh): + DefinitionTag.__init__(self) + self.tagtype = 2 + self.bitmapId = bitmapId + self.xy = xy + self.wh = wh + + def process_tag(self): + """Returns a defineshape tag. 
with a bitmap fill""" + + bb = bytes() + bb += int2uint16(self.id) + xy, wh = self.xy, self.wh + tmp = self.make_rect_record(xy[0], wh[0], xy[1], wh[1]) # ShapeBounds + bb += tmp.tobytes() + + # make SHAPEWITHSTYLE structure + + # first entry: FILLSTYLEARRAY with in it a single fill style + bb += int2uint8(1) # FillStyleCount + bb += "\x41".encode("ascii") # FillStyleType (0x41 or 0x43 unsmoothed) + bb += int2uint16(self.bitmapId) # BitmapId + # bb += '\x00' # BitmapMatrix (empty matrix with leftover bits filled) + bb += self.make_matrix_record(scale_xy=(20, 20)).tobytes() + + # # first entry: FILLSTYLEARRAY with in it a single fill style + # bb += int2uint8(1) # FillStyleCount + # bb += '\x00' # solid fill + # bb += '\x00\x00\xff' # color + + # second entry: LINESTYLEARRAY with a single line style + bb += int2uint8(0) # LineStyleCount + # bb += int2uint16(0*20) # Width + # bb += '\x00\xff\x00' # Color + + # third and fourth entry: NumFillBits and NumLineBits (4 bits each) + # I each give them four bits, so 16 styles possible. + bb += "\x44".encode("ascii") + + self.bytes = bb + + # last entries: SHAPERECORDs ... (individual shape records not aligned) + # STYLECHANGERECORD + bits = BitArray() + bits += self.make_style_change_record(0, 1, moveTo=(self.wh[0], self.wh[1])) + # STRAIGHTEDGERECORD 4x + bits += self.make_straight_edge_record(-self.wh[0], 0) + bits += self.make_straight_edge_record(0, -self.wh[1]) + bits += self.make_straight_edge_record(self.wh[0], 0) + bits += self.make_straight_edge_record(0, self.wh[1]) + + # ENDSHAPRECORD + bits += self.make_end_shape_record() + + self.bytes += bits.tobytes() + + # done + # self.bytes = bb + + def make_style_change_record(self, lineStyle=None, fillStyle=None, moveTo=None): + # first 6 flags + # Note that we use FillStyle1. If we don't flash (at least 8) does not + # recognize the frames properly when importing to library. 
+ + bits = BitArray() + bits += "0" # TypeFlag (not an edge record) + bits += "0" # StateNewStyles (only for DefineShape2 and Defineshape3) + if lineStyle: + bits += "1" # StateLineStyle + else: + bits += "0" + if fillStyle: + bits += "1" # StateFillStyle1 + else: + bits += "0" + bits += "0" # StateFillStyle0 + if moveTo: + bits += "1" # StateMoveTo + else: + bits += "0" + + # give information + # todo: nbits for fillStyle and lineStyle is hard coded. + + if moveTo: + bits += twits2bits([moveTo[0], moveTo[1]]) + if fillStyle: + bits += int2bits(fillStyle, 4) + if lineStyle: + bits += int2bits(lineStyle, 4) + + return bits + + def make_straight_edge_record(self, *dxdy): + if len(dxdy) == 1: + dxdy = dxdy[0] + + # determine required number of bits + xbits = signedint2bits(dxdy[0] * 20) + ybits = signedint2bits(dxdy[1] * 20) + nbits = max([len(xbits), len(ybits)]) + + bits = BitArray() + bits += "11" # TypeFlag and StraightFlag + bits += int2bits(nbits - 2, 4) + bits += "1" # GeneralLineFlag + bits += signedint2bits(dxdy[0] * 20, nbits) + bits += signedint2bits(dxdy[1] * 20, nbits) + + # note: I do not make use of vertical/horizontal only lines... 
+ + return bits + + def make_end_shape_record(self): + bits = BitArray() + bits += "0" # TypeFlag: no edge + bits += "0" * 5 # EndOfShape + return bits + + +def read_pixels(bb, i, tagType, L1): + """With pf's seed after the recordheader, reads the pixeldata.""" + + # Get info + charId = bb[i : i + 2] # noqa + i += 2 + format = ord(bb[i : i + 1]) + i += 1 + width = bits2int(bb[i : i + 2], 16) + i += 2 + height = bits2int(bb[i : i + 2], 16) + i += 2 + + # If we can, get pixeldata and make numpy array + if format != 5: + logger.warning("Can only read 24bit or 32bit RGB(A) lossless images.") + else: + # Read byte data + offset = 2 + 1 + 2 + 2 # all the info bits + bb2 = bb[i : i + (L1 - offset)] + + # Decompress and make numpy array + data = zlib.decompress(bb2) + a = np.frombuffer(data, dtype=np.uint8) + + # Set shape + if tagType == 20: + # DefineBitsLossless - RGB data + try: + a.shape = height, width, 3 + except Exception: + # Byte align stuff might cause troubles + logger.warning("Cannot read image due to byte alignment") + if tagType == 36: + # DefineBitsLossless2 - ARGB data + a.shape = height, width, 4 + # Swap alpha channel to make RGBA + b = a + a = np.zeros_like(a) + a[:, :, 0] = b[:, :, 1] + a[:, :, 1] = b[:, :, 2] + a[:, :, 2] = b[:, :, 3] + a[:, :, 3] = b[:, :, 0] + + return a + + +# Last few functions + + +# These are the original public functions, we don't use them, but we +# keep it so that in principle this module can be used stand-alone. + + +def checkImages(images): # pragma: no cover + """checkImages(images) + Check numpy images and correct intensity range etc. + The same for all movie formats. 
+ """ + # Init results + images2 = [] + + for im in images: + if isinstance(im, np.ndarray): + # Check and convert dtype + if im.dtype == np.uint8: + images2.append(im) # Ok + elif im.dtype in [np.float32, np.float64]: + theMax = im.max() + if 128 < theMax < 300: + pass # assume 0:255 + else: + im = im.copy() + im[im < 0] = 0 + im[im > 1] = 1 + im *= 255 + images2.append(im.astype(np.uint8)) + else: + im = im.astype(np.uint8) + images2.append(im) + # Check size + if im.ndim == 2: + pass # ok + elif im.ndim == 3: + if im.shape[2] not in [3, 4]: + raise ValueError("This array can not represent an image.") + else: + raise ValueError("This array can not represent an image.") + else: + raise ValueError("Invalid image type: " + str(type(im))) + + # Done + return images2 + + +def build_file( + fp, taglist, nframes=1, framesize=(500, 500), fps=10, version=8 +): # pragma: no cover + """Give the given file (as bytes) a header.""" + + # compose header + bb = bytes() + bb += "F".encode("ascii") # uncompressed + bb += "WS".encode("ascii") # signature bytes + bb += int2uint8(version) # version + bb += "0000".encode("ascii") # FileLength (leave open for now) + bb += Tag().make_rect_record(0, framesize[0], 0, framesize[1]).tobytes() + bb += int2uint8(0) + int2uint8(fps) # FrameRate + bb += int2uint16(nframes) + fp.write(bb) + + # produce all tags + for tag in taglist: + fp.write(tag.get_tag()) + + # finish with end tag + fp.write("\x00\x00".encode("ascii")) + + # set size + sze = fp.tell() + fp.seek(4) + fp.write(int2uint32(sze)) + + +def write_swf(filename, images, duration=0.1, repeat=True): # pragma: no cover + """Write an swf-file from the specified images. If repeat is False, + the movie is finished with a stop action. Duration may also + be a list with durations for each frame (note that the duration + for each frame is always an integer amount of the minimum duration.) 
+ + Images should be a list consisting numpy arrays with values between + 0 and 255 for integer types, and between 0 and 1 for float types. + + """ + + # Check images + images2 = checkImages(images) + + # Init + taglist = [FileAttributesTag(), SetBackgroundTag(0, 0, 0)] + + # Check duration + if hasattr(duration, "__len__"): + if len(duration) == len(images2): + duration = [d for d in duration] + else: + raise ValueError("len(duration) doesn't match amount of images.") + else: + duration = [duration for im in images2] + + # Build delays list + minDuration = float(min(duration)) + delays = [round(d / minDuration) for d in duration] + delays = [max(1, int(d)) for d in delays] + + # Get FPS + fps = 1.0 / minDuration + + # Produce series of tags for each image + # t0 = time.time() + nframes = 0 + for im in images2: + bm = BitmapTag(im) + wh = (im.shape[1], im.shape[0]) + sh = ShapeTag(bm.id, (0, 0), wh) + po = PlaceObjectTag(1, sh.id, move=nframes > 0) + taglist.extend([bm, sh, po]) + for i in range(delays[nframes]): + taglist.append(ShowFrameTag()) + nframes += 1 + + if not repeat: + taglist.append(DoActionTag("stop")) + + # Build file + # t1 = time.time() + fp = open(filename, "wb") + try: + build_file(fp, taglist, nframes=nframes, framesize=wh, fps=fps) + except Exception: + raise + finally: + fp.close() + # t2 = time.time() + + # logger.warning("Writing SWF took %1.2f and %1.2f seconds" % (t1-t0, t2-t1) ) + + +def read_swf(filename): # pragma: no cover + """Read all images from an SWF (shockwave flash) file. Returns a list + of numpy arrays. + + Limitation: only read the PNG encoded images (not the JPG encoded ones). 
+ """ + + # Check whether it exists + if not os.path.isfile(filename): + raise IOError("File not found: " + str(filename)) + + # Init images + images = [] + + # Open file and read all + fp = open(filename, "rb") + bb = fp.read() + + try: + # Check opening tag + tmp = bb[0:3].decode("ascii", "ignore") + if tmp.upper() == "FWS": + pass # ok + elif tmp.upper() == "CWS": + # Decompress movie + bb = bb[:8] + zlib.decompress(bb[8:]) + else: + raise IOError("Not a valid SWF file: " + str(filename)) + + # Set filepointer at first tag (skipping framesize RECT and two uin16's + i = 8 + nbits = bits2int(bb[i : i + 1], 5) # skip FrameSize + nbits = 5 + nbits * 4 + Lrect = nbits / 8.0 + if Lrect % 1: + Lrect += 1 + Lrect = int(Lrect) + i += Lrect + 4 + + # Iterate over the tags + counter = 0 + while True: + counter += 1 + + # Get tag header + head = bb[i : i + 6] + if not head: + break # Done (we missed end tag) + + # Determine type and length + T, L1, L2 = get_type_and_len(head) + if not L2: + logger.warning("Invalid tag length, could not proceed") + break + # logger.warning(T, L2) + + # Read image if we can + if T in [20, 36]: + im = read_pixels(bb, i + 6, T, L1) + if im is not None: + images.append(im) + elif T in [6, 21, 35, 90]: + logger.warning("Ignoring JPEG image: cannot read JPEG.") + else: + pass # Not an image tag + + # Detect end tag + if T == 0: + break + + # Next tag! + i += L2 + + finally: + fp.close() + + # Done + return images + + +# Backward compatibility; same public names as when this was images2swf. +writeSwf = write_swf +readSwf = read_swf diff --git a/.venv/Lib/site-packages/imageio/plugins/_tifffile.py b/.venv/Lib/site-packages/imageio/plugins/_tifffile.py new file mode 100644 index 00000000..bcdf728d --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/_tifffile.py @@ -0,0 +1,10675 @@ +#! 
/usr/bin/env python3 +# -*- coding: utf-8 -*- +# tifffile.py + +# Copyright (c) 2008-2018, Christoph Gohlke +# Copyright (c) 2008-2018, The Regents of the University of California +# Produced at the Laboratory for Fluorescence Dynamics +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holders nor the names of any +# contributors may be used to endorse or promote products derived +# from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +"""Read image and meta data from (bio) TIFF(R) files. Save numpy arrays as TIFF. + +Image and metadata can be read from TIFF, BigTIFF, OME-TIFF, STK, LSM, NIH, +SGI, ImageJ, MicroManager, FluoView, ScanImage, SEQ, GEL, and GeoTIFF files. 
+ +Tifffile is not a general-purpose TIFF library. +Only a subset of the TIFF specification is supported, mainly uncompressed and +losslessly compressed 1, 8, 16, 32 and 64 bit integer, 16, 32 and 64-bit float, +grayscale and RGB(A) images, which are commonly used in scientific imaging. +Specifically, reading slices of image data, image trees defined via SubIFDs, +CCITT and OJPEG compression, chroma subsampling without JPEG compression, +or IPTC and XMP metadata are not implemented. + +TIFF(R), the tagged Image File Format, is a trademark and under control of +Adobe Systems Incorporated. BigTIFF allows for files greater than 4 GB. +STK, LSM, FluoView, SGI, SEQ, GEL, and OME-TIFF, are custom extensions +defined by Molecular Devices (Universal Imaging Corporation), Carl Zeiss +MicroImaging, Olympus, Silicon Graphics International, Media Cybernetics, +Molecular Dynamics, and the Open Microscopy Environment consortium +respectively. + +For command line usage run C{python -m tifffile --help} + +:Author: + `Christoph Gohlke `_ + +:Organization: + Laboratory for Fluorescence Dynamics, University of California, Irvine + +:Version: 2018.06.15 + +Requirements +------------ +* `CPython 3.6 64-bit `_ +* `Numpy 1.14 `_ +* `Matplotlib 2.2 `_ (optional for plotting) +* `Tifffile.c 2018.02.10 `_ + (recommended for faster decoding of PackBits and LZW encoded strings) +* `Tifffile_geodb.py 2018.02.10 `_ + (optional enums for GeoTIFF metadata) +* Python 2 requires 'futures', 'enum34', 'pathlib'. + +Revisions +--------- +2018.06.15 + Pass 2680 tests. + Towards reading JPEG and other compressions via imagecodecs package (WIP). + Add function to validate TIFF using 'jhove -m TIFF-hul'. + Save bool arrays as bilevel TIFF. + Accept pathlib.Path as filenames. + Move 'software' argument from TiffWriter __init__ to save. + Raise DOS limit to 16 TB. + Lazy load lzma and zstd compressors and decompressors. + Add option to save IJMetadata tags. 
+ Return correct number of pages for truncated series (bug fix). + Move EXIF tags to TIFF.TAG as per TIFF/EP standard. +2018.02.18 + Pass 2293 tests. + Always save RowsPerStrip and Resolution tags as required by TIFF standard. + Do not use badly typed ImageDescription. + Coherce bad ASCII string tags to bytes. + Tuning of __str__ functions. + Fix reading 'undefined' tag values (bug fix). + Read and write ZSTD compressed data. + Use hexdump to print byte strings. + Determine TIFF byte order from data dtype in imsave. + Add option to specify RowsPerStrip for compressed strips. + Allow memory map of arrays with non-native byte order. + Attempt to handle ScanImage <= 5.1 files. + Restore TiffPageSeries.pages sequence interface. + Use numpy.frombuffer instead of fromstring to read from binary data. + Parse GeoTIFF metadata. + Add option to apply horizontal differencing before compression. + Towards reading PerkinElmer QPTIFF (no test files). + Do not index out of bounds data in tifffile.c unpackbits and decodelzw. +2017.09.29 (tentative) + Many backwards incompatible changes improving speed and resource usage: + Pass 2268 tests. + Add detail argument to __str__ function. Remove info functions. + Fix potential issue correcting offsets of large LSM files with positions. + Remove TiffFile sequence interface; use TiffFile.pages instead. + Do not make tag values available as TiffPage attributes. + Use str (not bytes) type for tag and metadata strings (WIP). + Use documented standard tag and value names (WIP). + Use enums for some documented TIFF tag values. + Remove 'memmap' and 'tmpfile' options; use out='memmap' instead. + Add option to specify output in asarray functions. + Add option to concurrently decode image strips or tiles using threads. + Add TiffPage.asrgb function (WIP). + Do not apply colormap in asarray. + Remove 'colormapped', 'rgbonly', and 'scale_mdgel' options from asarray. + Consolidate metadata in TiffFile _metadata functions. 
+ Remove non-tag metadata properties from TiffPage. + Add function to convert LSM to tiled BIN files. + Align image data in file. + Make TiffPage.dtype a numpy.dtype. + Add 'ndim' and 'size' properties to TiffPage and TiffPageSeries. + Allow imsave to write non-BigTIFF files up to ~4 GB. + Only read one page for shaped series if possible. + Add memmap function to create memory-mapped array stored in TIFF file. + Add option to save empty arrays to TIFF files. + Add option to save truncated TIFF files. + Allow single tile images to be saved contiguously. + Add optional movie mode for files with uniform pages. + Lazy load pages. + Use lightweight TiffFrame for IFDs sharing properties with key TiffPage. + Move module constants to 'TIFF' namespace (speed up module import). + Remove 'fastij' option from TiffFile. + Remove 'pages' parameter from TiffFile. + Remove TIFFfile alias. + Deprecate Python 2. + Require enum34 and futures packages on Python 2.7. + Remove Record class and return all metadata as dict instead. + Add functions to parse STK, MetaSeries, ScanImage, SVS, Pilatus metadata. + Read tags from EXIF and GPS IFDs. + Use pformat for tag and metadata values. + Fix reading some UIC tags (bug fix). + Do not modify input array in imshow (bug fix). + Fix Python implementation of unpack_ints. +2017.05.23 + Pass 1961 tests. + Write correct number of SampleFormat values (bug fix). + Use Adobe deflate code to write ZIP compressed files. + Add option to pass tag values as packed binary data for writing. + Defer tag validation to attribute access. + Use property instead of lazyattr decorator for simple expressions. +2017.03.17 + Write IFDs and tag values on word boundaries. + Read ScanImage metadata. + Remove is_rgb and is_indexed attributes from TiffFile. + Create files used by doctests. +2017.01.12 + Read Zeiss SEM metadata. + Read OME-TIFF with invalid references to external files. + Rewrite C LZW decoder (5x faster). 
+ Read corrupted LSM files missing EOI code in LZW stream. +2017.01.01 + Add option to append images to existing TIFF files. + Read files without pages. + Read S-FEG and Helios NanoLab tags created by FEI software. + Allow saving Color Filter Array (CFA) images. + Add info functions returning more information about TiffFile and TiffPage. + Add option to read specific pages only. + Remove maxpages argument (backwards incompatible). + Remove test_tifffile function. +2016.10.28 + Pass 1944 tests. + Improve detection of ImageJ hyperstacks. + Read TVIPS metadata created by EM-MENU (by Marco Oster). + Add option to disable using OME-XML metadata. + Allow non-integer range attributes in modulo tags (by Stuart Berg). +2016.06.21 + Do not always memmap contiguous data in page series. +2016.05.13 + Add option to specify resolution unit. + Write grayscale images with extra samples when planarconfig is specified. + Do not write RGB color images with 2 samples. + Reorder TiffWriter.save keyword arguments (backwards incompatible). +2016.04.18 + Pass 1932 tests. + TiffWriter, imread, and imsave accept open binary file streams. +2016.04.13 + Correctly handle reversed fill order in 2 and 4 bps images (bug fix). + Implement reverse_bitorder in C. +2016.03.18 + Fix saving additional ImageJ metadata. +2016.02.22 + Pass 1920 tests. + Write 8 bytes double tag values using offset if necessary (bug fix). + Add option to disable writing second image description tag. + Detect tags with incorrect counts. + Disable color mapping for LSM. +2015.11.13 + Read LSM 6 mosaics. + Add option to specify directory of memory-mapped files. + Add command line options to specify vmin and vmax values for colormapping. +2015.10.06 + New helper function to apply colormaps. + Renamed is_palette attributes to is_indexed (backwards incompatible). + Color-mapped samples are now contiguous (backwards incompatible). + Do not color-map ImageJ hyperstacks (backwards incompatible). + Towards reading Leica SCN. 
+2015.09.25 + Read images with reversed bit order (FillOrder is LSB2MSB). +2015.09.21 + Read RGB OME-TIFF. + Warn about malformed OME-XML. +2015.09.16 + Detect some corrupted ImageJ metadata. + Better axes labels for 'shaped' files. + Do not create TiffTag for default values. + Chroma subsampling is not supported. + Memory-map data in TiffPageSeries if possible (optional). +2015.08.17 + Pass 1906 tests. + Write ImageJ hyperstacks (optional). + Read and write LZMA compressed data. + Specify datetime when saving (optional). + Save tiled and color-mapped images (optional). + Ignore void bytecounts and offsets if possible. + Ignore bogus image_depth tag created by ISS Vista software. + Decode floating point horizontal differencing (not tiled). + Save image data contiguously if possible. + Only read first IFD from ImageJ files if possible. + Read ImageJ 'raw' format (files larger than 4 GB). + TiffPageSeries class for pages with compatible shape and data type. + Try to read incomplete tiles. + Open file dialog if no filename is passed on command line. + Ignore errors when decoding OME-XML. + Rename decoder functions (backwards incompatible). +2014.08.24 + TiffWriter class for incremental writing images. + Simplify examples. +2014.08.19 + Add memmap function to FileHandle. + Add function to determine if image data in TiffPage is memory-mappable. + Do not close files if multifile_close parameter is False. +2014.08.10 + Pass 1730 tests. + Return all extrasamples by default (backwards incompatible). + Read data from series of pages into memory-mapped array (optional). + Squeeze OME dimensions (backwards incompatible). + Workaround missing EOI code in strips. + Support image and tile depth tags (SGI extension). + Better handling of STK/UIC tags (backwards incompatible). + Disable color mapping for STK. + Julian to datetime converter. + TIFF ASCII type may be NULL separated. + Unwrap strip offsets for LSM files greater than 4 GB. 
+ Correct strip byte counts in compressed LSM files. + Skip missing files in OME series. + Read embedded TIFF files. +2014.02.05 + Save rational numbers as type 5 (bug fix). +2013.12.20 + Keep other files in OME multi-file series closed. + FileHandle class to abstract binary file handle. + Disable color mapping for bad OME-TIFF produced by bio-formats. + Read bad OME-XML produced by ImageJ when cropping. +2013.11.03 + Allow zlib compress data in imsave function (optional). + Memory-map contiguous image data (optional). +2013.10.28 + Read MicroManager metadata and little-endian ImageJ tag. + Save extra tags in imsave function. + Save tags in ascending order by code (bug fix). +2012.10.18 + Accept file like objects (read from OIB files). +2012.08.21 + Rename TIFFfile to TiffFile and TIFFpage to TiffPage. + TiffSequence class for reading sequence of TIFF files. + Read UltraQuant tags. + Allow float numbers as resolution in imsave function. +2012.08.03 + Read MD GEL tags and NIH Image header. +2012.07.25 + Read ImageJ tags. + ... + +Notes +----- +The API is not stable yet and might change between revisions. + +Tested on little-endian platforms only. + +Other Python packages and modules for reading (bio) scientific TIFF files: + +* `python-bioformats `_ +* `Imread `_ +* `PyLibTiff `_ +* `ITK `_ +* `PyLSM `_ +* `PyMca.TiffIO.py `_ (same as fabio.TiffIO) +* `BioImageXD.Readers `_ +* `Cellcognition.io `_ +* `pymimage `_ +* `pytiff `_ + +Acknowledgements +---------------- +* Egor Zindy, University of Manchester, for lsm_scan_info specifics. +* Wim Lewis for a bug fix and some LSM functions. +* Hadrien Mary for help on reading MicroManager files. +* Christian Kliche for help writing tiled and color-mapped files. + +References +---------- +1) TIFF 6.0 Specification and Supplements. Adobe Systems Incorporated. + http://partners.adobe.com/public/developer/tiff/ +2) TIFF File Format FAQ. 
http://www.awaresystems.be/imaging/tiff/faq.html +3) MetaMorph Stack (STK) Image File Format. + http://support.meta.moleculardevices.com/docs/t10243.pdf +4) Image File Format Description LSM 5/7 Release 6.0 (ZEN 2010). + Carl Zeiss MicroImaging GmbH. BioSciences. May 10, 2011 +5) The OME-TIFF format. + http://www.openmicroscopy.org/site/support/file-formats/ome-tiff +6) UltraQuant(r) Version 6.0 for Windows Start-Up Guide. + http://www.ultralum.com/images%20ultralum/pdf/UQStart%20Up%20Guide.pdf +7) Micro-Manager File Formats. + http://www.micro-manager.org/wiki/Micro-Manager_File_Formats +8) Tags for TIFF and Related Specifications. Digital Preservation. + http://www.digitalpreservation.gov/formats/content/tiff_tags.shtml +9) ScanImage BigTiff Specification - ScanImage 2016. + http://scanimage.vidriotechnologies.com/display/SI2016/ + ScanImage+BigTiff+Specification +10) CIPA DC-008-2016: Exchangeable image file format for digital still cameras: + Exif Version 2.31. + http://www.cipa.jp/std/documents/e/DC-008-Translation-2016-E.pdf + +Examples +-------- +>>> # write numpy array to TIFF file +>>> data = numpy.random.rand(4, 301, 219) +>>> imsave('temp.tif', data, photometric='minisblack') + +>>> # read numpy array from TIFF file +>>> image = imread('temp.tif') +>>> numpy.testing.assert_array_equal(image, data) + +>>> # iterate over pages and tags in TIFF file +>>> with TiffFile('temp.tif') as tif: +... images = tif.asarray() +... for page in tif.pages: +... for tag in page.tags.values(): +... _ = tag.name, tag.value +... 
image = page.asarray() + +""" + +from __future__ import division, print_function + +import sys +import os +import io +import re +import glob +import math +import zlib +import time +import json +import enum +import struct +import pathlib +import warnings +import binascii +import tempfile +import datetime +import threading +import collections +import multiprocessing +import concurrent.futures + +import numpy + +# delay imports: mmap, pprint, fractions, xml, tkinter, matplotlib, lzma, zstd, +# subprocess + +__version__ = "2018.06.15" +__docformat__ = "restructuredtext en" +__all__ = ( + "imsave", + "imread", + "imshow", + "memmap", + "TiffFile", + "TiffWriter", + "TiffSequence", + # utility functions used by oiffile or czifile + "FileHandle", + "lazyattr", + "natural_sorted", + "decode_lzw", + "stripnull", + "create_output", + "repeat_nd", + "format_size", + "product", + "xml2dict", +) + + +def imread(files, **kwargs): + """Return image data from TIFF file(s) as numpy array. + + Refer to the TiffFile class and member functions for documentation. + + Parameters + ---------- + files : str, binary stream, or sequence + File name, seekable binary stream, glob pattern, or sequence of + file names. + kwargs : dict + Parameters 'multifile' and 'is_ome' are passed to the TiffFile class. + The 'pattern' parameter is passed to the TiffSequence class. + Other parameters are passed to the asarray functions. + The first image series is returned if no arguments are provided. 
+ + Examples + -------- + >>> # get image from first page + >>> imsave('temp.tif', numpy.random.rand(3, 4, 301, 219)) + >>> im = imread('temp.tif', key=0) + >>> im.shape + (4, 301, 219) + + >>> # get images from sequence of files + >>> ims = imread(['temp.tif', 'temp.tif']) + >>> ims.shape + (2, 3, 4, 301, 219) + + """ + kwargs_file = parse_kwargs(kwargs, "multifile", "is_ome") + kwargs_seq = parse_kwargs(kwargs, "pattern") + + if isinstance(files, basestring) and any(i in files for i in "?*"): + files = glob.glob(files) + if not files: + raise ValueError("no files found") + if not hasattr(files, "seek") and len(files) == 1: + files = files[0] + + if isinstance(files, basestring) or hasattr(files, "seek"): + with TiffFile(files, **kwargs_file) as tif: + return tif.asarray(**kwargs) + else: + with TiffSequence(files, **kwargs_seq) as imseq: + return imseq.asarray(**kwargs) + + +def imsave(file, data=None, shape=None, dtype=None, bigsize=2**32 - 2**25, **kwargs): + """Write numpy array to TIFF file. + + Refer to the TiffWriter class and member functions for documentation. + + Parameters + ---------- + file : str or binary stream + File name or writable binary stream, such as an open file or BytesIO. + data : array_like + Input image. The last dimensions are assumed to be image depth, + height, width, and samples. + If None, an empty array of the specified shape and dtype is + saved to file. + Unless 'byteorder' is specified in 'kwargs', the TIFF file byte order + is determined from the data's dtype or the dtype argument. + shape : tuple + If 'data' is None, shape of an empty array to save to the file. + dtype : numpy.dtype + If 'data' is None, data-type of an empty array to save to the file. + bigsize : int + Create a BigTIFF file if the size of data in bytes is larger than + this threshold and 'imagej' or 'truncate' are not enabled. + By default, the threshold is 4 GB minus 32 MB reserved for metadata. 
+ Use the 'bigtiff' parameter to explicitly specify the type of + file created. + kwargs : dict + Parameters 'append', 'byteorder', 'bigtiff', and 'imagej', are passed + to TiffWriter(). Other parameters are passed to TiffWriter.save(). + + Returns + ------- + If the image data are written contiguously, return offset and bytecount + of image data in the file. + + Examples + -------- + >>> # save a RGB image + >>> data = numpy.random.randint(0, 255, (256, 256, 3), 'uint8') + >>> imsave('temp.tif', data, photometric='rgb') + + >>> # save a random array and metadata, using compression + >>> data = numpy.random.rand(2, 5, 3, 301, 219) + >>> imsave('temp.tif', data, compress=6, metadata={'axes': 'TZCYX'}) + + """ + tifargs = parse_kwargs(kwargs, "append", "bigtiff", "byteorder", "imagej") + if data is None: + size = product(shape) * numpy.dtype(dtype).itemsize + byteorder = numpy.dtype(dtype).byteorder + else: + try: + size = data.nbytes + byteorder = data.dtype.byteorder + except Exception: + size = 0 + byteorder = None + if ( + size > bigsize + and "bigtiff" not in tifargs + and not (tifargs.get("imagej", False) or tifargs.get("truncate", False)) + ): + tifargs["bigtiff"] = True + if "byteorder" not in tifargs: + tifargs["byteorder"] = byteorder + + with TiffWriter(file, **tifargs) as tif: + return tif.save(data, shape, dtype, **kwargs) + + +def memmap(filename, shape=None, dtype=None, page=None, series=0, mode="r+", **kwargs): + """Return memory-mapped numpy array stored in TIFF file. + + Memory-mapping requires data stored in native byte order, without tiling, + compression, predictors, etc. + If 'shape' and 'dtype' are provided, existing files will be overwritten or + appended to depending on the 'append' parameter. + Otherwise the image data of a specified page or series in an existing + file will be memory-mapped. By default, the image data of the first page + series is memory-mapped. + Call flush() to write any changes in the array to the file. 
+ Raise ValueError if the image data in the file is not memory-mappable. + + Parameters + ---------- + filename : str + Name of the TIFF file which stores the array. + shape : tuple + Shape of the empty array. + dtype : numpy.dtype + Data-type of the empty array. + page : int + Index of the page which image data to memory-map. + series : int + Index of the page series which image data to memory-map. + mode : {'r+', 'r', 'c'}, optional + The file open mode. Default is to open existing file for reading and + writing ('r+'). + kwargs : dict + Additional parameters passed to imsave() or TiffFile(). + + Examples + -------- + >>> # create an empty TIFF file and write to memory-mapped image + >>> im = memmap('temp.tif', shape=(256, 256), dtype='float32') + >>> im[255, 255] = 1.0 + >>> im.flush() + >>> im.shape, im.dtype + ((256, 256), dtype('float32')) + >>> del im + + >>> # memory-map image data in a TIFF file + >>> im = memmap('temp.tif', page=0) + >>> im[255, 255] + 1.0 + + """ + if shape is not None and dtype is not None: + # create a new, empty array + kwargs.update( + data=None, + shape=shape, + dtype=dtype, + returnoffset=True, + align=TIFF.ALLOCATIONGRANULARITY, + ) + result = imsave(filename, **kwargs) + if result is None: + # TODO: fail before creating file or writing data + raise ValueError("image data are not memory-mappable") + offset = result[0] + else: + # use existing file + with TiffFile(filename, **kwargs) as tif: + if page is not None: + page = tif.pages[page] + if not page.is_memmappable: + raise ValueError("image data are not memory-mappable") + offset, _ = page.is_contiguous + shape = page.shape + dtype = page.dtype + else: + series = tif.series[series] + if series.offset is None: + raise ValueError("image data are not memory-mappable") + shape = series.shape + dtype = series.dtype + offset = series.offset + dtype = tif.byteorder + dtype.char + return numpy.memmap(filename, dtype, mode, offset, shape, "C") + + +class lazyattr(object): + """Attribute 
whose value is computed on first access.""" + + # TODO: help() doesn't work + __slots__ = ("func",) + + def __init__(self, func): + self.func = func + # self.__name__ = func.__name__ + # self.__doc__ = func.__doc__ + # self.lock = threading.RLock() + + def __get__(self, instance, owner): + # with self.lock: + if instance is None: + return self + try: + value = self.func(instance) + except AttributeError as e: + raise RuntimeError(e) + if value is NotImplemented: + return getattr(super(owner, instance), self.func.__name__) + setattr(instance, self.func.__name__, value) + return value + + +class TiffWriter(object): + """Write numpy arrays to TIFF file. + + TiffWriter instances must be closed using the 'close' method, which is + automatically called when using the 'with' context manager. + + TiffWriter's main purpose is saving nD numpy array's as TIFF, + not to create any possible TIFF format. Specifically, JPEG compression, + SubIFDs, ExifIFD, or GPSIFD tags are not supported. + + Examples + -------- + >>> # successively append images to BigTIFF file + >>> data = numpy.random.rand(2, 5, 3, 301, 219) + >>> with TiffWriter('temp.tif', bigtiff=True) as tif: + ... for i in range(data.shape[0]): + ... tif.save(data[i], compress=6, photometric='minisblack') + + """ + + def __init__(self, file, bigtiff=False, byteorder=None, append=False, imagej=False): + """Open a TIFF file for writing. + + An empty TIFF file is created if the file does not exist, else the + file is overwritten with an empty TIFF file unless 'append' + is true. Use bigtiff=True when creating files larger than 4 GB. + + Parameters + ---------- + file : str, binary stream, or FileHandle + File name or writable binary stream, such as an open file + or BytesIO. + bigtiff : bool + If True, the BigTIFF format is used. + byteorder : {'<', '>', '=', '|'} + The endianness of the data in the file. + By default, this is the system's native byte order. 
+ append : bool + If True and 'file' is an existing standard TIFF file, image data + and tags are appended to the file. + Appending data may corrupt specifically formatted TIFF files + such as LSM, STK, ImageJ, NIH, or FluoView. + imagej : bool + If True, write an ImageJ hyperstack compatible file. + This format can handle data types uint8, uint16, or float32 and + data shapes up to 6 dimensions in TZCYXS order. + RGB images (S=3 or S=4) must be uint8. + ImageJ's default byte order is big-endian but this implementation + uses the system's native byte order by default. + ImageJ does not support BigTIFF format or LZMA compression. + The ImageJ file format is undocumented. + + """ + if append: + # determine if file is an existing TIFF file that can be extended + try: + with FileHandle(file, mode="rb", size=0) as fh: + pos = fh.tell() + try: + with TiffFile(fh) as tif: + if append != "force" and any( + getattr(tif, "is_" + a) + for a in ( + "lsm", + "stk", + "imagej", + "nih", + "fluoview", + "micromanager", + ) + ): + raise ValueError("file contains metadata") + byteorder = tif.byteorder + bigtiff = tif.is_bigtiff + self._ifdoffset = tif.pages.next_page_offset + except Exception as e: + raise ValueError("cannot append to file: %s" % str(e)) + finally: + fh.seek(pos) + except (IOError, FileNotFoundError): + append = False + + if byteorder in (None, "=", "|"): + byteorder = "<" if sys.byteorder == "little" else ">" + elif byteorder not in ("<", ">"): + raise ValueError("invalid byteorder %s" % byteorder) + if imagej and bigtiff: + warnings.warn("writing incompatible BigTIFF ImageJ") + + self._byteorder = byteorder + self._imagej = bool(imagej) + self._truncate = False + self._metadata = None + self._colormap = None + + self._descriptionoffset = 0 + self._descriptionlen = 0 + self._descriptionlenoffset = 0 + self._tags = None + self._shape = None # normalized shape of data in consecutive pages + self._datashape = None # shape of data in consecutive pages + 
self._datadtype = None # data type + self._dataoffset = None # offset to data + self._databytecounts = None # byte counts per plane + self._tagoffsets = None # strip or tile offset tag code + + if bigtiff: + self._bigtiff = True + self._offsetsize = 8 + self._tagsize = 20 + self._tagnoformat = "Q" + self._offsetformat = "Q" + self._valueformat = "8s" + else: + self._bigtiff = False + self._offsetsize = 4 + self._tagsize = 12 + self._tagnoformat = "H" + self._offsetformat = "I" + self._valueformat = "4s" + + if append: + self._fh = FileHandle(file, mode="r+b", size=0) + self._fh.seek(0, 2) + else: + self._fh = FileHandle(file, mode="wb", size=0) + self._fh.write({"<": b"II", ">": b"MM"}[byteorder]) + if bigtiff: + self._fh.write(struct.pack(byteorder + "HHH", 43, 8, 0)) + else: + self._fh.write(struct.pack(byteorder + "H", 42)) + # first IFD + self._ifdoffset = self._fh.tell() + self._fh.write(struct.pack(byteorder + self._offsetformat, 0)) + + def save( + self, + data=None, + shape=None, + dtype=None, + returnoffset=False, + photometric=None, + planarconfig=None, + tile=None, + contiguous=True, + align=16, + truncate=False, + compress=0, + rowsperstrip=None, + predictor=False, + colormap=None, + description=None, + datetime=None, + resolution=None, + software="tifffile.py", + metadata={}, + ijmetadata=None, + extratags=(), + ): + """Write numpy array and tags to TIFF file. + + The data shape's last dimensions are assumed to be image depth, + height (length), width, and samples. + If a colormap is provided, the data's dtype must be uint8 or uint16 + and the data values are indices into the last dimension of the + colormap. + If 'shape' and 'dtype' are specified, an empty array is saved. + This option cannot be used with compression or multiple tiles. + Image data are written uncompressed in one strip per plane by default. + Dimensions larger than 2 to 4 (depending on photometric mode, planar + configuration, and SGI mode) are flattened and saved as separate pages. 
+ The SampleFormat and BitsPerSample tags are derived from the data type. + + Parameters + ---------- + data : numpy.ndarray or None + Input image array. + shape : tuple or None + Shape of the empty array to save. Used only if 'data' is None. + dtype : numpy.dtype or None + Data-type of the empty array to save. Used only if 'data' is None. + returnoffset : bool + If True and the image data in the file is memory-mappable, return + the offset and number of bytes of the image data in the file. + photometric : {'MINISBLACK', 'MINISWHITE', 'RGB', 'PALETTE', 'CFA'} + The color space of the image data. + By default, this setting is inferred from the data shape and the + value of colormap. + For CFA images, DNG tags must be specified in 'extratags'. + planarconfig : {'CONTIG', 'SEPARATE'} + Specifies if samples are stored contiguous or in separate planes. + By default, this setting is inferred from the data shape. + If this parameter is set, extra samples are used to store grayscale + images. + 'CONTIG': last dimension contains samples. + 'SEPARATE': third last dimension contains samples. + tile : tuple of int + The shape (depth, length, width) of image tiles to write. + If None (default), image data are written in strips. + The tile length and width must be a multiple of 16. + If the tile depth is provided, the SGI ImageDepth and TileDepth + tags are used to save volume data. + Unless a single tile is used, tiles cannot be used to write + contiguous files. + Few software can read the SGI format, e.g. MeVisLab. + contiguous : bool + If True (default) and the data and parameters are compatible with + previous ones, if any, the image data are stored contiguously after + the previous one. Parameters 'photometric' and 'planarconfig' + are ignored. Parameters 'description', datetime', and 'extratags' + are written to the first page of a contiguous series only. + align : int + Byte boundary on which to align the image data in the file. + Default 16. 
Use mmap.ALLOCATIONGRANULARITY for memory-mapped data. + Following contiguous writes are not aligned. + truncate : bool + If True, only write the first page including shape metadata if + possible (uncompressed, contiguous, not tiled). + Other TIFF readers will only be able to read part of the data. + compress : int or 'LZMA', 'ZSTD' + Values from 0 to 9 controlling the level of zlib compression. + If 0 (default), data are written uncompressed. + Compression cannot be used to write contiguous files. + If 'LZMA' or 'ZSTD', LZMA or ZSTD compression is used, which is + not available on all platforms. + rowsperstrip : int + The number of rows per strip used for compression. + Uncompressed data are written in one strip per plane. + predictor : bool + If True, apply horizontal differencing to integer type images + before compression. + colormap : numpy.ndarray + RGB color values for the corresponding data value. + Must be of shape (3, 2**(data.itemsize*8)) and dtype uint16. + description : str + The subject of the image. Must be 7-bit ASCII. Cannot be used with + the ImageJ format. Saved with the first page only. + datetime : datetime + Date and time of image creation in '%Y:%m:%d %H:%M:%S' format. + If None (default), the current date and time is used. + Saved with the first page only. + resolution : (float, float[, str]) or ((int, int), (int, int)[, str]) + X and Y resolutions in pixels per resolution unit as float or + rational numbers. A third, optional parameter specifies the + resolution unit, which must be None (default for ImageJ), + 'INCH' (default), or 'CENTIMETER'. + software : str + Name of the software used to create the file. Must be 7-bit ASCII. + Saved with the first page only. + metadata : dict + Additional meta data to be saved along with shape information + in JSON or ImageJ formats in an ImageDescription tag. + If None, do not write a second ImageDescription tag. + Strings must be 7-bit ASCII. Saved with the first page only. 
+ ijmetadata : dict + Additional meta data to be saved in application specific + IJMetadata and IJMetadataByteCounts tags. Refer to the + imagej_metadata_tags function for valid keys and values. + Saved with the first page only. + extratags : sequence of tuples + Additional tags as [(code, dtype, count, value, writeonce)]. + + code : int + The TIFF tag Id. + dtype : str + Data type of items in 'value' in Python struct format. + One of B, s, H, I, 2I, b, h, i, 2i, f, d, Q, or q. + count : int + Number of data values. Not used for string or byte string + values. + value : sequence + 'Count' values compatible with 'dtype'. + Byte strings must contain count values of dtype packed as + binary data. + writeonce : bool + If True, the tag is written to the first page only. + + """ + # TODO: refactor this function + fh = self._fh + byteorder = self._byteorder + + if data is None: + if compress: + raise ValueError("cannot save compressed empty file") + datashape = shape + datadtype = numpy.dtype(dtype).newbyteorder(byteorder) + datadtypechar = datadtype.char + else: + data = numpy.asarray(data, byteorder + data.dtype.char, "C") + if data.size == 0: + raise ValueError("cannot save empty array") + datashape = data.shape + datadtype = data.dtype + datadtypechar = data.dtype.char + + returnoffset = returnoffset and datadtype.isnative + bilevel = datadtypechar == "?" 
+ if bilevel: + index = -1 if datashape[-1] > 1 else -2 + datasize = product(datashape[:index]) + if datashape[index] % 8: + datasize *= datashape[index] // 8 + 1 + else: + datasize *= datashape[index] // 8 + else: + datasize = product(datashape) * datadtype.itemsize + + # just append contiguous data if possible + self._truncate = bool(truncate) + if self._datashape: + if ( + not contiguous + or self._datashape[1:] != datashape + or self._datadtype != datadtype + or (compress and self._tags) + or tile + or not numpy.array_equal(colormap, self._colormap) + ): + # incompatible shape, dtype, compression mode, or colormap + self._write_remaining_pages() + self._write_image_description() + self._truncate = False + self._descriptionoffset = 0 + self._descriptionlenoffset = 0 + self._datashape = None + self._colormap = None + if self._imagej: + raise ValueError("ImageJ does not support non-contiguous data") + else: + # consecutive mode + self._datashape = (self._datashape[0] + 1,) + datashape + if not compress: + # write contiguous data, write IFDs/tags later + offset = fh.tell() + if data is None: + fh.write_empty(datasize) + else: + fh.write_array(data) + if returnoffset: + return offset, datasize + return + + input_shape = datashape + tagnoformat = self._tagnoformat + valueformat = self._valueformat + offsetformat = self._offsetformat + offsetsize = self._offsetsize + tagsize = self._tagsize + + MINISBLACK = TIFF.PHOTOMETRIC.MINISBLACK + RGB = TIFF.PHOTOMETRIC.RGB + CFA = TIFF.PHOTOMETRIC.CFA + PALETTE = TIFF.PHOTOMETRIC.PALETTE + CONTIG = TIFF.PLANARCONFIG.CONTIG + SEPARATE = TIFF.PLANARCONFIG.SEPARATE + + # parse input + if photometric is not None: + photometric = enumarg(TIFF.PHOTOMETRIC, photometric) + if planarconfig: + planarconfig = enumarg(TIFF.PLANARCONFIG, planarconfig) + if not compress: + compress = False + compresstag = 1 + predictor = False + else: + if isinstance(compress, (tuple, list)): + compress, compresslevel = compress + elif isinstance(compress, 
int): + compress, compresslevel = "ADOBE_DEFLATE", int(compress) + if not 0 <= compresslevel <= 9: + raise ValueError("invalid compression level %s" % compress) + else: + compresslevel = None + compress = compress.upper() + compresstag = enumarg(TIFF.COMPRESSION, compress) + + # prepare ImageJ format + if self._imagej: + if compress in ("LZMA", "ZSTD"): + raise ValueError("ImageJ cannot handle LZMA or ZSTD compression") + if description: + warnings.warn("not writing description to ImageJ file") + description = None + volume = False + if datadtypechar not in "BHhf": + raise ValueError("ImageJ does not support data type %s" % datadtypechar) + ijrgb = photometric == RGB if photometric else None + if datadtypechar not in "B": + ijrgb = False + ijshape = imagej_shape(datashape, ijrgb) + if ijshape[-1] in (3, 4): + photometric = RGB + if datadtypechar not in "B": + raise ValueError( + "ImageJ does not support data type %s " + "for RGB" % datadtypechar + ) + elif photometric is None: + photometric = MINISBLACK + planarconfig = None + if planarconfig == SEPARATE: + raise ValueError("ImageJ does not support planar images") + else: + planarconfig = CONTIG if ijrgb else None + + # define compress function + if compress: + if compresslevel is None: + compressor, compresslevel = TIFF.COMPESSORS[compresstag] + else: + compressor, _ = TIFF.COMPESSORS[compresstag] + compresslevel = int(compresslevel) + if predictor: + if datadtype.kind not in "iu": + raise ValueError("prediction not implemented for %s" % datadtype) + + def compress(data, level=compresslevel): + # horizontal differencing + diff = numpy.diff(data, axis=-2) + data = numpy.insert(diff, 0, data[..., 0, :], axis=-2) + return compressor(data, level) + + else: + + def compress(data, level=compresslevel): + return compressor(data, level) + + # verify colormap and indices + if colormap is not None: + if datadtypechar not in "BH": + raise ValueError("invalid data dtype for palette mode") + colormap = numpy.asarray(colormap, 
dtype=byteorder + "H") + if colormap.shape != (3, 2 ** (datadtype.itemsize * 8)): + raise ValueError("invalid color map shape") + self._colormap = colormap + + # verify tile shape + if tile: + tile = tuple(int(i) for i in tile[:3]) + volume = len(tile) == 3 + if ( + len(tile) < 2 + or tile[-1] % 16 + or tile[-2] % 16 + or any(i < 1 for i in tile) + ): + raise ValueError("invalid tile shape") + else: + tile = () + volume = False + + # normalize data shape to 5D or 6D, depending on volume: + # (pages, planar_samples, [depth,] height, width, contig_samples) + datashape = reshape_nd(datashape, 3 if photometric == RGB else 2) + shape = datashape + ndim = len(datashape) + + samplesperpixel = 1 + extrasamples = 0 + if volume and ndim < 3: + volume = False + if colormap is not None: + photometric = PALETTE + planarconfig = None + if photometric is None: + photometric = MINISBLACK + if bilevel: + photometric = TIFF.PHOTOMETRIC.MINISWHITE + elif planarconfig == CONTIG: + if ndim > 2 and shape[-1] in (3, 4): + photometric = RGB + elif planarconfig == SEPARATE: + if volume and ndim > 3 and shape[-4] in (3, 4): + photometric = RGB + elif ndim > 2 and shape[-3] in (3, 4): + photometric = RGB + elif ndim > 2 and shape[-1] in (3, 4): + photometric = RGB + elif self._imagej: + photometric = MINISBLACK + elif volume and ndim > 3 and shape[-4] in (3, 4): + photometric = RGB + elif ndim > 2 and shape[-3] in (3, 4): + photometric = RGB + if planarconfig and len(shape) <= (3 if volume else 2): + planarconfig = None + photometric = MINISBLACK + if photometric == RGB: + if len(shape) < 3: + raise ValueError("not a RGB(A) image") + if len(shape) < 4: + volume = False + if planarconfig is None: + if shape[-1] in (3, 4): + planarconfig = CONTIG + elif shape[-4 if volume else -3] in (3, 4): + planarconfig = SEPARATE + elif shape[-1] > shape[-4 if volume else -3]: + planarconfig = SEPARATE + else: + planarconfig = CONTIG + if planarconfig == CONTIG: + datashape = (-1, 1) + shape[(-4 if volume 
else -3) :] + samplesperpixel = datashape[-1] + else: + datashape = (-1,) + shape[(-4 if volume else -3) :] + (1,) + samplesperpixel = datashape[1] + if samplesperpixel > 3: + extrasamples = samplesperpixel - 3 + elif photometric == CFA: + if len(shape) != 2: + raise ValueError("invalid CFA image") + volume = False + planarconfig = None + datashape = (-1, 1) + shape[-2:] + (1,) + if 50706 not in (et[0] for et in extratags): + raise ValueError("must specify DNG tags for CFA image") + elif planarconfig and len(shape) > (3 if volume else 2): + if planarconfig == CONTIG: + datashape = (-1, 1) + shape[(-4 if volume else -3) :] + samplesperpixel = datashape[-1] + else: + datashape = (-1,) + shape[(-4 if volume else -3) :] + (1,) + samplesperpixel = datashape[1] + extrasamples = samplesperpixel - 1 + else: + planarconfig = None + # remove trailing 1s + while len(shape) > 2 and shape[-1] == 1: + shape = shape[:-1] + if len(shape) < 3: + volume = False + datashape = (-1, 1) + shape[(-3 if volume else -2) :] + (1,) + + # normalize shape to 6D + assert len(datashape) in (5, 6) + if len(datashape) == 5: + datashape = datashape[:2] + (1,) + datashape[2:] + if datashape[0] == -1: + s0 = product(input_shape) // product(datashape[1:]) + datashape = (s0,) + datashape[1:] + shape = datashape + if data is not None: + data = data.reshape(shape) + + if tile and not volume: + tile = (1, tile[-2], tile[-1]) + + if photometric == PALETTE: + if samplesperpixel != 1 or extrasamples or shape[1] != 1 or shape[-1] != 1: + raise ValueError("invalid data shape for palette mode") + + if photometric == RGB and samplesperpixel == 2: + raise ValueError("not a RGB image (samplesperpixel=2)") + + if bilevel: + if compress: + raise ValueError("cannot save compressed bilevel image") + if tile: + raise ValueError("cannot save tiled bilevel image") + if photometric not in (0, 1): + raise ValueError("cannot save bilevel image as %s" % str(photometric)) + datashape = list(datashape) + if datashape[-2] % 8: 
+ datashape[-2] = datashape[-2] // 8 + 1 + else: + datashape[-2] = datashape[-2] // 8 + datashape = tuple(datashape) + assert datasize == product(datashape) + if data is not None: + data = numpy.packbits(data, axis=-2) + assert datashape[-2] == data.shape[-2] + + bytestr = ( + bytes + if sys.version[0] == "2" + else (lambda x: bytes(x, "ascii") if isinstance(x, str) else x) + ) + tags = [] # list of (code, ifdentry, ifdvalue, writeonce) + + strip_or_tile = "Tile" if tile else "Strip" + tagbytecounts = TIFF.TAG_NAMES[strip_or_tile + "ByteCounts"] + tag_offsets = TIFF.TAG_NAMES[strip_or_tile + "Offsets"] + self._tagoffsets = tag_offsets + + def pack(fmt, *val): + return struct.pack(byteorder + fmt, *val) + + def addtag(code, dtype, count, value, writeonce=False): + # Compute ifdentry & ifdvalue bytes from code, dtype, count, value + # Append (code, ifdentry, ifdvalue, writeonce) to tags list + code = int(TIFF.TAG_NAMES.get(code, code)) + try: + tifftype = TIFF.DATA_DTYPES[dtype] + except KeyError: + raise ValueError("unknown dtype %s" % dtype) + rawcount = count + + if dtype == "s": + # strings + value = bytestr(value) + b"\0" + count = rawcount = len(value) + rawcount = value.find(b"\0\0") + if rawcount < 0: + rawcount = count + else: + rawcount += 1 # length of string without buffer + value = (value,) + elif isinstance(value, bytes): + # packed binary data + dtsize = struct.calcsize(dtype) + if len(value) % dtsize: + raise ValueError("invalid packed binary data") + count = len(value) // dtsize + if len(dtype) > 1: + count *= int(dtype[:-1]) + dtype = dtype[-1] + ifdentry = [pack("HH", code, tifftype), pack(offsetformat, rawcount)] + ifdvalue = None + if struct.calcsize(dtype) * count <= offsetsize: + # value(s) can be written directly + if isinstance(value, bytes): + ifdentry.append(pack(valueformat, value)) + elif count == 1: + if isinstance(value, (tuple, list, numpy.ndarray)): + value = value[0] + ifdentry.append(pack(valueformat, pack(dtype, value))) + else: + 
ifdentry.append(pack(valueformat, pack(str(count) + dtype, *value))) + else: + # use offset to value(s) + ifdentry.append(pack(offsetformat, 0)) + if isinstance(value, bytes): + ifdvalue = value + elif isinstance(value, numpy.ndarray): + assert value.size == count + assert value.dtype.char == dtype + ifdvalue = value.tostring() + elif isinstance(value, (tuple, list)): + ifdvalue = pack(str(count) + dtype, *value) + else: + ifdvalue = pack(dtype, value) + tags.append((code, b"".join(ifdentry), ifdvalue, writeonce)) + + def rational(arg, max_denominator=1000000): + """ "Return nominator and denominator from float or two integers.""" + from fractions import Fraction # delayed import + + try: + f = Fraction.from_float(arg) + except TypeError: + f = Fraction(arg[0], arg[1]) + f = f.limit_denominator(max_denominator) + return f.numerator, f.denominator + + if description: + # user provided description + addtag("ImageDescription", "s", 0, description, writeonce=True) + + # write shape and metadata to ImageDescription + self._metadata = {} if not metadata else metadata.copy() + if self._imagej: + description = imagej_description( + input_shape, + shape[-1] in (3, 4), + self._colormap is not None, + **self._metadata + ) + elif metadata or metadata == {}: + if self._truncate: + self._metadata.update(truncated=True) + description = json_description(input_shape, **self._metadata) + else: + description = None + if description: + # add 64 bytes buffer + # the image description might be updated later with the final shape + description = str2bytes(description, "ascii") + description += b"\0" * 64 + self._descriptionlen = len(description) + addtag("ImageDescription", "s", 0, description, writeonce=True) + + if software: + addtag("Software", "s", 0, software, writeonce=True) + if datetime is None: + datetime = self._now() + addtag( + "DateTime", "s", 0, datetime.strftime("%Y:%m:%d %H:%M:%S"), writeonce=True + ) + addtag("Compression", "H", 1, compresstag) + if predictor: + 
addtag("Predictor", "H", 1, 2) + addtag("ImageWidth", "I", 1, shape[-2]) + addtag("ImageLength", "I", 1, shape[-3]) + if tile: + addtag("TileWidth", "I", 1, tile[-1]) + addtag("TileLength", "I", 1, tile[-2]) + if tile[0] > 1: + addtag("ImageDepth", "I", 1, shape[-4]) + addtag("TileDepth", "I", 1, tile[0]) + addtag("NewSubfileType", "I", 1, 0) + if not bilevel: + sampleformat = {"u": 1, "i": 2, "f": 3, "c": 6}[datadtype.kind] + addtag( + "SampleFormat", "H", samplesperpixel, (sampleformat,) * samplesperpixel + ) + addtag("PhotometricInterpretation", "H", 1, photometric.value) + if colormap is not None: + addtag("ColorMap", "H", colormap.size, colormap) + addtag("SamplesPerPixel", "H", 1, samplesperpixel) + if bilevel: + pass + elif planarconfig and samplesperpixel > 1: + addtag("PlanarConfiguration", "H", 1, planarconfig.value) + addtag( + "BitsPerSample", + "H", + samplesperpixel, + (datadtype.itemsize * 8,) * samplesperpixel, + ) + else: + addtag("BitsPerSample", "H", 1, datadtype.itemsize * 8) + if extrasamples: + if photometric == RGB and extrasamples == 1: + addtag("ExtraSamples", "H", 1, 1) # associated alpha channel + else: + addtag("ExtraSamples", "H", extrasamples, (0,) * extrasamples) + if resolution is not None: + addtag("XResolution", "2I", 1, rational(resolution[0])) + addtag("YResolution", "2I", 1, rational(resolution[1])) + if len(resolution) > 2: + unit = resolution[2] + unit = 1 if unit is None else enumarg(TIFF.RESUNIT, unit) + elif self._imagej: + unit = 1 + else: + unit = 2 + addtag("ResolutionUnit", "H", 1, unit) + elif not self._imagej: + addtag("XResolution", "2I", 1, (1, 1)) + addtag("YResolution", "2I", 1, (1, 1)) + addtag("ResolutionUnit", "H", 1, 1) + if ijmetadata: + for t in imagej_metadata_tags(ijmetadata, byteorder): + addtag(*t) + + contiguous = not compress + if tile: + # one chunk per tile per plane + tiles = ( + (shape[2] + tile[0] - 1) // tile[0], + (shape[3] + tile[1] - 1) // tile[1], + (shape[4] + tile[2] - 1) // tile[2], + ) + 
numtiles = product(tiles) * shape[1] + stripbytecounts = [ + product(tile) * shape[-1] * datadtype.itemsize + ] * numtiles + addtag(tagbytecounts, offsetformat, numtiles, stripbytecounts) + addtag(tag_offsets, offsetformat, numtiles, [0] * numtiles) + contiguous = contiguous and product(tiles) == 1 + if not contiguous: + # allocate tile buffer + chunk = numpy.empty(tile + (shape[-1],), dtype=datadtype) + elif contiguous: + # one strip per plane + if bilevel: + stripbytecounts = [product(datashape[2:])] * shape[1] + else: + stripbytecounts = [product(datashape[2:]) * datadtype.itemsize] * shape[ + 1 + ] + addtag(tagbytecounts, offsetformat, shape[1], stripbytecounts) + addtag(tag_offsets, offsetformat, shape[1], [0] * shape[1]) + addtag("RowsPerStrip", "I", 1, shape[-3]) + else: + # compress rowsperstrip or ~64 KB chunks + rowsize = product(shape[-2:]) * datadtype.itemsize + if rowsperstrip is None: + rowsperstrip = 65536 // rowsize + if rowsperstrip < 1: + rowsperstrip = 1 + elif rowsperstrip > shape[-3]: + rowsperstrip = shape[-3] + addtag("RowsPerStrip", "I", 1, rowsperstrip) + + numstrips = (shape[-3] + rowsperstrip - 1) // rowsperstrip + numstrips *= shape[1] + stripbytecounts = [0] * numstrips + addtag(tagbytecounts, offsetformat, numstrips, [0] * numstrips) + addtag(tag_offsets, offsetformat, numstrips, [0] * numstrips) + + if data is None and not contiguous: + raise ValueError("cannot write non-contiguous empty file") + + # add extra tags from user + for t in extratags: + addtag(*t) + + # TODO: check TIFFReadDirectoryCheckOrder warning in files containing + # multiple tags of same code + # the entries in an IFD must be sorted in ascending order by tag code + tags = sorted(tags, key=lambda x: x[0]) + + if not (self._bigtiff or self._imagej) and (fh.tell() + datasize > 2**31 - 1): + raise ValueError("data too large for standard TIFF file") + + # if not compressed or multi-tiled, write the first IFD and then + # all data contiguously; else, write all IFDs and 
data interleaved + for pageindex in range(1 if contiguous else shape[0]): + # update pointer at ifd_offset + pos = fh.tell() + if pos % 2: + # location of IFD must begin on a word boundary + fh.write(b"\0") + pos += 1 + fh.seek(self._ifdoffset) + fh.write(pack(offsetformat, pos)) + fh.seek(pos) + + # write ifdentries + fh.write(pack(tagnoformat, len(tags))) + tag_offset = fh.tell() + fh.write(b"".join(t[1] for t in tags)) + self._ifdoffset = fh.tell() + fh.write(pack(offsetformat, 0)) # offset to next IFD + + # write tag values and patch offsets in ifdentries, if necessary + for tagindex, tag in enumerate(tags): + if tag[2]: + pos = fh.tell() + if pos % 2: + # tag value is expected to begin on word boundary + fh.write(b"\0") + pos += 1 + fh.seek(tag_offset + tagindex * tagsize + offsetsize + 4) + fh.write(pack(offsetformat, pos)) + fh.seek(pos) + if tag[0] == tag_offsets: + stripoffsetsoffset = pos + elif tag[0] == tagbytecounts: + strip_bytecounts_offset = pos + elif tag[0] == 270 and tag[2].endswith(b"\0\0\0\0"): + # image description buffer + self._descriptionoffset = pos + self._descriptionlenoffset = tag_offset + tagindex * tagsize + 4 + fh.write(tag[2]) + + # write image data + data_offset = fh.tell() + skip = align - data_offset % align + fh.seek(skip, 1) + data_offset += skip + if contiguous: + if data is None: + fh.write_empty(datasize) + else: + fh.write_array(data) + elif tile: + if data is None: + fh.write_empty(numtiles * stripbytecounts[0]) + else: + stripindex = 0 + for plane in data[pageindex]: + for tz in range(tiles[0]): + for ty in range(tiles[1]): + for tx in range(tiles[2]): + c0 = min(tile[0], shape[2] - tz * tile[0]) + c1 = min(tile[1], shape[3] - ty * tile[1]) + c2 = min(tile[2], shape[4] - tx * tile[2]) + chunk[c0:, c1:, c2:] = 0 + chunk[:c0, :c1, :c2] = plane[ + tz * tile[0] : tz * tile[0] + c0, + ty * tile[1] : ty * tile[1] + c1, + tx * tile[2] : tx * tile[2] + c2, + ] + if compress: + t = compress(chunk) + fh.write(t) + 
stripbytecounts[stripindex] = len(t) + stripindex += 1 + else: + fh.write_array(chunk) + fh.flush() + elif compress: + # write one strip per rowsperstrip + assert data.shape[2] == 1 # not handling depth + numstrips = (shape[-3] + rowsperstrip - 1) // rowsperstrip + stripindex = 0 + for plane in data[pageindex]: + for i in range(numstrips): + strip = plane[0, i * rowsperstrip : (i + 1) * rowsperstrip] + strip = compress(strip) + fh.write(strip) + stripbytecounts[stripindex] = len(strip) + stripindex += 1 + + # update strip/tile offsets and bytecounts if necessary + pos = fh.tell() + for tagindex, tag in enumerate(tags): + if tag[0] == tag_offsets: # strip/tile offsets + if tag[2]: + fh.seek(stripoffsetsoffset) + strip_offset = data_offset + for size in stripbytecounts: + fh.write(pack(offsetformat, strip_offset)) + strip_offset += size + else: + fh.seek(tag_offset + tagindex * tagsize + offsetsize + 4) + fh.write(pack(offsetformat, data_offset)) + elif tag[0] == tagbytecounts: # strip/tile bytecounts + if compress: + if tag[2]: + fh.seek(strip_bytecounts_offset) + for size in stripbytecounts: + fh.write(pack(offsetformat, size)) + else: + fh.seek(tag_offset + tagindex * tagsize + offsetsize + 4) + fh.write(pack(offsetformat, stripbytecounts[0])) + break + fh.seek(pos) + fh.flush() + + # remove tags that should be written only once + if pageindex == 0: + tags = [tag for tag in tags if not tag[-1]] + + self._shape = shape + self._datashape = (1,) + input_shape + self._datadtype = datadtype + self._dataoffset = data_offset + self._databytecounts = stripbytecounts + + if contiguous: + # write remaining IFDs/tags later + self._tags = tags + # return offset and size of image data + if returnoffset: + return data_offset, sum(stripbytecounts) + + def _write_remaining_pages(self): + """Write outstanding IFDs and tags to file.""" + if not self._tags or self._truncate: + return + + fh = self._fh + fhpos = fh.tell() + if fhpos % 2: + fh.write(b"\0") + fhpos += 1 + byteorder = 
self._byteorder + offsetformat = self._offsetformat + offsetsize = self._offsetsize + tagnoformat = self._tagnoformat + tagsize = self._tagsize + dataoffset = self._dataoffset + pagedatasize = sum(self._databytecounts) + pageno = self._shape[0] * self._datashape[0] - 1 + + def pack(fmt, *val): + return struct.pack(byteorder + fmt, *val) + + # construct template IFD in memory + # need to patch offsets to next IFD and data before writing to disk + ifd = io.BytesIO() + ifd.write(pack(tagnoformat, len(self._tags))) + tagoffset = ifd.tell() + ifd.write(b"".join(t[1] for t in self._tags)) + ifdoffset = ifd.tell() + ifd.write(pack(offsetformat, 0)) # offset to next IFD + # tag values + for tagindex, tag in enumerate(self._tags): + offset2value = tagoffset + tagindex * tagsize + offsetsize + 4 + if tag[2]: + pos = ifd.tell() + if pos % 2: # tag value is expected to begin on word boundary + ifd.write(b"\0") + pos += 1 + ifd.seek(offset2value) + try: + ifd.write(pack(offsetformat, pos + fhpos)) + except Exception: # struct.error + if self._imagej: + warnings.warn("truncating ImageJ file") + self._truncate = True + return + raise ValueError("data too large for non-BigTIFF file") + ifd.seek(pos) + ifd.write(tag[2]) + if tag[0] == self._tagoffsets: + # save strip/tile offsets for later updates + stripoffset2offset = offset2value + stripoffset2value = pos + elif tag[0] == self._tagoffsets: + # save strip/tile offsets for later updates + stripoffset2offset = None + stripoffset2value = offset2value + # size to word boundary + if ifd.tell() % 2: + ifd.write(b"\0") + + # check if all IFDs fit in file + pos = fh.tell() + if not self._bigtiff and pos + ifd.tell() * pageno > 2**32 - 256: + if self._imagej: + warnings.warn("truncating ImageJ file") + self._truncate = True + return + raise ValueError("data too large for non-BigTIFF file") + + # TODO: assemble IFD chain in memory + for _ in range(pageno): + # update pointer at IFD offset + pos = fh.tell() + fh.seek(self._ifdoffset) + 
fh.write(pack(offsetformat, pos)) + fh.seek(pos) + self._ifdoffset = pos + ifdoffset + # update strip/tile offsets in IFD + dataoffset += pagedatasize # offset to image data + if stripoffset2offset is None: + ifd.seek(stripoffset2value) + ifd.write(pack(offsetformat, dataoffset)) + else: + ifd.seek(stripoffset2offset) + ifd.write(pack(offsetformat, pos + stripoffset2value)) + ifd.seek(stripoffset2value) + stripoffset = dataoffset + for size in self._databytecounts: + ifd.write(pack(offsetformat, stripoffset)) + stripoffset += size + # write IFD entry + fh.write(ifd.getvalue()) + + self._tags = None + self._datadtype = None + self._dataoffset = None + self._databytecounts = None + # do not reset _shape or _data_shape + + def _write_image_description(self): + """Write meta data to ImageDescription tag.""" + if ( + not self._datashape + or self._datashape[0] == 1 + or self._descriptionoffset <= 0 + ): + return + + colormapped = self._colormap is not None + if self._imagej: + isrgb = self._shape[-1] in (3, 4) + description = imagej_description( + self._datashape, isrgb, colormapped, **self._metadata + ) + else: + description = json_description(self._datashape, **self._metadata) + + # rewrite description and its length to file + description = description.encode("utf-8") + description = description[: self._descriptionlen - 1] + pos = self._fh.tell() + self._fh.seek(self._descriptionoffset) + self._fh.write(description) + self._fh.seek(self._descriptionlenoffset) + self._fh.write( + struct.pack(self._byteorder + self._offsetformat, len(description) + 1) + ) + self._fh.seek(pos) + + self._descriptionoffset = 0 + self._descriptionlenoffset = 0 + self._descriptionlen = 0 + + def _now(self): + """Return current date and time.""" + return datetime.datetime.now() + + def close(self): + """Write remaining pages and close file handle.""" + if not self._truncate: + self._write_remaining_pages() + self._write_image_description() + self._fh.close() + + def __enter__(self): + return 
self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + + +class TiffFile(object): + """Read image and metadata from TIFF file. + + TiffFile instances must be closed using the 'close' method, which is + automatically called when using the 'with' context manager. + + Attributes + ---------- + pages : TiffPages + Sequence of TIFF pages in file. + series : list of TiffPageSeries + Sequences of closely related TIFF pages. These are computed + from OME, LSM, ImageJ, etc. metadata or based on similarity + of page properties such as shape, dtype, and compression. + byteorder : '>', '<' + The endianness of data in the file. + '>': big-endian (Motorola). + '>': little-endian (Intel). + is_flag : bool + If True, file is of a certain format. + Flags are: bigtiff, movie, shaped, ome, imagej, stk, lsm, fluoview, + nih, vista, 'micromanager, metaseries, mdgel, mediacy, tvips, fei, + sem, scn, svs, scanimage, andor, epics, pilatus, qptiff. + + All attributes are read-only. + + Examples + -------- + >>> # read image array from TIFF file + >>> imsave('temp.tif', numpy.random.rand(5, 301, 219)) + >>> with TiffFile('temp.tif') as tif: + ... data = tif.asarray() + >>> data.shape + (5, 301, 219) + + """ + + def __init__( + self, + arg, + name=None, + offset=None, + size=None, + multifile=True, + movie=None, + **kwargs + ): + """Initialize instance from file. + + Parameters + ---------- + arg : str or open file + Name of file or open file object. + The file objects are closed in TiffFile.close(). + name : str + Optional name of file in case 'arg' is a file handle. + offset : int + Optional start position of embedded file. By default, this is + the current file position. + size : int + Optional size of embedded file. By default, this is the number + of bytes from the 'offset' to the end of the file. + multifile : bool + If True (default), series may include pages from multiple files. + Currently applies to OME-TIFF only. 
+ movie : bool + If True, assume that later pages differ from first page only by + data offsets and byte counts. Significantly increases speed and + reduces memory usage when reading movies with thousands of pages. + Enabling this for non-movie files will result in data corruption + or crashes. Python 3 only. + kwargs : bool + 'is_ome': If False, disable processing of OME-XML metadata. + + """ + if "fastij" in kwargs: + del kwargs["fastij"] + raise DeprecationWarning("the fastij option will be removed") + for key, value in kwargs.items(): + if key[:3] == "is_" and key[3:] in TIFF.FILE_FLAGS: + if value is not None and not value: + setattr(self, key, bool(value)) + else: + raise TypeError("unexpected keyword argument: %s" % key) + + fh = FileHandle(arg, mode="rb", name=name, offset=offset, size=size) + self._fh = fh + self._multifile = bool(multifile) + self._files = {fh.name: self} # cache of TiffFiles + try: + fh.seek(0) + try: + byteorder = {b"II": "<", b"MM": ">"}[fh.read(2)] + except KeyError: + raise ValueError("not a TIFF file") + sys_byteorder = {"big": ">", "little": "<"}[sys.byteorder] + self.isnative = byteorder == sys_byteorder + + version = struct.unpack(byteorder + "H", fh.read(2))[0] + if version == 43: + # BigTiff + self.is_bigtiff = True + offsetsize, zero = struct.unpack(byteorder + "HH", fh.read(4)) + if zero or offsetsize != 8: + raise ValueError("invalid BigTIFF file") + self.byteorder = byteorder + self.offsetsize = 8 + self.offsetformat = byteorder + "Q" + self.tagnosize = 8 + self.tagnoformat = byteorder + "Q" + self.tagsize = 20 + self.tagformat1 = byteorder + "HH" + self.tagformat2 = byteorder + "Q8s" + elif version == 42: + self.is_bigtiff = False + self.byteorder = byteorder + self.offsetsize = 4 + self.offsetformat = byteorder + "I" + self.tagnosize = 2 + self.tagnoformat = byteorder + "H" + self.tagsize = 12 + self.tagformat1 = byteorder + "HH" + self.tagformat2 = byteorder + "I4s" + else: + raise ValueError("invalid TIFF file") + + # 
file handle is at offset to offset to first page + self.pages = TiffPages(self) + + if self.is_lsm and ( + self.filehandle.size >= 2**32 + or self.pages[0].compression != 1 + or self.pages[1].compression != 1 + ): + self._lsm_load_pages() + self._lsm_fix_strip_offsets() + self._lsm_fix_strip_bytecounts() + elif movie: + self.pages.useframes = True + + except Exception: + fh.close() + raise + + @property + def filehandle(self): + """Return file handle.""" + return self._fh + + @property + def filename(self): + """Return name of file handle.""" + return self._fh.name + + @lazyattr + def fstat(self): + """Return status of file handle as stat_result object.""" + try: + return os.fstat(self._fh.fileno()) + except Exception: # io.UnsupportedOperation + return None + + def close(self): + """Close open file handle(s).""" + for tif in self._files.values(): + tif.filehandle.close() + self._files = {} + + def asarray(self, key=None, series=None, out=None, validate=True, maxworkers=1): + """Return image data from multiple TIFF pages as numpy array. + + By default, the data from the first series is returned. + + Parameters + ---------- + key : int, slice, or sequence of page indices + Defines which pages to return as array. + series : int or TiffPageSeries + Defines which series of pages to return as array. + out : numpy.ndarray, str, or file-like object; optional + Buffer where image data will be saved. + If None (default), a new array will be created. + If numpy.ndarray, a writable array of compatible dtype and shape. + If 'memmap', directly memory-map the image data in the TIFF file + if possible; else create a memory-mapped array in a temporary file. + If str or open file, the file name or file object used to + create a memory-map to an array stored in a binary file on disk. + validate : bool + If True (default), validate various tags. + Passed to TiffPage.asarray(). + maxworkers : int + Maximum number of threads to concurrently get data from pages. + Default is 1. 
If None, up to half the CPU cores are used. + Reading data from file is limited to a single thread. + Using multiple threads can significantly speed up this function + if the bottleneck is decoding compressed data, e.g. in case of + large LZW compressed LSM files. + If the bottleneck is I/O or pure Python code, using multiple + threads might be detrimental. + + """ + if not self.pages: + return numpy.array([]) + if key is None and series is None: + series = 0 + if series is not None: + try: + series = self.series[series] + except (KeyError, TypeError): + pass + pages = series._pages + else: + pages = self.pages + + if key is None: + pass + elif isinstance(key, inttypes): + pages = [pages[key]] + elif isinstance(key, slice): + pages = pages[key] + elif isinstance(key, collections.Iterable): + pages = [pages[k] for k in key] + else: + raise TypeError("key must be an int, slice, or sequence") + + if not pages: + raise ValueError("no pages selected") + + if self.is_nih: + result = stack_pages(pages, out=out, maxworkers=maxworkers, squeeze=False) + elif key is None and series and series.offset: + typecode = self.byteorder + series.dtype.char + if out == "memmap" and pages[0].is_memmappable: + result = self.filehandle.memmap_array( + typecode, series.shape, series.offset + ) + else: + if out is not None: + out = create_output(out, series.shape, series.dtype) + self.filehandle.seek(series.offset) + result = self.filehandle.read_array( + typecode, product(series.shape), out=out, native=True + ) + elif len(pages) == 1: + result = pages[0].asarray(out=out, validate=validate) + else: + result = stack_pages(pages, out=out, maxworkers=maxworkers) + + if result is None: + return + + if key is None: + try: + result.shape = series.shape + except ValueError: + try: + warnings.warn( + "failed to reshape %s to %s" % (result.shape, series.shape) + ) + # try series of expected shapes + result.shape = (-1,) + series.shape + except ValueError: + # revert to generic shape + result.shape = 
(-1,) + pages[0].shape + elif len(pages) == 1: + result.shape = pages[0].shape + else: + result.shape = (-1,) + pages[0].shape + return result + + @lazyattr + def series(self): + """Return related pages as TiffPageSeries. + + Side effect: after calling this function, TiffFile.pages might contain + TiffPage and TiffFrame instances. + + """ + if not self.pages: + return [] + + useframes = self.pages.useframes + keyframe = self.pages.keyframe + series = [] + for name in "ome imagej lsm fluoview nih mdgel shaped".split(): + if getattr(self, "is_" + name, False): + series = getattr(self, "_%s_series" % name)() + break + self.pages.useframes = useframes + self.pages.keyframe = keyframe + if not series: + series = self._generic_series() + + # remove empty series, e.g. in MD Gel files + series = [s for s in series if sum(s.shape) > 0] + + for i, s in enumerate(series): + s.index = i + return series + + def _generic_series(self): + """Return image series in file.""" + if self.pages.useframes: + # movie mode + page = self.pages[0] + shape = page.shape + axes = page.axes + if len(self.pages) > 1: + shape = (len(self.pages),) + shape + axes = "I" + axes + return [ + TiffPageSeries(self.pages[:], shape, page.dtype, axes, stype="movie") + ] + + self.pages.clear(False) + self.pages.load() + result = [] + keys = [] + series = {} + compressions = TIFF.DECOMPESSORS + for page in self.pages: + if not page.shape: + continue + key = page.shape + (page.axes, page.compression in compressions) + if key in series: + series[key].append(page) + else: + keys.append(key) + series[key] = [page] + for key in keys: + pages = series[key] + page = pages[0] + shape = page.shape + axes = page.axes + if len(pages) > 1: + shape = (len(pages),) + shape + axes = "I" + axes + result.append( + TiffPageSeries(pages, shape, page.dtype, axes, stype="Generic") + ) + + return result + + def _shaped_series(self): + """Return image series in "shaped" file.""" + pages = self.pages + pages.useframes = True + 
lenpages = len(pages) + + def append_series(series, pages, axes, shape, reshape, name, truncated): + page = pages[0] + if not axes: + shape = page.shape + axes = page.axes + if len(pages) > 1: + shape = (len(pages),) + shape + axes = "Q" + axes + size = product(shape) + resize = product(reshape) + if page.is_contiguous and resize > size and resize % size == 0: + if truncated is None: + truncated = True + axes = "Q" + axes + shape = (resize // size,) + shape + try: + axes = reshape_axes(axes, shape, reshape) + shape = reshape + except ValueError as e: + warnings.warn(str(e)) + series.append( + TiffPageSeries( + pages, + shape, + page.dtype, + axes, + name=name, + stype="Shaped", + truncated=truncated, + ) + ) + + keyframe = axes = shape = reshape = name = None + series = [] + index = 0 + while True: + if index >= lenpages: + break + # new keyframe; start of new series + pages.keyframe = index + keyframe = pages[index] + if not keyframe.is_shaped: + warnings.warn("invalid shape metadata or corrupted file") + return + # read metadata + axes = None + shape = None + metadata = json_description_metadata(keyframe.is_shaped) + name = metadata.get("name", "") + reshape = metadata["shape"] + truncated = metadata.get("truncated", None) + if "axes" in metadata: + axes = metadata["axes"] + if len(axes) == len(reshape): + shape = reshape + else: + axes = "" + warnings.warn("axes do not match shape") + # skip pages if possible + spages = [keyframe] + size = product(reshape) + npages, mod = divmod(size, product(keyframe.shape)) + if mod: + warnings.warn("series shape does not match page shape") + return + if 1 < npages <= lenpages - index: + size *= keyframe._dtype.itemsize + if truncated: + npages = 1 + elif ( + keyframe.is_final + and keyframe.offset + size < pages[index + 1].offset + ): + truncated = False + else: + # need to read all pages for series + truncated = False + for j in range(index + 1, index + npages): + page = pages[j] + page.keyframe = keyframe + 
spages.append(page) + append_series(series, spages, axes, shape, reshape, name, truncated) + index += npages + + return series + + def _imagej_series(self): + """Return image series in ImageJ file.""" + # ImageJ's dimension order is always TZCYXS + # TODO: fix loading of color, composite, or palette images + self.pages.useframes = True + self.pages.keyframe = 0 + + ij = self.imagej_metadata + pages = self.pages + page = pages[0] + + def is_hyperstack(): + # ImageJ hyperstack store all image metadata in the first page and + # image data are stored contiguously before the second page, if any + if not page.is_final: + return False + images = ij.get("images", 0) + if images <= 1: + return False + offset, count = page.is_contiguous + if ( + count != product(page.shape) * page.bitspersample // 8 + or offset + count * images > self.filehandle.size + ): + raise ValueError() + # check that next page is stored after data + if len(pages) > 1 and offset + count * images > pages[1].offset: + return False + return True + + try: + hyperstack = is_hyperstack() + except ValueError: + warnings.warn("invalid ImageJ metadata or corrupted file") + return + if hyperstack: + # no need to read other pages + pages = [page] + else: + self.pages.load() + + shape = [] + axes = [] + if "frames" in ij: + shape.append(ij["frames"]) + axes.append("T") + if "slices" in ij: + shape.append(ij["slices"]) + axes.append("Z") + if "channels" in ij and not ( + page.photometric == 2 and not ij.get("hyperstack", False) + ): + shape.append(ij["channels"]) + axes.append("C") + remain = ij.get("images", len(pages)) // (product(shape) if shape else 1) + if remain > 1: + shape.append(remain) + axes.append("I") + if page.axes[0] == "I": + # contiguous multiple images + shape.extend(page.shape[1:]) + axes.extend(page.axes[1:]) + elif page.axes[:2] == "SI": + # color-mapped contiguous multiple images + shape = page.shape[0:1] + tuple(shape) + page.shape[2:] + axes = list(page.axes[0]) + axes + list(page.axes[2:]) 
+ else: + shape.extend(page.shape) + axes.extend(page.axes) + + truncated = ( + hyperstack + and len(self.pages) == 1 + and page.is_contiguous[1] != product(shape) * page.bitspersample // 8 + ) + + return [ + TiffPageSeries( + pages, shape, page.dtype, axes, stype="ImageJ", truncated=truncated + ) + ] + + def _fluoview_series(self): + """Return image series in FluoView file.""" + self.pages.useframes = True + self.pages.keyframe = 0 + self.pages.load() + mm = self.fluoview_metadata + mmhd = list(reversed(mm["Dimensions"])) + axes = "".join( + TIFF.MM_DIMENSIONS.get(i[0].upper(), "Q") for i in mmhd if i[1] > 1 + ) + shape = tuple(int(i[1]) for i in mmhd if i[1] > 1) + return [ + TiffPageSeries( + self.pages, + shape, + self.pages[0].dtype, + axes, + name=mm["ImageName"], + stype="FluoView", + ) + ] + + def _mdgel_series(self): + """Return image series in MD Gel file.""" + # only a single page, scaled according to metadata in second page + self.pages.useframes = False + self.pages.keyframe = 0 + self.pages.load() + md = self.mdgel_metadata + if md["FileTag"] in (2, 128): + dtype = numpy.dtype("float32") + scale = md["ScalePixel"] + scale = scale[0] / scale[1] # rational + if md["FileTag"] == 2: + # squary root data format + def transform(a): + return a.astype("float32") ** 2 * scale + + else: + + def transform(a): + return a.astype("float32") * scale + + else: + transform = None + page = self.pages[0] + return [ + TiffPageSeries( + [page], page.shape, dtype, page.axes, transform=transform, stype="MDGel" + ) + ] + + def _nih_series(self): + """Return image series in NIH file.""" + self.pages.useframes = True + self.pages.keyframe = 0 + self.pages.load() + page0 = self.pages[0] + if len(self.pages) == 1: + shape = page0.shape + axes = page0.axes + else: + shape = (len(self.pages),) + page0.shape + axes = "I" + page0.axes + return [TiffPageSeries(self.pages, shape, page0.dtype, axes, stype="NIH")] + + def _ome_series(self): + """Return image series in OME-TIFF 
file(s).""" + from xml.etree import cElementTree as etree # delayed import + + omexml = self.pages[0].description + try: + root = etree.fromstring(omexml) + except etree.ParseError as e: + # TODO: test badly encoded OME-XML + warnings.warn("ome-xml: %s" % e) + try: + # might work on Python 2 + omexml = omexml.decode("utf-8", "ignore").encode("utf-8") + root = etree.fromstring(omexml) + except Exception: + return + + self.pages.useframes = True + self.pages.keyframe = 0 + self.pages.load() + + uuid = root.attrib.get("UUID", None) + self._files = {uuid: self} + dirname = self._fh.dirname + modulo = {} + series = [] + for element in root: + if element.tag.endswith("BinaryOnly"): + # TODO: load OME-XML from master or companion file + warnings.warn("ome-xml: not an ome-tiff master file") + break + if element.tag.endswith("StructuredAnnotations"): + for annot in element: + if not annot.attrib.get("Namespace", "").endswith("modulo"): + continue + for value in annot: + for modul in value: + for along in modul: + if not along.tag[:-1].endswith("Along"): + continue + axis = along.tag[-1] + newaxis = along.attrib.get("Type", "other") + newaxis = TIFF.AXES_LABELS[newaxis] + if "Start" in along.attrib: + step = float(along.attrib.get("Step", 1)) + start = float(along.attrib["Start"]) + stop = float(along.attrib["End"]) + step + labels = numpy.arange(start, stop, step) + else: + labels = [ + label.text + for label in along + if label.tag.endswith("Label") + ] + modulo[axis] = (newaxis, labels) + + if not element.tag.endswith("Image"): + continue + + attr = element.attrib + name = attr.get("Name", None) + + for pixels in element: + if not pixels.tag.endswith("Pixels"): + continue + attr = pixels.attrib + dtype = attr.get("PixelType", None) + axes = "".join(reversed(attr["DimensionOrder"])) + shape = list(int(attr["Size" + ax]) for ax in axes) + size = product(shape[:-2]) + ifds = None + spp = 1 # samples per pixel + # FIXME: this implementation assumes the last two + # dimensions 
are stored in tiff pages (shape[:-2]). + # Apparently that is not always the case. + for data in pixels: + if data.tag.endswith("Channel"): + attr = data.attrib + if ifds is None: + spp = int(attr.get("SamplesPerPixel", spp)) + ifds = [None] * (size // spp) + elif int(attr.get("SamplesPerPixel", 1)) != spp: + raise ValueError("cannot handle differing SamplesPerPixel") + continue + if ifds is None: + ifds = [None] * (size // spp) + if not data.tag.endswith("TiffData"): + continue + attr = data.attrib + ifd = int(attr.get("IFD", 0)) + num = int(attr.get("NumPlanes", 1 if "IFD" in attr else 0)) + num = int(attr.get("PlaneCount", num)) + idx = [int(attr.get("First" + ax, 0)) for ax in axes[:-2]] + try: + idx = numpy.ravel_multi_index(idx, shape[:-2]) + except ValueError: + # ImageJ produces invalid ome-xml when cropping + warnings.warn("ome-xml: invalid TiffData index") + continue + for uuid in data: + if not uuid.tag.endswith("UUID"): + continue + if uuid.text not in self._files: + if not self._multifile: + # abort reading multifile OME series + # and fall back to generic series + return [] + fname = uuid.attrib["FileName"] + try: + tif = TiffFile(os.path.join(dirname, fname)) + tif.pages.useframes = True + tif.pages.keyframe = 0 + tif.pages.load() + except (IOError, FileNotFoundError, ValueError): + warnings.warn("ome-xml: failed to read '%s'" % fname) + break + self._files[uuid.text] = tif + tif.close() + pages = self._files[uuid.text].pages + try: + for i in range(num if num else len(pages)): + ifds[idx + i] = pages[ifd + i] + except IndexError: + warnings.warn("ome-xml: index out of range") + # only process first UUID + break + else: + pages = self.pages + try: + for i in range(num if num else len(pages)): + ifds[idx + i] = pages[ifd + i] + except IndexError: + warnings.warn("ome-xml: index out of range") + + if all(i is None for i in ifds): + # skip images without data + continue + + # set a keyframe on all IFDs + keyframe = None + for i in ifds: + # try find a 
TiffPage + if i and i == i.keyframe: + keyframe = i + break + if not keyframe: + # reload a TiffPage from file + for i, keyframe in enumerate(ifds): + if keyframe: + keyframe.parent.pages.keyframe = keyframe.index + keyframe = keyframe.parent.pages[keyframe.index] + ifds[i] = keyframe + break + for i in ifds: + if i is not None: + i.keyframe = keyframe + + dtype = keyframe.dtype + series.append( + TiffPageSeries( + ifds, shape, dtype, axes, parent=self, name=name, stype="OME" + ) + ) + for serie in series: + shape = list(serie.shape) + for axis, (newaxis, labels) in modulo.items(): + i = serie.axes.index(axis) + size = len(labels) + if shape[i] == size: + serie.axes = serie.axes.replace(axis, newaxis, 1) + else: + shape[i] //= size + shape.insert(i + 1, size) + serie.axes = serie.axes.replace(axis, axis + newaxis, 1) + serie.shape = tuple(shape) + # squeeze dimensions + for serie in series: + serie.shape, serie.axes = squeeze_axes(serie.shape, serie.axes) + return series + + def _lsm_series(self): + """Return main image series in LSM file. 
Skip thumbnails.""" + lsmi = self.lsm_metadata + axes = TIFF.CZ_LSMINFO_SCANTYPE[lsmi["ScanType"]] + if self.pages[0].photometric == 2: # RGB; more than one channel + axes = axes.replace("C", "").replace("XY", "XYC") + if lsmi.get("DimensionP", 0) > 1: + axes += "P" + if lsmi.get("DimensionM", 0) > 1: + axes += "M" + axes = axes[::-1] + shape = tuple(int(lsmi[TIFF.CZ_LSMINFO_DIMENSIONS[i]]) for i in axes) + name = lsmi.get("Name", "") + self.pages.keyframe = 0 + pages = self.pages[::2] + dtype = pages[0].dtype + series = [TiffPageSeries(pages, shape, dtype, axes, name=name, stype="LSM")] + + if self.pages[1].is_reduced: + self.pages.keyframe = 1 + pages = self.pages[1::2] + dtype = pages[0].dtype + cp, i = 1, 0 + while cp < len(pages) and i < len(shape) - 2: + cp *= shape[i] + i += 1 + shape = shape[:i] + pages[0].shape + axes = axes[:i] + "CYX" + series.append( + TiffPageSeries(pages, shape, dtype, axes, name=name, stype="LSMreduced") + ) + + return series + + def _lsm_load_pages(self): + """Load all pages from LSM file.""" + self.pages.cache = True + self.pages.useframes = True + # second series: thumbnails + self.pages.keyframe = 1 + keyframe = self.pages[1] + for page in self.pages[1::2]: + page.keyframe = keyframe + # first series: data + self.pages.keyframe = 0 + keyframe = self.pages[0] + for page in self.pages[::2]: + page.keyframe = keyframe + + def _lsm_fix_strip_offsets(self): + """Unwrap strip offsets for LSM files greater than 4 GB. + + Each series and position require separate unwrapping (undocumented). 
+ + """ + if self.filehandle.size < 2**32: + return + + pages = self.pages + npages = len(pages) + series = self.series[0] + axes = series.axes + + # find positions + positions = 1 + for i in 0, 1: + if series.axes[i] in "PM": + positions *= series.shape[i] + + # make time axis first + if positions > 1: + ntimes = 0 + for i in 1, 2: + if axes[i] == "T": + ntimes = series.shape[i] + break + if ntimes: + div, mod = divmod(npages, 2 * positions * ntimes) + assert mod == 0 + shape = (positions, ntimes, div, 2) + indices = numpy.arange(product(shape)).reshape(shape) + indices = numpy.moveaxis(indices, 1, 0) + else: + indices = numpy.arange(npages).reshape(-1, 2) + + # images of reduced page might be stored first + if pages[0].dataoffsets[0] > pages[1].dataoffsets[0]: + indices = indices[..., ::-1] + + # unwrap offsets + wrap = 0 + previousoffset = 0 + for i in indices.flat: + page = pages[i] + dataoffsets = [] + for currentoffset in page.dataoffsets: + if currentoffset < previousoffset: + wrap += 2**32 + dataoffsets.append(currentoffset + wrap) + previousoffset = currentoffset + page.dataoffsets = tuple(dataoffsets) + + def _lsm_fix_strip_bytecounts(self): + """Set databytecounts to size of compressed data. + + The StripByteCounts tag in LSM files contains the number of bytes + for the uncompressed data. 
+ + """ + pages = self.pages + if pages[0].compression == 1: + return + # sort pages by first strip offset + pages = sorted(pages, key=lambda p: p.dataoffsets[0]) + npages = len(pages) - 1 + for i, page in enumerate(pages): + if page.index % 2: + continue + offsets = page.dataoffsets + bytecounts = page.databytecounts + if i < npages: + lastoffset = pages[i + 1].dataoffsets[0] + else: + # LZW compressed strips might be longer than uncompressed + lastoffset = min(offsets[-1] + 2 * bytecounts[-1], self._fh.size) + offsets = offsets + (lastoffset,) + page.databytecounts = tuple( + offsets[j + 1] - offsets[j] for j in range(len(bytecounts)) + ) + + def __getattr__(self, name): + """Return 'is_flag' attributes from first page.""" + if name[3:] in TIFF.FILE_FLAGS: + if not self.pages: + return False + value = bool(getattr(self.pages[0], name)) + setattr(self, name, value) + return value + raise AttributeError( + "'%s' object has no attribute '%s'" % (self.__class__.__name__, name) + ) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + + def __str__(self, detail=0, width=79): + """Return string containing information about file. + + The detail parameter specifies the level of detail returned: + + 0: file only. + 1: all series, first page of series and its tags. + 2: large tag values and file metadata. + 3: all pages. 
+ + """ + info = [ + "TiffFile '%s'", + format_size(self._fh.size), + {"<": "LittleEndian", ">": "BigEndian"}[self.byteorder], + ] + if self.is_bigtiff: + info.append("BigTiff") + info.append("|".join(f.upper() for f in self.flags)) + if len(self.pages) > 1: + info.append("%i Pages" % len(self.pages)) + if len(self.series) > 1: + info.append("%i Series" % len(self.series)) + if len(self._files) > 1: + info.append("%i Files" % (len(self._files))) + info = " ".join(info) + info = info.replace(" ", " ").replace(" ", " ") + info = info % snipstr(self._fh.name, max(12, width + 2 - len(info))) + if detail <= 0: + return info + info = [info] + info.append("\n".join(str(s) for s in self.series)) + if detail >= 3: + info.extend( + ( + TiffPage.__str__(p, detail=detail, width=width) + for p in self.pages + if p is not None + ) + ) + else: + info.extend( + ( + TiffPage.__str__(s.pages[0], detail=detail, width=width) + for s in self.series + if s.pages[0] is not None + ) + ) + if detail >= 2: + for name in sorted(self.flags): + if hasattr(self, name + "_metadata"): + m = getattr(self, name + "_metadata") + if m: + info.append( + "%s_METADATA\n%s" + % ( + name.upper(), + pformat(m, width=width, height=detail * 12), + ) + ) + return "\n\n".join(info).replace("\n\n\n", "\n\n") + + @lazyattr + def flags(self): + """Return set of file flags.""" + return set( + name.lower() + for name in sorted(TIFF.FILE_FLAGS) + if getattr(self, "is_" + name) + ) + + @lazyattr + def is_mdgel(self): + """File has MD Gel format.""" + try: + return self.pages[0].is_mdgel or self.pages[1].is_mdgel + except IndexError: + return False + + @property + def is_movie(self): + """Return if file is a movie.""" + return self.pages.useframes + + @lazyattr + def shaped_metadata(self): + """Return Tifffile metadata from JSON descriptions as dicts.""" + if not self.is_shaped: + return + return tuple( + json_description_metadata(s.pages[0].is_shaped) + for s in self.series + if s.stype.lower() == "shaped" + ) + + 
@lazyattr + def ome_metadata(self): + """Return OME XML as dict.""" + # TODO: remove this or return XML? + if not self.is_ome: + return + return xml2dict(self.pages[0].description)["OME"] + + @lazyattr + def qptiff_metadata(self): + """Return PerkinElmer-QPI-ImageDescription XML element as dict.""" + if not self.is_qptiff: + return + root = "PerkinElmer-QPI-ImageDescription" + xml = self.pages[0].description.replace(" " + root + " ", root) + return xml2dict(xml)[root] + + @lazyattr + def lsm_metadata(self): + """Return LSM metadata from CZ_LSMINFO tag as dict.""" + if not self.is_lsm: + return + return self.pages[0].tags["CZ_LSMINFO"].value + + @lazyattr + def stk_metadata(self): + """Return STK metadata from UIC tags as dict.""" + if not self.is_stk: + return + page = self.pages[0] + tags = page.tags + result = {} + result["NumberPlanes"] = tags["UIC2tag"].count + if page.description: + result["PlaneDescriptions"] = page.description.split("\0") + # result['plane_descriptions'] = stk_description_metadata( + # page.image_description) + if "UIC1tag" in tags: + result.update(tags["UIC1tag"].value) + if "UIC3tag" in tags: + result.update(tags["UIC3tag"].value) # wavelengths + if "UIC4tag" in tags: + result.update(tags["UIC4tag"].value) # override uic1 tags + uic2tag = tags["UIC2tag"].value + result["ZDistance"] = uic2tag["ZDistance"] + result["TimeCreated"] = uic2tag["TimeCreated"] + result["TimeModified"] = uic2tag["TimeModified"] + try: + result["DatetimeCreated"] = numpy.array( + [ + julian_datetime(*dt) + for dt in zip(uic2tag["DateCreated"], uic2tag["TimeCreated"]) + ], + dtype="datetime64[ns]", + ) + result["DatetimeModified"] = numpy.array( + [ + julian_datetime(*dt) + for dt in zip(uic2tag["DateModified"], uic2tag["TimeModified"]) + ], + dtype="datetime64[ns]", + ) + except ValueError as e: + warnings.warn("stk_metadata: %s" % e) + return result + + @lazyattr + def imagej_metadata(self): + """Return consolidated ImageJ metadata as dict.""" + if not 
self.is_imagej: + return + page = self.pages[0] + result = imagej_description_metadata(page.is_imagej) + if "IJMetadata" in page.tags: + try: + result.update(page.tags["IJMetadata"].value) + except Exception: + pass + return result + + @lazyattr + def fluoview_metadata(self): + """Return consolidated FluoView metadata as dict.""" + if not self.is_fluoview: + return + result = {} + page = self.pages[0] + result.update(page.tags["MM_Header"].value) + # TODO: read stamps from all pages + result["Stamp"] = page.tags["MM_Stamp"].value + # skip parsing image description; not reliable + # try: + # t = fluoview_description_metadata(page.image_description) + # if t is not None: + # result['ImageDescription'] = t + # except Exception as e: + # warnings.warn( + # "failed to read FluoView image description: %s" % e) + return result + + @lazyattr + def nih_metadata(self): + """Return NIH Image metadata from NIHImageHeader tag as dict.""" + if not self.is_nih: + return + return self.pages[0].tags["NIHImageHeader"].value + + @lazyattr + def fei_metadata(self): + """Return FEI metadata from SFEG or HELIOS tags as dict.""" + if not self.is_fei: + return + tags = self.pages[0].tags + if "FEI_SFEG" in tags: + return tags["FEI_SFEG"].value + if "FEI_HELIOS" in tags: + return tags["FEI_HELIOS"].value + + @lazyattr + def sem_metadata(self): + """Return SEM metadata from CZ_SEM tag as dict.""" + if not self.is_sem: + return + return self.pages[0].tags["CZ_SEM"].value + + @lazyattr + def mdgel_metadata(self): + """Return consolidated metadata from MD GEL tags as dict.""" + for page in self.pages[:2]: + if "MDFileTag" in page.tags: + tags = page.tags + break + else: + return + result = {} + for code in range(33445, 33453): + name = TIFF.TAGS[code] + if name not in tags: + continue + result[name[2:]] = tags[name].value + return result + + @lazyattr + def andor_metadata(self): + """Return Andor tags as dict.""" + return self.pages[0].andor_tags + + @lazyattr + def epics_metadata(self): + 
"""Return EPICS areaDetector tags as dict.""" + return self.pages[0].epics_tags + + @lazyattr + def tvips_metadata(self): + """Return TVIPS tag as dict.""" + if not self.is_tvips: + return + return self.pages[0].tags["TVIPS"].value + + @lazyattr + def metaseries_metadata(self): + """Return MetaSeries metadata from image description as dict.""" + if not self.is_metaseries: + return + return metaseries_description_metadata(self.pages[0].description) + + @lazyattr + def pilatus_metadata(self): + """Return Pilatus metadata from image description as dict.""" + if not self.is_pilatus: + return + return pilatus_description_metadata(self.pages[0].description) + + @lazyattr + def micromanager_metadata(self): + """Return consolidated MicroManager metadata as dict.""" + if not self.is_micromanager: + return + # from file header + result = read_micromanager_metadata(self._fh) + # from tag + result.update(self.pages[0].tags["MicroManagerMetadata"].value) + return result + + @lazyattr + def scanimage_metadata(self): + """Return ScanImage non-varying frame and ROI metadata as dict.""" + if not self.is_scanimage: + return + result = {} + try: + framedata, roidata = read_scanimage_metadata(self._fh) + result["FrameData"] = framedata + result.update(roidata) + except ValueError: + pass + # TODO: scanimage_artist_metadata + try: + result["Description"] = scanimage_description_metadata( + self.pages[0].description + ) + except Exception as e: + warnings.warn("scanimage_description_metadata failed: %s" % e) + return result + + @property + def geotiff_metadata(self): + """Return GeoTIFF metadata from first page as dict.""" + if not self.is_geotiff: + return + return self.pages[0].geotiff_tags + + +class TiffPages(object): + """Sequence of TIFF image file directories.""" + + def __init__(self, parent): + """Initialize instance from file. Read first TiffPage from file. + + The file position must be at an offset to an offset to a TiffPage. 
+ + """ + self.parent = parent + self.pages = [] # cache of TiffPages, TiffFrames, or their offsets + self.complete = False # True if offsets to all pages were read + self._tiffpage = TiffPage # class for reading tiff pages + self._keyframe = None + self._cache = True + + # read offset to first page + fh = parent.filehandle + self._nextpageoffset = fh.tell() + offset = struct.unpack(parent.offsetformat, fh.read(parent.offsetsize))[0] + + if offset == 0: + # warnings.warn('file contains no pages') + self.complete = True + return + if offset >= fh.size: + warnings.warn("invalid page offset (%i)" % offset) + self.complete = True + return + + # always read and cache first page + fh.seek(offset) + page = TiffPage(parent, index=0) + self.pages.append(page) + self._keyframe = page + + @property + def cache(self): + """Return if pages/frames are currently being cached.""" + return self._cache + + @cache.setter + def cache(self, value): + """Enable or disable caching of pages/frames. Clear cache if False.""" + value = bool(value) + if self._cache and not value: + self.clear() + self._cache = value + + @property + def useframes(self): + """Return if currently using TiffFrame (True) or TiffPage (False).""" + return self._tiffpage == TiffFrame and TiffFrame is not TiffPage + + @useframes.setter + def useframes(self, value): + """Set to use TiffFrame (True) or TiffPage (False).""" + self._tiffpage = TiffFrame if value else TiffPage + + @property + def keyframe(self): + """Return index of current keyframe.""" + return self._keyframe.index + + @keyframe.setter + def keyframe(self, index): + """Set current keyframe. 
Load TiffPage from file if necessary.""" + if self._keyframe.index == index: + return + if self.complete or 0 <= index < len(self.pages): + page = self.pages[index] + if isinstance(page, TiffPage): + self._keyframe = page + return + elif isinstance(page, TiffFrame): + # remove existing frame + self.pages[index] = page.offset + # load TiffPage from file + useframes = self.useframes + self._tiffpage = TiffPage + self._keyframe = self[index] + self.useframes = useframes + + @property + def next_page_offset(self): + """Return offset where offset to a new page can be stored.""" + if not self.complete: + self._seek(-1) + return self._nextpageoffset + + def load(self): + """Read all remaining pages from file.""" + fh = self.parent.filehandle + keyframe = self._keyframe + pages = self.pages + if not self.complete: + self._seek(-1) + for i, page in enumerate(pages): + if isinstance(page, inttypes): + fh.seek(page) + page = self._tiffpage(self.parent, index=i, keyframe=keyframe) + pages[i] = page + + def clear(self, fully=True): + """Delete all but first page from cache. 
Set keyframe to first page.""" + pages = self.pages + if not self._cache or len(pages) < 1: + return + self._keyframe = pages[0] + if fully: + # delete all but first TiffPage/TiffFrame + for i, page in enumerate(pages[1:]): + if not isinstance(page, inttypes): + pages[i + 1] = page.offset + elif TiffFrame is not TiffPage: + # delete only TiffFrames + for i, page in enumerate(pages): + if isinstance(page, TiffFrame): + pages[i] = page.offset + + def _seek(self, index, maxpages=2**22): + """Seek file to offset of specified page.""" + pages = self.pages + if not pages: + return + + fh = self.parent.filehandle + if fh.closed: + raise RuntimeError("FileHandle is closed") + + if self.complete or 0 <= index < len(pages): + page = pages[index] + offset = page if isinstance(page, inttypes) else page.offset + fh.seek(offset) + return + + offsetformat = self.parent.offsetformat + offsetsize = self.parent.offsetsize + tagnoformat = self.parent.tagnoformat + tagnosize = self.parent.tagnosize + tagsize = self.parent.tagsize + unpack = struct.unpack + + page = pages[-1] + offset = page if isinstance(page, inttypes) else page.offset + + while len(pages) < maxpages: + # read offsets to pages from file until index is reached + fh.seek(offset) + # skip tags + try: + tagno = unpack(tagnoformat, fh.read(tagnosize))[0] + if tagno > 4096: + raise ValueError("suspicious number of tags") + except Exception: + warnings.warn("corrupted tag list at offset %i" % offset) + del pages[-1] + self.complete = True + break + self._nextpageoffset = offset + tagnosize + tagno * tagsize + fh.seek(self._nextpageoffset) + + # read offset to next page + offset = unpack(offsetformat, fh.read(offsetsize))[0] + if offset == 0: + self.complete = True + break + if offset >= fh.size: + warnings.warn("invalid page offset (%i)" % offset) + self.complete = True + break + + pages.append(offset) + if 0 <= index < len(pages): + break + + if index >= len(pages): + raise IndexError("list index out of range") + + page = 
pages[index] + fh.seek(page if isinstance(page, inttypes) else page.offset) + + def __bool__(self): + """Return True if file contains any pages.""" + return len(self.pages) > 0 + + def __len__(self): + """Return number of pages in file.""" + if not self.complete: + self._seek(-1) + return len(self.pages) + + def __getitem__(self, key): + """Return specified page(s) from cache or file.""" + pages = self.pages + if not pages: + raise IndexError("list index out of range") + if key == 0: + return pages[key] + + if isinstance(key, slice): + start, stop, _ = key.indices(2**31 - 1) + if not self.complete and max(stop, start) > len(pages): + self._seek(-1) + return [self[i] for i in range(*key.indices(len(pages)))] + + if self.complete and key >= len(pages): + raise IndexError("list index out of range") + + try: + page = pages[key] + except IndexError: + page = 0 + if not isinstance(page, inttypes): + return page + + self._seek(key) + page = self._tiffpage(self.parent, index=key, keyframe=self._keyframe) + if self._cache: + pages[key] = page + return page + + def __iter__(self): + """Return iterator over all pages.""" + i = 0 + while True: + try: + yield self[i] + i += 1 + except IndexError: + break + + +class TiffPage(object): + """TIFF image file directory (IFD). + + Attributes + ---------- + index : int + Index of page in file. + dtype : numpy.dtype or None + Data type (native byte order) of the image in IFD. + shape : tuple + Dimensions of the image in IFD. + axes : str + Axes label codes: + 'X' width, 'Y' height, 'S' sample, 'I' image series|page|plane, + 'Z' depth, 'C' color|em-wavelength|channel, 'E' ex-wavelength|lambda, + 'T' time, 'R' region|tile, 'A' angle, 'P' phase, 'H' lifetime, + 'L' exposure, 'V' event, 'Q' unknown, '_' missing + tags : dict + Dictionary of tags in IFD. {tag.name: TiffTag} + colormap : numpy.ndarray + Color look up table, if exists. + + All attributes are read-only. 
+ + Notes + ----- + The internal, normalized '_shape' attribute is 6 dimensional: + + 0 : number planes/images (stk, ij). + 1 : planar samplesperpixel. + 2 : imagedepth Z (sgi). + 3 : imagelength Y. + 4 : imagewidth X. + 5 : contig samplesperpixel. + + """ + + # default properties; will be updated from tags + imagewidth = 0 + imagelength = 0 + imagedepth = 1 + tilewidth = 0 + tilelength = 0 + tiledepth = 1 + bitspersample = 1 + samplesperpixel = 1 + sampleformat = 1 + rowsperstrip = 2**32 - 1 + compression = 1 + planarconfig = 1 + fillorder = 1 + photometric = 0 + predictor = 1 + extrasamples = 1 + colormap = None + software = "" + description = "" + description1 = "" + + def __init__(self, parent, index, keyframe=None): + """Initialize instance from file. + + The file handle position must be at offset to a valid IFD. + + """ + self.parent = parent + self.index = index + self.shape = () + self._shape = () + self.dtype = None + self._dtype = None + self.axes = "" + self.tags = {} + + self.dataoffsets = () + self.databytecounts = () + + # read TIFF IFD structure and its tags from file + fh = parent.filehandle + self.offset = fh.tell() # offset to this IFD + try: + tagno = struct.unpack(parent.tagnoformat, fh.read(parent.tagnosize))[0] + if tagno > 4096: + raise ValueError("suspicious number of tags") + except Exception: + raise ValueError("corrupted tag list at offset %i" % self.offset) + + tagsize = parent.tagsize + data = fh.read(tagsize * tagno) + tags = self.tags + index = -tagsize + for _ in range(tagno): + index += tagsize + try: + tag = TiffTag(self.parent, data[index : index + tagsize]) + except TiffTag.Error as e: + warnings.warn(str(e)) + continue + tagname = tag.name + if tagname not in tags: + name = tagname + tags[name] = tag + else: + # some files contain multiple tags with same code + # e.g. 
MicroManager files contain two ImageDescription tags + i = 1 + while True: + name = "%s%i" % (tagname, i) + if name not in tags: + tags[name] = tag + break + name = TIFF.TAG_ATTRIBUTES.get(name, "") + if name: + if name[:3] in "sof des" and not isinstance(tag.value, str): + pass # wrong string type for software, description + else: + setattr(self, name, tag.value) + + if not tags: + return # found in FIBICS + + # consolidate private tags; remove them from self.tags + if self.is_andor: + self.andor_tags + elif self.is_epics: + self.epics_tags + + if self.is_lsm or (self.index and self.parent.is_lsm): + # correct non standard LSM bitspersample tags + self.tags["BitsPerSample"]._fix_lsm_bitspersample(self) + + if self.is_vista or (self.index and self.parent.is_vista): + # ISS Vista writes wrong ImageDepth tag + self.imagedepth = 1 + + if self.is_stk and "UIC1tag" in tags and not tags["UIC1tag"].value: + # read UIC1tag now that plane count is known + uic1tag = tags["UIC1tag"] + fh.seek(uic1tag.valueoffset) + tags["UIC1tag"].value = read_uic1tag( + fh, + self.parent.byteorder, + uic1tag.dtype, + uic1tag.count, + None, + tags["UIC2tag"].count, + ) + + if "IJMetadata" in tags: + # decode IJMetadata tag + try: + tags["IJMetadata"].value = imagej_metadata( + tags["IJMetadata"].value, + tags["IJMetadataByteCounts"].value, + self.parent.byteorder, + ) + except Exception as e: + warnings.warn(str(e)) + + if "BitsPerSample" in tags: + tag = tags["BitsPerSample"] + if tag.count == 1: + self.bitspersample = tag.value + else: + # LSM might list more items than samplesperpixel + value = tag.value[: self.samplesperpixel] + if any((v - value[0] for v in value)): + self.bitspersample = value + else: + self.bitspersample = value[0] + + if "SampleFormat" in tags: + tag = tags["SampleFormat"] + if tag.count == 1: + self.sampleformat = tag.value + else: + value = tag.value[: self.samplesperpixel] + if any((v - value[0] for v in value)): + self.sampleformat = value + else: + 
self.sampleformat = value[0] + + if "ImageLength" in tags: + if "RowsPerStrip" not in tags or tags["RowsPerStrip"].count > 1: + self.rowsperstrip = self.imagelength + # self.stripsperimage = int(math.floor( + # float(self.imagelength + self.rowsperstrip - 1) / + # self.rowsperstrip)) + + # determine dtype + dtype = self.sampleformat, self.bitspersample + dtype = TIFF.SAMPLE_DTYPES.get(dtype, None) + if dtype is not None: + dtype = numpy.dtype(dtype) + self.dtype = self._dtype = dtype + + # determine shape of data + imagelength = self.imagelength + imagewidth = self.imagewidth + imagedepth = self.imagedepth + samplesperpixel = self.samplesperpixel + + if self.is_stk: + assert self.imagedepth == 1 + uictag = tags["UIC2tag"].value + planes = tags["UIC2tag"].count + if self.planarconfig == 1: + self._shape = (planes, 1, 1, imagelength, imagewidth, samplesperpixel) + if samplesperpixel == 1: + self.shape = (planes, imagelength, imagewidth) + self.axes = "YX" + else: + self.shape = (planes, imagelength, imagewidth, samplesperpixel) + self.axes = "YXS" + else: + self._shape = (planes, samplesperpixel, 1, imagelength, imagewidth, 1) + if samplesperpixel == 1: + self.shape = (planes, imagelength, imagewidth) + self.axes = "YX" + else: + self.shape = (planes, samplesperpixel, imagelength, imagewidth) + self.axes = "SYX" + # detect type of series + if planes == 1: + self.shape = self.shape[1:] + elif numpy.all(uictag["ZDistance"] != 0): + self.axes = "Z" + self.axes + elif numpy.all(numpy.diff(uictag["TimeCreated"]) != 0): + self.axes = "T" + self.axes + else: + self.axes = "I" + self.axes + elif self.photometric == 2 or samplesperpixel > 1: # PHOTOMETRIC.RGB + if self.planarconfig == 1: + self._shape = ( + 1, + 1, + imagedepth, + imagelength, + imagewidth, + samplesperpixel, + ) + if imagedepth == 1: + self.shape = (imagelength, imagewidth, samplesperpixel) + self.axes = "YXS" + else: + self.shape = (imagedepth, imagelength, imagewidth, samplesperpixel) + self.axes = "ZYXS" 
+ else: + self._shape = ( + 1, + samplesperpixel, + imagedepth, + imagelength, + imagewidth, + 1, + ) + if imagedepth == 1: + self.shape = (samplesperpixel, imagelength, imagewidth) + self.axes = "SYX" + else: + self.shape = (samplesperpixel, imagedepth, imagelength, imagewidth) + self.axes = "SZYX" + else: + self._shape = (1, 1, imagedepth, imagelength, imagewidth, 1) + if imagedepth == 1: + self.shape = (imagelength, imagewidth) + self.axes = "YX" + else: + self.shape = (imagedepth, imagelength, imagewidth) + self.axes = "ZYX" + + # dataoffsets and databytecounts + if "TileOffsets" in tags: + self.dataoffsets = tags["TileOffsets"].value + elif "StripOffsets" in tags: + self.dataoffsets = tags["StripOffsets"].value + else: + self.dataoffsets = (0,) + + if "TileByteCounts" in tags: + self.databytecounts = tags["TileByteCounts"].value + elif "StripByteCounts" in tags: + self.databytecounts = tags["StripByteCounts"].value + else: + self.databytecounts = (product(self.shape) * (self.bitspersample // 8),) + if self.compression != 1: + warnings.warn("required ByteCounts tag is missing") + + assert len(self.shape) == len(self.axes) + + def asarray( + self, + out=None, + squeeze=True, + lock=None, + reopen=True, + maxsize=2**44, + validate=True, + ): + """Read image data from file and return as numpy array. + + Raise ValueError if format is unsupported. + + Parameters + ---------- + out : numpy.ndarray, str, or file-like object; optional + Buffer where image data will be saved. + If None (default), a new array will be created. + If numpy.ndarray, a writable array of compatible dtype and shape. + If 'memmap', directly memory-map the image data in the TIFF file + if possible; else create a memory-mapped array in a temporary file. + If str or open file, the file name or file object used to + create a memory-map to an array stored in a binary file on disk. + squeeze : bool + If True, all length-1 dimensions (except X and Y) are + squeezed out from the array. 
+ If False, the shape of the returned array might be different from + the page.shape. + lock : {RLock, NullContext} + A reentrant lock used to synchronize reads from file. + If None (default), the lock of the parent's filehandle is used. + reopen : bool + If True (default) and the parent file handle is closed, the file + is temporarily re-opened and closed if no exception occurs. + maxsize: int or None + Maximum size of data before a ValueError is raised. + Can be used to catch DOS. Default: 16 TB. + validate : bool + If True (default), validate various parameters. + If None, only validate parameters and return None. + + """ + self_ = self + self = self.keyframe # self or keyframe + + if not self._shape or product(self._shape) == 0: + return + + tags = self.tags + + if validate or validate is None: + if maxsize and product(self._shape) > maxsize: + raise ValueError("data are too large %s" % str(self._shape)) + if self.dtype is None: + raise ValueError( + "data type not supported: %s%i" + % (self.sampleformat, self.bitspersample) + ) + if self.compression not in TIFF.DECOMPESSORS: + raise ValueError("cannot decompress %s" % self.compression.name) + if "SampleFormat" in tags: + tag = tags["SampleFormat"] + if tag.count != 1 and any((i - tag.value[0] for i in tag.value)): + raise ValueError("sample formats do not match %s" % tag.value) + if self.is_chroma_subsampled and ( + self.compression != 7 or self.planarconfig == 2 + ): + raise NotImplementedError("chroma subsampling not supported") + if validate is None: + return + + fh = self_.parent.filehandle + lock = fh.lock if lock is None else lock + with lock: + closed = fh.closed + if closed: + if reopen: + fh.open() + else: + raise IOError("file handle is closed") + + dtype = self._dtype + shape = self._shape + imagewidth = self.imagewidth + imagelength = self.imagelength + imagedepth = self.imagedepth + bitspersample = self.bitspersample + typecode = self.parent.byteorder + dtype.char + lsb2msb = self.fillorder == 2 + 
offsets, bytecounts = self_.offsets_bytecounts + istiled = self.is_tiled + + if istiled: + tilewidth = self.tilewidth + tilelength = self.tilelength + tiledepth = self.tiledepth + tw = (imagewidth + tilewidth - 1) // tilewidth + tl = (imagelength + tilelength - 1) // tilelength + td = (imagedepth + tiledepth - 1) // tiledepth + shape = ( + shape[0], + shape[1], + td * tiledepth, + tl * tilelength, + tw * tilewidth, + shape[-1], + ) + tileshape = (tiledepth, tilelength, tilewidth, shape[-1]) + runlen = tilewidth + else: + runlen = imagewidth + + if self.planarconfig == 1: + runlen *= self.samplesperpixel + + if out == "memmap" and self.is_memmappable: + with lock: + result = fh.memmap_array(typecode, shape, offset=offsets[0]) + elif self.is_contiguous: + if out is not None: + out = create_output(out, shape, dtype) + with lock: + fh.seek(offsets[0]) + result = fh.read_array(typecode, product(shape), out=out) + if out is None and not result.dtype.isnative: + # swap byte order and dtype without copy + result.byteswap(True) + result = result.newbyteorder() + if lsb2msb: + reverse_bitorder(result) + else: + result = create_output(out, shape, dtype) + + decompress = TIFF.DECOMPESSORS[self.compression] + + if self.compression == 7: # COMPRESSION.JPEG + if bitspersample not in (8, 12): + raise ValueError("unsupported JPEG precision %i" % bitspersample) + if "JPEGTables" in tags: + table = tags["JPEGTables"].value + else: + table = b"" + unpack = identityfunc + colorspace = TIFF.PHOTOMETRIC(self.photometric).name + + def decompress( + x, + func=decompress, + table=table, + bitspersample=bitspersample, + colorspace=colorspace, + ): + return func(x, table, bitspersample, colorspace).reshape(-1) + + elif bitspersample in (8, 16, 32, 64, 128): + if (bitspersample * runlen) % 8: + raise ValueError("data and sample size mismatch") + + def unpack(x, typecode=typecode): + if self.predictor == 3: # PREDICTOR.FLOATINGPOINT + # the floating point horizontal differencing decoder + # 
needs the raw byte order + typecode = dtype.char + try: + # read only numpy array + return numpy.frombuffer(x, typecode) + except ValueError: + # strips may be missing EOI + # warnings.warn('unpack: %s' % e) + xlen = (len(x) // (bitspersample // 8)) * (bitspersample // 8) + return numpy.frombuffer(x[:xlen], typecode) + + elif isinstance(bitspersample, tuple): + + def unpack(x, typecode=typecode, bitspersample=bitspersample): + return unpack_rgb(x, typecode, bitspersample) + + else: + + def unpack( + x, typecode=typecode, bitspersample=bitspersample, runlen=runlen + ): + return unpack_ints(x, typecode, bitspersample, runlen) + + if istiled: + writable = None + tw, tl, td, pl = 0, 0, 0, 0 + for tile in buffered_read(fh, lock, offsets, bytecounts): + if lsb2msb: + tile = reverse_bitorder(tile) + tile = decompress(tile) + tile = unpack(tile) + try: + tile.shape = tileshape + except ValueError: + # incomplete tiles; see gdal issue #1179 + warnings.warn("invalid tile data") + t = numpy.zeros(tileshape, dtype).reshape(-1) + s = min(tile.size, t.size) + t[:s] = tile[:s] + tile = t.reshape(tileshape) + if self.predictor == 2: # PREDICTOR.HORIZONTAL + if writable is None: + writable = tile.flags["WRITEABLE"] + if writable: + numpy.cumsum(tile, axis=-2, dtype=dtype, out=tile) + else: + tile = numpy.cumsum(tile, axis=-2, dtype=dtype) + elif self.predictor == 3: # PREDICTOR.FLOATINGPOINT + raise NotImplementedError() + result[ + 0, + pl, + td : td + tiledepth, + tl : tl + tilelength, + tw : tw + tilewidth, + :, + ] = tile + del tile + tw += tilewidth + if tw >= shape[4]: + tw, tl = 0, tl + tilelength + if tl >= shape[3]: + tl, td = 0, td + tiledepth + if td >= shape[2]: + td, pl = 0, pl + 1 + result = result[..., :imagedepth, :imagelength, :imagewidth, :] + else: + strip_size = self.rowsperstrip * self.imagewidth + if self.planarconfig == 1: + strip_size *= self.samplesperpixel + result = result.reshape(-1) + index = 0 + for strip in buffered_read(fh, lock, offsets, 
bytecounts): + if lsb2msb: + strip = reverse_bitorder(strip) + strip = decompress(strip) + strip = unpack(strip) + size = min(result.size, strip.size, strip_size, result.size - index) + result[index : index + size] = strip[:size] + del strip + index += size + + result.shape = self._shape + + if self.predictor != 1 and not (istiled and not self.is_contiguous): + if self.parent.is_lsm and self.compression == 1: + pass # work around bug in LSM510 software + elif self.predictor == 2: # PREDICTOR.HORIZONTAL + numpy.cumsum(result, axis=-2, dtype=dtype, out=result) + elif self.predictor == 3: # PREDICTOR.FLOATINGPOINT + result = decode_floats(result) + + if squeeze: + try: + result.shape = self.shape + except ValueError: + warnings.warn( + "failed to reshape from %s to %s" + % (str(result.shape), str(self.shape)) + ) + + if closed: + # TODO: file should remain open if an exception occurred above + fh.close() + return result + + def asrgb( + self, + uint8=False, + alpha=None, + colormap=None, + dmin=None, + dmax=None, + *args, + **kwargs + ): + """Return image data as RGB(A). + + Work in progress. 
+ + """ + data = self.asarray(*args, **kwargs) + self = self.keyframe # self or keyframe + photometric = self.photometric + PHOTOMETRIC = TIFF.PHOTOMETRIC + + if photometric == PHOTOMETRIC.PALETTE: + colormap = self.colormap + if colormap.shape[1] < 2**self.bitspersample or self.dtype.char not in "BH": + raise ValueError("cannot apply colormap") + if uint8: + if colormap.max() > 255: + colormap >>= 8 + colormap = colormap.astype("uint8") + if "S" in self.axes: + data = data[..., 0] if self.planarconfig == 1 else data[0] + data = apply_colormap(data, colormap) + + elif photometric == PHOTOMETRIC.RGB: + if "ExtraSamples" in self.tags: + if alpha is None: + alpha = TIFF.EXTRASAMPLE + extrasamples = self.extrasamples + if self.tags["ExtraSamples"].count == 1: + extrasamples = (extrasamples,) + for i, exs in enumerate(extrasamples): + if exs in alpha: + if self.planarconfig == 1: + data = data[..., [0, 1, 2, 3 + i]] + else: + data = data[:, [0, 1, 2, 3 + i]] + break + else: + if self.planarconfig == 1: + data = data[..., :3] + else: + data = data[:, :3] + # TODO: convert to uint8? + + elif photometric == PHOTOMETRIC.MINISBLACK: + raise NotImplementedError() + elif photometric == PHOTOMETRIC.MINISWHITE: + raise NotImplementedError() + elif photometric == PHOTOMETRIC.SEPARATED: + raise NotImplementedError() + else: + raise NotImplementedError() + return data + + def aspage(self): + return self + + @property + def keyframe(self): + return self + + @keyframe.setter + def keyframe(self, index): + return + + @lazyattr + def offsets_bytecounts(self): + """Return simplified offsets and bytecounts.""" + if self.is_contiguous: + offset, byte_count = self.is_contiguous + return [offset], [byte_count] + return clean_offsets_counts(self.dataoffsets, self.databytecounts) + + @lazyattr + def is_contiguous(self): + """Return offset and size of contiguous data, else None. + + Excludes prediction and fill_order. 
+ + """ + if self.compression != 1 or self.bitspersample not in (8, 16, 32, 64): + return + if "TileWidth" in self.tags: + if ( + self.imagewidth != self.tilewidth + or self.imagelength % self.tilelength + or self.tilewidth % 16 + or self.tilelength % 16 + ): + return + if ( + "ImageDepth" in self.tags + and "TileDepth" in self.tags + and ( + self.imagelength != self.tilelength + or self.imagedepth % self.tiledepth + ) + ): + return + + offsets = self.dataoffsets + bytecounts = self.databytecounts + if len(offsets) == 1: + return offsets[0], bytecounts[0] + if self.is_stk or all( + ( + offsets[i] + bytecounts[i] == offsets[i + 1] or bytecounts[i + 1] == 0 + ) # no data/ignore offset + for i in range(len(offsets) - 1) + ): + return offsets[0], sum(bytecounts) + + @lazyattr + def is_final(self): + """Return if page's image data are stored in final form. + + Excludes byte-swapping. + + """ + return ( + self.is_contiguous + and self.fillorder == 1 + and self.predictor == 1 + and not self.is_chroma_subsampled + ) + + @lazyattr + def is_memmappable(self): + """Return if page's image data in file can be memory-mapped.""" + return ( + self.parent.filehandle.is_file + and self.is_final + and + # (self.bitspersample == 8 or self.parent.isnative) and + self.is_contiguous[0] % self.dtype.itemsize == 0 + ) # aligned? 
+ + def __str__(self, detail=0, width=79): + """Return string containing information about page.""" + if self.keyframe != self: + return TiffFrame.__str__(self, detail) + attr = "" + for name in ("memmappable", "final", "contiguous"): + attr = getattr(self, "is_" + name) + if attr: + attr = name.upper() + break + info = " ".join( + s + for s in ( + "x".join(str(i) for i in self.shape), + "%s%s" + % (TIFF.SAMPLEFORMAT(self.sampleformat).name, self.bitspersample), + "|".join( + i + for i in ( + TIFF.PHOTOMETRIC(self.photometric).name, + "TILED" if self.is_tiled else "", + self.compression.name if self.compression != 1 else "", + self.planarconfig.name if self.planarconfig != 1 else "", + self.predictor.name if self.predictor != 1 else "", + self.fillorder.name if self.fillorder != 1 else "", + ) + if i + ), + attr, + "|".join((f.upper() for f in self.flags)), + ) + if s + ) + info = "TiffPage %i @%i %s" % (self.index, self.offset, info) + if detail <= 0: + return info + info = [info] + tags = self.tags + tlines = [] + vlines = [] + for tag in sorted(tags.values(), key=lambda x: x.code): + value = tag.__str__(width=width + 1) + tlines.append(value[:width].strip()) + if detail > 1 and len(value) > width: + name = tag.name.upper() + if detail <= 2 and ("COUNTS" in name or "OFFSETS" in name): + value = pformat(tag.value, width=width, height=detail * 4) + else: + value = pformat(tag.value, width=width, height=detail * 12) + vlines.append("%s\n%s" % (tag.name, value)) + info.append("\n".join(tlines)) + if detail > 1: + info.append("\n\n".join(vlines)) + if detail > 3: + try: + info.append( + "DATA\n%s" % pformat(self.asarray(), width=width, height=detail * 8) + ) + except Exception: + pass + return "\n\n".join(info) + + @lazyattr + def flags(self): + """Return set of flags.""" + return set( + ( + name.lower() + for name in sorted(TIFF.FILE_FLAGS) + if getattr(self, "is_" + name) + ) + ) + + @property + def ndim(self): + """Return number of array dimensions.""" + return 
len(self.shape) + + @property + def size(self): + """Return number of elements in array.""" + return product(self.shape) + + @lazyattr + def andor_tags(self): + """Return consolidated metadata from Andor tags as dict. + + Remove Andor tags from self.tags. + + """ + if not self.is_andor: + return + tags = self.tags + result = {"Id": tags["AndorId"].value} + for tag in list(self.tags.values()): + code = tag.code + if not 4864 < code < 5031: + continue + value = tag.value + name = tag.name[5:] if len(tag.name) > 5 else tag.name + result[name] = value + del tags[tag.name] + return result + + @lazyattr + def epics_tags(self): + """Return consolidated metadata from EPICS areaDetector tags as dict. + + Remove areaDetector tags from self.tags. + + """ + if not self.is_epics: + return + result = {} + tags = self.tags + for tag in list(self.tags.values()): + code = tag.code + if not 65000 <= code < 65500: + continue + value = tag.value + if code == 65000: + result["timeStamp"] = datetime.datetime.fromtimestamp(float(value)) + elif code == 65001: + result["uniqueID"] = int(value) + elif code == 65002: + result["epicsTSSec"] = int(value) + elif code == 65003: + result["epicsTSNsec"] = int(value) + else: + key, value = value.split(":", 1) + result[key] = astype(value) + del tags[tag.name] + return result + + @lazyattr + def geotiff_tags(self): + """Return consolidated metadata from GeoTIFF tags as dict.""" + if not self.is_geotiff: + return + tags = self.tags + + gkd = tags["GeoKeyDirectoryTag"].value + if gkd[0] != 1: + warnings.warn("invalid GeoKeyDirectoryTag") + return {} + + result = { + "KeyDirectoryVersion": gkd[0], + "KeyRevision": gkd[1], + "KeyRevisionMinor": gkd[2], + # 'NumberOfKeys': gkd[3], + } + # deltags = ['GeoKeyDirectoryTag'] + geokeys = TIFF.GEO_KEYS + geocodes = TIFF.GEO_CODES + for index in range(gkd[3]): + keyid, tagid, count, offset = gkd[4 + index * 4 : index * 4 + 8] + keyid = geokeys.get(keyid, keyid) + if tagid == 0: + value = offset + else: + 
tagname = TIFF.TAGS[tagid] + # deltags.append(tagname) + value = tags[tagname].value[offset : offset + count] + if tagid == 34737 and count > 1 and value[-1] == "|": + value = value[:-1] + value = value if count > 1 else value[0] + if keyid in geocodes: + try: + value = geocodes[keyid](value) + except Exception: + pass + result[keyid] = value + + if "IntergraphMatrixTag" in tags: + value = tags["IntergraphMatrixTag"].value + value = numpy.array(value) + if len(value) == 16: + value = value.reshape((4, 4)).tolist() + result["IntergraphMatrix"] = value + if "ModelPixelScaleTag" in tags: + value = numpy.array(tags["ModelPixelScaleTag"].value).tolist() + result["ModelPixelScale"] = value + if "ModelTiepointTag" in tags: + value = tags["ModelTiepointTag"].value + value = numpy.array(value).reshape((-1, 6)).squeeze().tolist() + result["ModelTiepoint"] = value + if "ModelTransformationTag" in tags: + value = tags["ModelTransformationTag"].value + value = numpy.array(value).reshape((4, 4)).tolist() + result["ModelTransformation"] = value + elif False: + # if 'ModelPixelScaleTag' in tags and 'ModelTiepointTag' in tags: + sx, sy, sz = tags["ModelPixelScaleTag"].value + tiepoints = tags["ModelTiepointTag"].value + transforms = [] + for tp in range(0, len(tiepoints), 6): + i, j, k, x, y, z = tiepoints[tp : tp + 6] + transforms.append( + [ + [sx, 0.0, 0.0, x - i * sx], + [0.0, -sy, 0.0, y + j * sy], + [0.0, 0.0, sz, z - k * sz], + [0.0, 0.0, 0.0, 1.0], + ] + ) + if len(tiepoints) == 6: + transforms = transforms[0] + result["ModelTransformation"] = transforms + + if "RPCCoefficientTag" in tags: + rpcc = tags["RPCCoefficientTag"].value + result["RPCCoefficient"] = { + "ERR_BIAS": rpcc[0], + "ERR_RAND": rpcc[1], + "LINE_OFF": rpcc[2], + "SAMP_OFF": rpcc[3], + "LAT_OFF": rpcc[4], + "LONG_OFF": rpcc[5], + "HEIGHT_OFF": rpcc[6], + "LINE_SCALE": rpcc[7], + "SAMP_SCALE": rpcc[8], + "LAT_SCALE": rpcc[9], + "LONG_SCALE": rpcc[10], + "HEIGHT_SCALE": rpcc[11], + "LINE_NUM_COEFF": 
rpcc[12:33], + "LINE_DEN_COEFF ": rpcc[33:53], + "SAMP_NUM_COEFF": rpcc[53:73], + "SAMP_DEN_COEFF": rpcc[73:], + } + + return result + + @property + def is_tiled(self): + """Page contains tiled image.""" + return "TileWidth" in self.tags + + @property + def is_reduced(self): + """Page is reduced image of another image.""" + return "NewSubfileType" in self.tags and self.tags["NewSubfileType"].value & 1 + + @property + def is_chroma_subsampled(self): + """Page contains chroma subsampled image.""" + return "YCbCrSubSampling" in self.tags and self.tags[ + "YCbCrSubSampling" + ].value != (1, 1) + + @lazyattr + def is_imagej(self): + """Return ImageJ description if exists, else None.""" + for description in (self.description, self.description1): + if not description: + return + if description[:7] == "ImageJ=": + return description + + @lazyattr + def is_shaped(self): + """Return description containing array shape if exists, else None.""" + for description in (self.description, self.description1): + if not description: + return + if description[:1] == "{" and '"shape":' in description: + return description + if description[:6] == "shape=": + return description + + @property + def is_mdgel(self): + """Page contains MDFileTag tag.""" + return "MDFileTag" in self.tags + + @property + def is_mediacy(self): + """Page contains Media Cybernetics Id tag.""" + return "MC_Id" in self.tags and self.tags["MC_Id"].value[:7] == b"MC TIFF" + + @property + def is_stk(self): + """Page contains UIC2Tag tag.""" + return "UIC2tag" in self.tags + + @property + def is_lsm(self): + """Page contains CZ_LSMINFO tag.""" + return "CZ_LSMINFO" in self.tags + + @property + def is_fluoview(self): + """Page contains FluoView MM_STAMP tag.""" + return "MM_Stamp" in self.tags + + @property + def is_nih(self): + """Page contains NIH image header.""" + return "NIHImageHeader" in self.tags + + @property + def is_sgi(self): + """Page contains SGI image and tile depth tags.""" + return "ImageDepth" in 
self.tags and "TileDepth" in self.tags + + @property + def is_vista(self): + """Software tag is 'ISS Vista'.""" + return self.software == "ISS Vista" + + @property + def is_metaseries(self): + """Page contains MDS MetaSeries metadata in ImageDescription tag.""" + if self.index > 1 or self.software != "MetaSeries": + return False + d = self.description + return d.startswith("") and d.endswith("") + + @property + def is_ome(self): + """Page contains OME-XML in ImageDescription tag.""" + if self.index > 1 or not self.description: + return False + d = self.description + return d[:14] == "" + + @property + def is_scn(self): + """Page contains Leica SCN XML in ImageDescription tag.""" + if self.index > 1 or not self.description: + return False + d = self.description + return d[:14] == "" + + @property + def is_micromanager(self): + """Page contains Micro-Manager metadata.""" + return "MicroManagerMetadata" in self.tags + + @property + def is_andor(self): + """Page contains Andor Technology tags.""" + return "AndorId" in self.tags + + @property + def is_pilatus(self): + """Page contains Pilatus tags.""" + return self.software[:8] == "TVX TIFF" and self.description[:2] == "# " + + @property + def is_epics(self): + """Page contains EPICS areaDetector tags.""" + return ( + self.description == "EPICS areaDetector" + or self.software == "EPICS areaDetector" + ) + + @property + def is_tvips(self): + """Page contains TVIPS metadata.""" + return "TVIPS" in self.tags + + @property + def is_fei(self): + """Page contains SFEG or HELIOS metadata.""" + return "FEI_SFEG" in self.tags or "FEI_HELIOS" in self.tags + + @property + def is_sem(self): + """Page contains Zeiss SEM metadata.""" + return "CZ_SEM" in self.tags + + @property + def is_svs(self): + """Page contains Aperio metadata.""" + return self.description[:20] == "Aperio Image Library" + + @property + def is_scanimage(self): + """Page contains ScanImage metadata.""" + return ( + self.description[:12] == "state.config" + or 
self.software[:22] == "SI.LINE_FORMAT_VERSION" + or "scanimage.SI." in self.description[-256:] + ) + + @property + def is_qptiff(self): + """Page contains PerkinElmer tissue images metadata.""" + # The ImageDescription tag contains XML with a top-level + # element + return self.software[:15] == "PerkinElmer-QPI" + + @property + def is_geotiff(self): + """Page contains GeoTIFF metadata.""" + return "GeoKeyDirectoryTag" in self.tags + + +class TiffFrame(object): + """Lightweight TIFF image file directory (IFD). + + Only a limited number of tag values are read from file, e.g. StripOffsets, + and StripByteCounts. Other tag values are assumed to be identical with a + specified TiffPage instance, the keyframe. + + TiffFrame is intended to reduce resource usage and speed up reading data + from file, not for introspection of metadata. + + Not compatible with Python 2. + + """ + + __slots__ = ( + "keyframe", + "parent", + "index", + "offset", + "dataoffsets", + "databytecounts", + ) + + is_mdgel = False + tags = {} + + def __init__(self, parent, index, keyframe): + """Read specified tags from file. + + The file handle position must be at the offset to a valid IFD. 
+ + """ + self.keyframe = keyframe + self.parent = parent + self.index = index + self.dataoffsets = None + self.databytecounts = None + + unpack = struct.unpack + fh = parent.filehandle + self.offset = fh.tell() + try: + tagno = unpack(parent.tagnoformat, fh.read(parent.tagnosize))[0] + if tagno > 4096: + raise ValueError("suspicious number of tags") + except Exception: + raise ValueError("corrupted page list at offset %i" % self.offset) + + # tags = {} + tagcodes = {273, 279, 324, 325} # TIFF.FRAME_TAGS + tagsize = parent.tagsize + codeformat = parent.tagformat1[:2] + + data = fh.read(tagsize * tagno) + index = -tagsize + for _ in range(tagno): + index += tagsize + code = unpack(codeformat, data[index : index + 2])[0] + if code not in tagcodes: + continue + try: + tag = TiffTag(parent, data[index : index + tagsize]) + except TiffTag.Error as e: + warnings.warn(str(e)) + continue + if code == 273 or code == 324: + setattr(self, "dataoffsets", tag.value) + elif code == 279 or code == 325: + setattr(self, "databytecounts", tag.value) + # elif code == 270: + # tagname = tag.name + # if tagname not in tags: + # tags[tagname] = bytes2str(tag.value) + # elif 'ImageDescription1' not in tags: + # tags['ImageDescription1'] = bytes2str(tag.value) + # else: + # tags[tag.name] = tag.value + + def aspage(self): + """Return TiffPage from file.""" + self.parent.filehandle.seek(self.offset) + return TiffPage(self.parent, index=self.index, keyframe=None) + + def asarray(self, *args, **kwargs): + """Read image data from file and return as numpy array.""" + # TODO: fix TypeError on Python 2 + # "TypeError: unbound method asarray() must be called with TiffPage + # instance as first argument (got TiffFrame instance instead)" + kwargs["validate"] = False + return TiffPage.asarray(self, *args, **kwargs) + + def asrgb(self, *args, **kwargs): + """Read image data from file and return RGB image as numpy array.""" + kwargs["validate"] = False + return TiffPage.asrgb(self, *args, **kwargs) + 
+ @property + def offsets_bytecounts(self): + """Return simplified offsets and bytecounts.""" + if self.keyframe.is_contiguous: + return self.dataoffsets[:1], self.keyframe.is_contiguous[1:] + return clean_offsets_counts(self.dataoffsets, self.databytecounts) + + @property + def is_contiguous(self): + """Return offset and size of contiguous data, else None.""" + if self.keyframe.is_contiguous: + return self.dataoffsets[0], self.keyframe.is_contiguous[1] + + @property + def is_memmappable(self): + """Return if page's image data in file can be memory-mapped.""" + return self.keyframe.is_memmappable + + def __getattr__(self, name): + """Return attribute from keyframe.""" + if name in TIFF.FRAME_ATTRS: + return getattr(self.keyframe, name) + # this error could be raised because an AttributeError was + # raised inside a @property function + raise AttributeError( + "'%s' object has no attribute '%s'" % (self.__class__.__name__, name) + ) + + def __str__(self, detail=0): + """Return string containing information about frame.""" + info = " ".join( + s for s in ("x".join(str(i) for i in self.shape), str(self.dtype)) + ) + return "TiffFrame %i @%i %s" % (self.index, self.offset, info) + + +class TiffTag(object): + """TIFF tag structure. + + Attributes + ---------- + name : string + Name of tag. + code : int + Decimal code of tag. + dtype : str + Datatype of tag data. One of TIFF DATA_FORMATS. + count : int + Number of values. + value : various types + Tag data as Python object. + ImageSourceData : int + Location of value in file. + + All attributes are read-only. 
+ + """ + + __slots__ = ("code", "count", "dtype", "value", "valueoffset") + + class Error(Exception): + pass + + def __init__(self, parent, tagheader, **kwargs): + """Initialize instance from tag header.""" + fh = parent.filehandle + byteorder = parent.byteorder + unpack = struct.unpack + offsetsize = parent.offsetsize + + self.valueoffset = fh.tell() + offsetsize + 4 + code, type_ = unpack(parent.tagformat1, tagheader[:4]) + count, value = unpack(parent.tagformat2, tagheader[4:]) + + try: + dtype = TIFF.DATA_FORMATS[type_] + except KeyError: + raise TiffTag.Error("unknown tag data type %i" % type_) + + fmt = "%s%i%s" % (byteorder, count * int(dtype[0]), dtype[1]) + size = struct.calcsize(fmt) + if size > offsetsize or code in TIFF.TAG_READERS: + self.valueoffset = offset = unpack(parent.offsetformat, value)[0] + if offset < 8 or offset > fh.size - size: + raise TiffTag.Error("invalid tag value offset") + # if offset % 2: + # warnings.warn('tag value does not begin on word boundary') + fh.seek(offset) + if code in TIFF.TAG_READERS: + readfunc = TIFF.TAG_READERS[code] + value = readfunc(fh, byteorder, dtype, count, offsetsize) + elif type_ == 7 or (count > 1 and dtype[-1] == "B"): + value = read_bytes(fh, byteorder, dtype, count, offsetsize) + elif code in TIFF.TAGS or dtype[-1] == "s": + value = unpack(fmt, fh.read(size)) + else: + value = read_numpy(fh, byteorder, dtype, count, offsetsize) + elif dtype[-1] == "B" or type_ == 7: + value = value[:size] + else: + value = unpack(fmt, value[:size]) + + process = ( + code not in TIFF.TAG_READERS and code not in TIFF.TAG_TUPLE and type_ != 7 + ) + if process and dtype[-1] == "s" and isinstance(value[0], bytes): + # TIFF ASCII fields can contain multiple strings, + # each terminated with a NUL + value = value[0] + try: + value = bytes2str(stripascii(value).strip()) + except UnicodeDecodeError: + warnings.warn("tag %i: coercing invalid ASCII to bytes" % code) + dtype = "1B" + else: + if code in TIFF.TAG_ENUM: + t = 
TIFF.TAG_ENUM[code] + try: + value = tuple(t(v) for v in value) + except ValueError as e: + warnings.warn(str(e)) + if process: + if len(value) == 1: + value = value[0] + + self.code = code + self.dtype = dtype + self.count = count + self.value = value + + @property + def name(self): + return TIFF.TAGS.get(self.code, str(self.code)) + + def _fix_lsm_bitspersample(self, parent): + """Correct LSM bitspersample tag. + + Old LSM writers may use a separate region for two 16-bit values, + although they fit into the tag value element of the tag. + + """ + if self.code == 258 and self.count == 2: + # TODO: test this case; need example file + warnings.warn("correcting LSM bitspersample tag") + tof = parent.offsetformat[parent.offsetsize] + self.valueoffset = struct.unpack(tof, self._value)[0] + parent.filehandle.seek(self.valueoffset) + self.value = struct.unpack(">> # read image stack from sequence of TIFF files + >>> imsave('temp_C001T001.tif', numpy.random.rand(64, 64)) + >>> imsave('temp_C001T002.tif', numpy.random.rand(64, 64)) + >>> tifs = TiffSequence('temp_C001*.tif') + >>> tifs.shape + (1, 2) + >>> tifs.axes + 'CT' + >>> data = tifs.asarray() + >>> data.shape + (1, 2, 64, 64) + + """ + + _patterns = { + "axes": r""" + # matches Olympus OIF and Leica TIFF series + _?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4})) + _?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))? + _?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))? + _?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))? + _?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))? + _?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))? + _?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))? + """ + } + + class ParseError(Exception): + pass + + def __init__(self, files, imread=TiffFile, pattern="axes", *args, **kwargs): + """Initialize instance from multiple files. + + Parameters + ---------- + files : str, pathlib.Path, or sequence thereof + Glob pattern or sequence of file names. + Binary streams are not supported. 
+ imread : function or class + Image read function or class with asarray function returning numpy + array from single file. + pattern : str + Regular expression pattern that matches axes names and sequence + indices in file names. + By default, the pattern matches Olympus OIF and Leica TIFF series. + + """ + if isinstance(files, pathlib.Path): + files = str(files) + if isinstance(files, basestring): + files = natural_sorted(glob.glob(files)) + files = list(files) + if not files: + raise ValueError("no files found") + if isinstance(files[0], pathlib.Path): + files = [str(pathlib.Path(f)) for f in files] + elif not isinstance(files[0], basestring): + raise ValueError("not a file name") + self.files = files + + if hasattr(imread, "asarray"): + # redefine imread + _imread = imread + + def imread(fname, *args, **kwargs): + with _imread(fname) as im: + return im.asarray(*args, **kwargs) + + self.imread = imread + + self.pattern = self._patterns.get(pattern, pattern) + try: + self._parse() + if not self.axes: + self.axes = "I" + except self.ParseError: + self.axes = "I" + self.shape = (len(files),) + self._startindex = (0,) + self._indices = tuple((i,) for i in range(len(files))) + + def __str__(self): + """Return string with information about image sequence.""" + return "\n".join( + [ + self.files[0], + " size: %i" % len(self.files), + " axes: %s" % self.axes, + " shape: %s" % str(self.shape), + ] + ) + + def __len__(self): + return len(self.files) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + + def close(self): + pass + + def asarray(self, out=None, *args, **kwargs): + """Read image data from all files and return as numpy array. + + The args and kwargs parameters are passed to the imread function. + + Raise IndexError or ValueError if image shapes do not match. 
+ + """ + im = self.imread(self.files[0], *args, **kwargs) + shape = self.shape + im.shape + result = create_output(out, shape, dtype=im.dtype) + result = result.reshape(-1, *im.shape) + for index, fname in zip(self._indices, self.files): + index = [i - j for i, j in zip(index, self._startindex)] + index = numpy.ravel_multi_index(index, self.shape) + im = self.imread(fname, *args, **kwargs) + result[index] = im + result.shape = shape + return result + + def _parse(self): + """Get axes and shape from file names.""" + if not self.pattern: + raise self.ParseError("invalid pattern") + pattern = re.compile(self.pattern, re.IGNORECASE | re.VERBOSE) + matches = pattern.findall(self.files[0]) + if not matches: + raise self.ParseError("pattern does not match file names") + matches = matches[-1] + if len(matches) % 2: + raise self.ParseError("pattern does not match axis name and index") + axes = "".join(m for m in matches[::2] if m) + if not axes: + raise self.ParseError("pattern does not match file names") + + indices = [] + for fname in self.files: + matches = pattern.findall(fname)[-1] + if axes != "".join(m for m in matches[::2] if m): + raise ValueError("axes do not match within the image sequence") + indices.append([int(m) for m in matches[1::2] if m]) + shape = tuple(numpy.max(indices, axis=0)) + startindex = tuple(numpy.min(indices, axis=0)) + shape = tuple(i - j + 1 for i, j in zip(shape, startindex)) + if product(shape) != len(self.files): + warnings.warn("files are missing. Missing data are zeroed") + + self.axes = axes.upper() + self.shape = shape + self._indices = indices + self._startindex = startindex + + +class FileHandle(object): + """Binary file handle. + + A limited, special purpose file handler that can: + + * handle embedded files (for CZI within CZI files) + * re-open closed files (for multi-file formats, such as OME-TIFF) + * read and write numpy arrays and records from file like objects + + Only 'rb' and 'wb' modes are supported. 
Concurrently reading and writing + of the same stream is untested. + + When initialized from another file handle, do not use it unless this + FileHandle is closed. + + Attributes + ---------- + name : str + Name of the file. + path : str + Absolute path to file. + size : int + Size of file in bytes. + is_file : bool + If True, file has a filno and can be memory-mapped. + + All attributes are read-only. + + """ + + __slots__ = ( + "_fh", + "_file", + "_mode", + "_name", + "_dir", + "_lock", + "_offset", + "_size", + "_close", + "is_file", + ) + + def __init__(self, file, mode="rb", name=None, offset=None, size=None): + """Initialize file handle from file name or another file handle. + + Parameters + ---------- + file : str, pathlib.Path, binary stream, or FileHandle + File name or seekable binary stream, such as an open file + or BytesIO. + mode : str + File open mode in case 'file' is a file name. Must be 'rb' or 'wb'. + name : str + Optional name of file in case 'file' is a binary stream. + offset : int + Optional start position of embedded file. By default, this is + the current file position. + size : int + Optional size of embedded file. By default, this is the number + of bytes from the 'offset' to the end of the file. 
+ + """ + self._file = file + self._fh = None + self._mode = mode + self._name = name + self._dir = "" + self._offset = offset + self._size = size + self._close = True + self.is_file = False + self._lock = NullContext() + self.open() + + def open(self): + """Open or re-open file.""" + if self._fh: + return # file is open + + if isinstance(self._file, pathlib.Path): + self._file = str(self._file) + if isinstance(self._file, basestring): + # file name + self._file = os.path.realpath(self._file) + self._dir, self._name = os.path.split(self._file) + self._fh = open(self._file, self._mode) + self._close = True + if self._offset is None: + self._offset = 0 + elif isinstance(self._file, FileHandle): + # FileHandle + self._fh = self._file._fh + if self._offset is None: + self._offset = 0 + self._offset += self._file._offset + self._close = False + if not self._name: + if self._offset: + name, ext = os.path.splitext(self._file._name) + self._name = "%s@%i%s" % (name, self._offset, ext) + else: + self._name = self._file._name + if self._mode and self._mode != self._file._mode: + raise ValueError("FileHandle has wrong mode") + self._mode = self._file._mode + self._dir = self._file._dir + elif hasattr(self._file, "seek"): + # binary stream: open file, BytesIO + try: + self._file.tell() + except Exception: + raise ValueError("binary stream is not seekable") + self._fh = self._file + if self._offset is None: + self._offset = self._file.tell() + self._close = False + if not self._name: + try: + self._dir, self._name = os.path.split(self._fh.name) + except AttributeError: + self._name = "Unnamed binary stream" + try: + self._mode = self._fh.mode + except AttributeError: + pass + else: + raise ValueError( + "The first parameter must be a file name, " + "seekable binary stream, or FileHandle" + ) + + if self._offset: + self._fh.seek(self._offset) + + if self._size is None: + pos = self._fh.tell() + self._fh.seek(self._offset, 2) + self._size = self._fh.tell() + self._fh.seek(pos) + 
+ try: + self._fh.fileno() + self.is_file = True + except Exception: + self.is_file = False + + def read(self, size=-1): + """Read 'size' bytes from file, or until EOF is reached.""" + if size < 0 and self._offset: + size = self._size + return self._fh.read(size) + + def write(self, bytestring): + """Write bytestring to file.""" + return self._fh.write(bytestring) + + def flush(self): + """Flush write buffers if applicable.""" + return self._fh.flush() + + def memmap_array(self, dtype, shape, offset=0, mode="r", order="C"): + """Return numpy.memmap of data stored in file.""" + if not self.is_file: + raise ValueError("Cannot memory-map file without fileno") + return numpy.memmap( + self._fh, + dtype=dtype, + mode=mode, + offset=self._offset + offset, + shape=shape, + order=order, + ) + + def read_array( + self, dtype, count=-1, sep="", chunksize=2**25, out=None, native=False + ): + """Return numpy array from file. + + Work around numpy issue #2230, "numpy.fromfile does not accept + StringIO object" https://github.com/numpy/numpy/issues/2230. 
+ + """ + fh = self._fh + dtype = numpy.dtype(dtype) + size = self._size if count < 0 else count * dtype.itemsize + + if out is None: + try: + result = numpy.fromfile(fh, dtype, count, sep) + except IOError: + # ByteIO + data = fh.read(size) + result = numpy.frombuffer(data, dtype, count).copy() + if native and not result.dtype.isnative: + # swap byte order and dtype without copy + result.byteswap(True) + result = result.newbyteorder() + return result + + # Read data from file in chunks and copy to output array + shape = out.shape + size = min(out.nbytes, size) + out = out.reshape(-1) + index = 0 + while size > 0: + data = fh.read(min(chunksize, size)) + datasize = len(data) + if datasize == 0: + break + size -= datasize + data = numpy.frombuffer(data, dtype) + out[index : index + data.size] = data + index += data.size + + if hasattr(out, "flush"): + out.flush() + return out.reshape(shape) + + def read_record(self, dtype, shape=1, byteorder=None): + """Return numpy record from file.""" + rec = numpy.rec + try: + record = rec.fromfile(self._fh, dtype, shape, byteorder=byteorder) + except Exception: + dtype = numpy.dtype(dtype) + if shape is None: + shape = self._size // dtype.itemsize + size = product(sequence(shape)) * dtype.itemsize + data = self._fh.read(size) + record = rec.fromstring(data, dtype, shape, byteorder=byteorder) + return record[0] if shape == 1 else record + + def write_empty(self, size): + """Append size bytes to file. 
Position must be at end of file.""" + if size < 1: + return + self._fh.seek(size - 1, 1) + self._fh.write(b"\x00") + + def write_array(self, data): + """Write numpy array to binary file.""" + try: + data.tofile(self._fh) + except Exception: + # BytesIO + self._fh.write(data.tostring()) + + def tell(self): + """Return file's current position.""" + return self._fh.tell() - self._offset + + def seek(self, offset, whence=0): + """Set file's current position.""" + if self._offset: + if whence == 0: + self._fh.seek(self._offset + offset, whence) + return + elif whence == 2 and self._size > 0: + self._fh.seek(self._offset + self._size + offset, 0) + return + self._fh.seek(offset, whence) + + def close(self): + """Close file.""" + if self._close and self._fh: + self._fh.close() + self._fh = None + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + + def __getattr__(self, name): + """Return attribute from underlying file object.""" + if self._offset: + warnings.warn("FileHandle: '%s' not implemented for embedded files" % name) + return getattr(self._fh, name) + + @property + def name(self): + return self._name + + @property + def dirname(self): + return self._dir + + @property + def path(self): + return os.path.join(self._dir, self._name) + + @property + def size(self): + return self._size + + @property + def closed(self): + return self._fh is None + + @property + def lock(self): + return self._lock + + @lock.setter + def lock(self, value): + self._lock = threading.RLock() if value else NullContext() + + +class NullContext(object): + """Null context manager. + + >>> with NullContext(): + ... 
pass + + """ + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + pass + + +class OpenFileCache(object): + """Keep files open.""" + + __slots__ = ("files", "past", "lock", "size") + + def __init__(self, size, lock=None): + """Initialize open file cache.""" + self.past = [] # FIFO of opened files + self.files = {} # refcounts of opened files + self.lock = NullContext() if lock is None else lock + self.size = int(size) + + def open(self, filehandle): + """Re-open file if necessary.""" + with self.lock: + if filehandle in self.files: + self.files[filehandle] += 1 + elif filehandle.closed: + filehandle.open() + self.files[filehandle] = 1 + self.past.append(filehandle) + + def close(self, filehandle): + """Close opened file if no longer used.""" + with self.lock: + if filehandle in self.files: + self.files[filehandle] -= 1 + # trim the file cache + index = 0 + size = len(self.past) + while size > self.size and index < size: + filehandle = self.past[index] + if self.files[filehandle] == 0: + filehandle.close() + del self.files[filehandle] + del self.past[index] + size -= 1 + else: + index += 1 + + def clear(self): + """Close all opened files if not in use.""" + with self.lock: + for filehandle, refcount in list(self.files.items()): + if refcount == 0: + filehandle.close() + del self.files[filehandle] + del self.past[self.past.index(filehandle)] + + +class LazyConst(object): + """Class whose attributes are computed on first access from its methods.""" + + def __init__(self, cls): + self._cls = cls + self.__doc__ = getattr(cls, "__doc__") + + def __getattr__(self, name): + func = getattr(self._cls, name) + if not callable(func): + return func + try: + value = func() + except TypeError: + # Python 2 unbound method + value = func.__func__() + setattr(self, name, value) + return value + + +@LazyConst +class TIFF(object): + """Namespace for module constants.""" + + def TAGS(): + # TIFF tag codes and names from TIFF6, TIFF/EP, EXIF, 
and other specs + return { + 11: "ProcessingSoftware", + 254: "NewSubfileType", + 255: "SubfileType", + 256: "ImageWidth", + 257: "ImageLength", + 258: "BitsPerSample", + 259: "Compression", + 262: "PhotometricInterpretation", + 263: "Thresholding", + 264: "CellWidth", + 265: "CellLength", + 266: "FillOrder", + 269: "DocumentName", + 270: "ImageDescription", + 271: "Make", + 272: "Model", + 273: "StripOffsets", + 274: "Orientation", + 277: "SamplesPerPixel", + 278: "RowsPerStrip", + 279: "StripByteCounts", + 280: "MinSampleValue", + 281: "MaxSampleValue", + 282: "XResolution", + 283: "YResolution", + 284: "PlanarConfiguration", + 285: "PageName", + 286: "XPosition", + 287: "YPosition", + 288: "FreeOffsets", + 289: "FreeByteCounts", + 290: "GrayResponseUnit", + 291: "GrayResponseCurve", + 292: "T4Options", + 293: "T6Options", + 296: "ResolutionUnit", + 297: "PageNumber", + 300: "ColorResponseUnit", + 301: "TransferFunction", + 305: "Software", + 306: "DateTime", + 315: "Artist", + 316: "HostComputer", + 317: "Predictor", + 318: "WhitePoint", + 319: "PrimaryChromaticities", + 320: "ColorMap", + 321: "HalftoneHints", + 322: "TileWidth", + 323: "TileLength", + 324: "TileOffsets", + 325: "TileByteCounts", + 326: "BadFaxLines", + 327: "CleanFaxData", + 328: "ConsecutiveBadFaxLines", + 330: "SubIFDs", + 332: "InkSet", + 333: "InkNames", + 334: "NumberOfInks", + 336: "DotRange", + 337: "TargetPrinter", + 338: "ExtraSamples", + 339: "SampleFormat", + 340: "SMinSampleValue", + 341: "SMaxSampleValue", + 342: "TransferRange", + 343: "ClipPath", + 344: "XClipPathUnits", + 345: "YClipPathUnits", + 346: "Indexed", + 347: "JPEGTables", + 351: "OPIProxy", + 400: "GlobalParametersIFD", + 401: "ProfileType", + 402: "FaxProfile", + 403: "CodingMethods", + 404: "VersionYear", + 405: "ModeNumber", + 433: "Decode", + 434: "DefaultImageColor", + 435: "T82Options", + 437: "JPEGTables_", # 347 + 512: "JPEGProc", + 513: "JPEGInterchangeFormat", + 514: "JPEGInterchangeFormatLength", + 515: 
"JPEGRestartInterval", + 517: "JPEGLosslessPredictors", + 518: "JPEGPointTransforms", + 519: "JPEGQTables", + 520: "JPEGDCTables", + 521: "JPEGACTables", + 529: "YCbCrCoefficients", + 530: "YCbCrSubSampling", + 531: "YCbCrPositioning", + 532: "ReferenceBlackWhite", + 559: "StripRowCounts", + 700: "XMP", # XMLPacket + 769: "GDIGamma", # GDI+ + 770: "ICCProfileDescriptor", # GDI+ + 771: "SRGBRenderingIntent", # GDI+ + 800: "ImageTitle", # GDI+ + 999: "USPTO_Miscellaneous", + 4864: "AndorId", # TODO: Andor Technology 4864 - 5030 + 4869: "AndorTemperature", + 4876: "AndorExposureTime", + 4878: "AndorKineticCycleTime", + 4879: "AndorAccumulations", + 4881: "AndorAcquisitionCycleTime", + 4882: "AndorReadoutTime", + 4884: "AndorPhotonCounting", + 4885: "AndorEmDacLevel", + 4890: "AndorFrames", + 4896: "AndorHorizontalFlip", + 4897: "AndorVerticalFlip", + 4898: "AndorClockwise", + 4899: "AndorCounterClockwise", + 4904: "AndorVerticalClockVoltage", + 4905: "AndorVerticalShiftSpeed", + 4907: "AndorPreAmpSetting", + 4908: "AndorCameraSerial", + 4911: "AndorActualTemperature", + 4912: "AndorBaselineClamp", + 4913: "AndorPrescans", + 4914: "AndorModel", + 4915: "AndorChipSizeX", + 4916: "AndorChipSizeY", + 4944: "AndorBaselineOffset", + 4966: "AndorSoftwareVersion", + 18246: "Rating", + 18247: "XP_DIP_XML", + 18248: "StitchInfo", + 18249: "RatingPercent", + 20481: "ResolutionXUnit", # GDI+ + 20482: "ResolutionYUnit", # GDI+ + 20483: "ResolutionXLengthUnit", # GDI+ + 20484: "ResolutionYLengthUnit", # GDI+ + 20485: "PrintFlags", # GDI+ + 20486: "PrintFlagsVersion", # GDI+ + 20487: "PrintFlagsCrop", # GDI+ + 20488: "PrintFlagsBleedWidth", # GDI+ + 20489: "PrintFlagsBleedWidthScale", # GDI+ + 20490: "HalftoneLPI", # GDI+ + 20491: "HalftoneLPIUnit", # GDI+ + 20492: "HalftoneDegree", # GDI+ + 20493: "HalftoneShape", # GDI+ + 20494: "HalftoneMisc", # GDI+ + 20495: "HalftoneScreen", # GDI+ + 20496: "JPEGQuality", # GDI+ + 20497: "GridSize", # GDI+ + 20498: "ThumbnailFormat", # GDI+ + 
20499: "ThumbnailWidth", # GDI+ + 20500: "ThumbnailHeight", # GDI+ + 20501: "ThumbnailColorDepth", # GDI+ + 20502: "ThumbnailPlanes", # GDI+ + 20503: "ThumbnailRawBytes", # GDI+ + 20504: "ThumbnailSize", # GDI+ + 20505: "ThumbnailCompressedSize", # GDI+ + 20506: "ColorTransferFunction", # GDI+ + 20507: "ThumbnailData", + 20512: "ThumbnailImageWidth", # GDI+ + 20513: "ThumbnailImageHeight", # GDI+ + 20514: "ThumbnailBitsPerSample", # GDI+ + 20515: "ThumbnailCompression", + 20516: "ThumbnailPhotometricInterp", # GDI+ + 20517: "ThumbnailImageDescription", # GDI+ + 20518: "ThumbnailEquipMake", # GDI+ + 20519: "ThumbnailEquipModel", # GDI+ + 20520: "ThumbnailStripOffsets", # GDI+ + 20521: "ThumbnailOrientation", # GDI+ + 20522: "ThumbnailSamplesPerPixel", # GDI+ + 20523: "ThumbnailRowsPerStrip", # GDI+ + 20524: "ThumbnailStripBytesCount", # GDI+ + 20525: "ThumbnailResolutionX", + 20526: "ThumbnailResolutionY", + 20527: "ThumbnailPlanarConfig", # GDI+ + 20528: "ThumbnailResolutionUnit", + 20529: "ThumbnailTransferFunction", + 20530: "ThumbnailSoftwareUsed", # GDI+ + 20531: "ThumbnailDateTime", # GDI+ + 20532: "ThumbnailArtist", # GDI+ + 20533: "ThumbnailWhitePoint", # GDI+ + 20534: "ThumbnailPrimaryChromaticities", # GDI+ + 20535: "ThumbnailYCbCrCoefficients", # GDI+ + 20536: "ThumbnailYCbCrSubsampling", # GDI+ + 20537: "ThumbnailYCbCrPositioning", + 20538: "ThumbnailRefBlackWhite", # GDI+ + 20539: "ThumbnailCopyRight", # GDI+ + 20545: "InteroperabilityIndex", + 20546: "InteroperabilityVersion", + 20624: "LuminanceTable", + 20625: "ChrominanceTable", + 20736: "FrameDelay", # GDI+ + 20737: "LoopCount", # GDI+ + 20738: "GlobalPalette", # GDI+ + 20739: "IndexBackground", # GDI+ + 20740: "IndexTransparent", # GDI+ + 20752: "PixelUnit", # GDI+ + 20753: "PixelPerUnitX", # GDI+ + 20754: "PixelPerUnitY", # GDI+ + 20755: "PaletteHistogram", # GDI+ + 28672: "SonyRawFileType", # Sony ARW + 28722: "VignettingCorrParams", # Sony ARW + 28725: "ChromaticAberrationCorrParams", # Sony 
ARW + 28727: "DistortionCorrParams", # Sony ARW + # Private tags >= 32768 + 32781: "ImageID", + 32931: "WangTag1", + 32932: "WangAnnotation", + 32933: "WangTag3", + 32934: "WangTag4", + 32953: "ImageReferencePoints", + 32954: "RegionXformTackPoint", + 32955: "WarpQuadrilateral", + 32956: "AffineTransformMat", + 32995: "Matteing", + 32996: "DataType", + 32997: "ImageDepth", + 32998: "TileDepth", + 33300: "ImageFullWidth", + 33301: "ImageFullLength", + 33302: "TextureFormat", + 33303: "TextureWrapModes", + 33304: "FieldOfViewCotangent", + 33305: "MatrixWorldToScreen", + 33306: "MatrixWorldToCamera", + 33405: "Model2", + 33421: "CFARepeatPatternDim", + 33422: "CFAPattern", + 33423: "BatteryLevel", + 33424: "KodakIFD", + 33434: "ExposureTime", + 33437: "FNumber", + 33432: "Copyright", + 33445: "MDFileTag", + 33446: "MDScalePixel", + 33447: "MDColorTable", + 33448: "MDLabName", + 33449: "MDSampleInfo", + 33450: "MDPrepDate", + 33451: "MDPrepTime", + 33452: "MDFileUnits", + 33550: "ModelPixelScaleTag", + 33589: "AdventScale", + 33590: "AdventRevision", + 33628: "UIC1tag", # Metamorph Universal Imaging Corp STK + 33629: "UIC2tag", + 33630: "UIC3tag", + 33631: "UIC4tag", + 33723: "IPTCNAA", + 33858: "ExtendedTagsOffset", # DEFF points IFD with private tags + 33918: "IntergraphPacketData", # INGRPacketDataTag + 33919: "IntergraphFlagRegisters", # INGRFlagRegisters + 33920: "IntergraphMatrixTag", # IrasBTransformationMatrix + 33921: "INGRReserved", + 33922: "ModelTiepointTag", + 33923: "LeicaMagic", + 34016: "Site", + 34017: "ColorSequence", + 34018: "IT8Header", + 34019: "RasterPadding", + 34020: "BitsPerRunLength", + 34021: "BitsPerExtendedRunLength", + 34022: "ColorTable", + 34023: "ImageColorIndicator", + 34024: "BackgroundColorIndicator", + 34025: "ImageColorValue", + 34026: "BackgroundColorValue", + 34027: "PixelIntensityRange", + 34028: "TransparencyIndicator", + 34029: "ColorCharacterization", + 34030: "HCUsage", + 34031: "TrapIndicator", + 34032: "CMYKEquivalent", + 
34118: "CZ_SEM", # Zeiss SEM + 34152: "AFCP_IPTC", + 34232: "PixelMagicJBIGOptions", + 34263: "JPLCartoIFD", + 34122: "IPLAB", # number of images + 34264: "ModelTransformationTag", + 34306: "WB_GRGBLevels", # Leaf MOS + 34310: "LeafData", + 34361: "MM_Header", + 34362: "MM_Stamp", + 34363: "MM_Unknown", + 34377: "ImageResources", # Photoshop + 34386: "MM_UserBlock", + 34412: "CZ_LSMINFO", + 34665: "ExifTag", + 34675: "InterColorProfile", # ICCProfile + 34680: "FEI_SFEG", # + 34682: "FEI_HELIOS", # + 34683: "FEI_TITAN", # + 34687: "FXExtensions", + 34688: "MultiProfiles", + 34689: "SharedData", + 34690: "T88Options", + 34710: "MarCCD", # offset to MarCCD header + 34732: "ImageLayer", + 34735: "GeoKeyDirectoryTag", + 34736: "GeoDoubleParamsTag", + 34737: "GeoAsciiParamsTag", + 34750: "JBIGOptions", + 34821: "PIXTIFF", # ? Pixel Translations Inc + 34850: "ExposureProgram", + 34852: "SpectralSensitivity", + 34853: "GPSTag", # GPSIFD + 34855: "ISOSpeedRatings", + 34856: "OECF", + 34857: "Interlace", + 34858: "TimeZoneOffset", + 34859: "SelfTimerMode", + 34864: "SensitivityType", + 34865: "StandardOutputSensitivity", + 34866: "RecommendedExposureIndex", + 34867: "ISOSpeed", + 34868: "ISOSpeedLatitudeyyy", + 34869: "ISOSpeedLatitudezzz", + 34908: "HylaFAXFaxRecvParams", + 34909: "HylaFAXFaxSubAddress", + 34910: "HylaFAXFaxRecvTime", + 34911: "FaxDcs", + 34929: "FedexEDR", + 34954: "LeafSubIFD", + 34959: "Aphelion1", + 34960: "Aphelion2", + 34961: "AphelionInternal", # ADCIS + 36864: "ExifVersion", + 36867: "DateTimeOriginal", + 36868: "DateTimeDigitized", + 36873: "GooglePlusUploadCode", + 36880: "OffsetTime", + 36881: "OffsetTimeOriginal", + 36882: "OffsetTimeDigitized", + # TODO: Pilatus/CHESS/TV6 36864..37120 conflicting with Exif tags + # 36864: 'TVX ?', + # 36865: 'TVX_NumExposure', + # 36866: 'TVX_NumBackground', + # 36867: 'TVX_ExposureTime', + # 36868: 'TVX_BackgroundTime', + # 36870: 'TVX ?', + # 36873: 'TVX_SubBpp', + # 36874: 'TVX_SubWide', + # 36875: 
'TVX_SubHigh', + # 36876: 'TVX_BlackLevel', + # 36877: 'TVX_DarkCurrent', + # 36878: 'TVX_ReadNoise', + # 36879: 'TVX_DarkCurrentNoise', + # 36880: 'TVX_BeamMonitor', + # 37120: 'TVX_UserVariables', # A/D values + 37121: "ComponentsConfiguration", + 37122: "CompressedBitsPerPixel", + 37377: "ShutterSpeedValue", + 37378: "ApertureValue", + 37379: "BrightnessValue", + 37380: "ExposureBiasValue", + 37381: "MaxApertureValue", + 37382: "SubjectDistance", + 37383: "MeteringMode", + 37384: "LightSource", + 37385: "Flash", + 37386: "FocalLength", + 37387: "FlashEnergy_", # 37387 + 37388: "SpatialFrequencyResponse_", # 37388 + 37389: "Noise", + 37390: "FocalPlaneXResolution", + 37391: "FocalPlaneYResolution", + 37392: "FocalPlaneResolutionUnit", + 37393: "ImageNumber", + 37394: "SecurityClassification", + 37395: "ImageHistory", + 37396: "SubjectLocation", + 37397: "ExposureIndex", + 37398: "TIFFEPStandardID", + 37399: "SensingMethod", + 37434: "CIP3DataFile", + 37435: "CIP3Sheet", + 37436: "CIP3Side", + 37439: "StoNits", + 37500: "MakerNote", + 37510: "UserComment", + 37520: "SubsecTime", + 37521: "SubsecTimeOriginal", + 37522: "SubsecTimeDigitized", + 37679: "MODIText", # Microsoft Office Document Imaging + 37680: "MODIOLEPropertySetStorage", + 37681: "MODIPositioning", + 37706: "TVIPS", # offset to TemData structure + 37707: "TVIPS1", + 37708: "TVIPS2", # same TemData structure as undefined + 37724: "ImageSourceData", # Photoshop + 37888: "Temperature", + 37889: "Humidity", + 37890: "Pressure", + 37891: "WaterDepth", + 37892: "Acceleration", + 37893: "CameraElevationAngle", + 40001: "MC_IpWinScal", # Media Cybernetics + 40100: "MC_IdOld", + 40965: "InteroperabilityTag", # InteropOffset + 40091: "XPTitle", + 40092: "XPComment", + 40093: "XPAuthor", + 40094: "XPKeywords", + 40095: "XPSubject", + 40960: "FlashpixVersion", + 40961: "ColorSpace", + 40962: "PixelXDimension", + 40963: "PixelYDimension", + 40964: "RelatedSoundFile", + 40976: "SamsungRawPointersOffset", + 40977: 
"SamsungRawPointersLength", + 41217: "SamsungRawByteOrder", + 41218: "SamsungRawUnknown", + 41483: "FlashEnergy", + 41484: "SpatialFrequencyResponse", + 41485: "Noise_", # 37389 + 41486: "FocalPlaneXResolution_", # 37390 + 41487: "FocalPlaneYResolution_", # 37391 + 41488: "FocalPlaneResolutionUnit_", # 37392 + 41489: "ImageNumber_", # 37393 + 41490: "SecurityClassification_", # 37394 + 41491: "ImageHistory_", # 37395 + 41492: "SubjectLocation_", # 37395 + 41493: "ExposureIndex_ ", # 37397 + 41494: "TIFF-EPStandardID", + 41495: "SensingMethod_", # 37399 + 41728: "FileSource", + 41729: "SceneType", + 41730: "CFAPattern_", # 33422 + 41985: "CustomRendered", + 41986: "ExposureMode", + 41987: "WhiteBalance", + 41988: "DigitalZoomRatio", + 41989: "FocalLengthIn35mmFilm", + 41990: "SceneCaptureType", + 41991: "GainControl", + 41992: "Contrast", + 41993: "Saturation", + 41994: "Sharpness", + 41995: "DeviceSettingDescription", + 41996: "SubjectDistanceRange", + 42016: "ImageUniqueID", + 42032: "CameraOwnerName", + 42033: "BodySerialNumber", + 42034: "LensSpecification", + 42035: "LensMake", + 42036: "LensModel", + 42037: "LensSerialNumber", + 42112: "GDAL_METADATA", + 42113: "GDAL_NODATA", + 42240: "Gamma", + 43314: "NIHImageHeader", + 44992: "ExpandSoftware", + 44993: "ExpandLens", + 44994: "ExpandFilm", + 44995: "ExpandFilterLens", + 44996: "ExpandScanner", + 44997: "ExpandFlashLamp", + 48129: "PixelFormat", # HDP and WDP + 48130: "Transformation", + 48131: "Uncompressed", + 48132: "ImageType", + 48256: "ImageWidth_", # 256 + 48257: "ImageHeight_", + 48258: "WidthResolution", + 48259: "HeightResolution", + 48320: "ImageOffset", + 48321: "ImageByteCount", + 48322: "AlphaOffset", + 48323: "AlphaByteCount", + 48324: "ImageDataDiscard", + 48325: "AlphaDataDiscard", + 50215: "OceScanjobDescription", + 50216: "OceApplicationSelector", + 50217: "OceIdentificationNumber", + 50218: "OceImageLogicCharacteristics", + 50255: "Annotations", + 50288: "MC_Id", # Media Cybernetics + 
50289: "MC_XYPosition", + 50290: "MC_ZPosition", + 50291: "MC_XYCalibration", + 50292: "MC_LensCharacteristics", + 50293: "MC_ChannelName", + 50294: "MC_ExcitationWavelength", + 50295: "MC_TimeStamp", + 50296: "MC_FrameProperties", + 50341: "PrintImageMatching", + 50495: "PCO_RAW", # TODO: PCO CamWare + 50547: "OriginalFileName", + 50560: "USPTO_OriginalContentType", # US Patent Office + 50561: "USPTO_RotationCode", + 50656: "CR2CFAPattern", + 50706: "DNGVersion", # DNG 50706 .. 51112 + 50707: "DNGBackwardVersion", + 50708: "UniqueCameraModel", + 50709: "LocalizedCameraModel", + 50710: "CFAPlaneColor", + 50711: "CFALayout", + 50712: "LinearizationTable", + 50713: "BlackLevelRepeatDim", + 50714: "BlackLevel", + 50715: "BlackLevelDeltaH", + 50716: "BlackLevelDeltaV", + 50717: "WhiteLevel", + 50718: "DefaultScale", + 50719: "DefaultCropOrigin", + 50720: "DefaultCropSize", + 50721: "ColorMatrix1", + 50722: "ColorMatrix2", + 50723: "CameraCalibration1", + 50724: "CameraCalibration2", + 50725: "ReductionMatrix1", + 50726: "ReductionMatrix2", + 50727: "AnalogBalance", + 50728: "AsShotNeutral", + 50729: "AsShotWhiteXY", + 50730: "BaselineExposure", + 50731: "BaselineNoise", + 50732: "BaselineSharpness", + 50733: "BayerGreenSplit", + 50734: "LinearResponseLimit", + 50735: "CameraSerialNumber", + 50736: "LensInfo", + 50737: "ChromaBlurRadius", + 50738: "AntiAliasStrength", + 50739: "ShadowScale", + 50740: "DNGPrivateData", + 50741: "MakerNoteSafety", + 50752: "RawImageSegmentation", + 50778: "CalibrationIlluminant1", + 50779: "CalibrationIlluminant2", + 50780: "BestQualityScale", + 50781: "RawDataUniqueID", + 50784: "AliasLayerMetadata", + 50827: "OriginalRawFileName", + 50828: "OriginalRawFileData", + 50829: "ActiveArea", + 50830: "MaskedAreas", + 50831: "AsShotICCProfile", + 50832: "AsShotPreProfileMatrix", + 50833: "CurrentICCProfile", + 50834: "CurrentPreProfileMatrix", + 50838: "IJMetadataByteCounts", + 50839: "IJMetadata", + 50844: "RPCCoefficientTag", + 50879: 
"ColorimetricReference", + 50885: "SRawType", + 50898: "PanasonicTitle", + 50899: "PanasonicTitle2", + 50931: "CameraCalibrationSignature", + 50932: "ProfileCalibrationSignature", + 50933: "ProfileIFD", + 50934: "AsShotProfileName", + 50935: "NoiseReductionApplied", + 50936: "ProfileName", + 50937: "ProfileHueSatMapDims", + 50938: "ProfileHueSatMapData1", + 50939: "ProfileHueSatMapData2", + 50940: "ProfileToneCurve", + 50941: "ProfileEmbedPolicy", + 50942: "ProfileCopyright", + 50964: "ForwardMatrix1", + 50965: "ForwardMatrix2", + 50966: "PreviewApplicationName", + 50967: "PreviewApplicationVersion", + 50968: "PreviewSettingsName", + 50969: "PreviewSettingsDigest", + 50970: "PreviewColorSpace", + 50971: "PreviewDateTime", + 50972: "RawImageDigest", + 50973: "OriginalRawFileDigest", + 50974: "SubTileBlockSize", + 50975: "RowInterleaveFactor", + 50981: "ProfileLookTableDims", + 50982: "ProfileLookTableData", + 51008: "OpcodeList1", + 51009: "OpcodeList2", + 51022: "OpcodeList3", + 51023: "FibicsXML", # + 51041: "NoiseProfile", + 51043: "TimeCodes", + 51044: "FrameRate", + 51058: "TStop", + 51081: "ReelName", + 51089: "OriginalDefaultFinalSize", + 51090: "OriginalBestQualitySize", + 51091: "OriginalDefaultCropSize", + 51105: "CameraLabel", + 51107: "ProfileHueSatMapEncoding", + 51108: "ProfileLookTableEncoding", + 51109: "BaselineExposureOffset", + 51110: "DefaultBlackRender", + 51111: "NewRawImageDigest", + 51112: "RawToPreviewGain", + 51125: "DefaultUserCrop", + 51123: "MicroManagerMetadata", + 59932: "Padding", + 59933: "OffsetSchema", + # Reusable Tags 65000-65535 + # 65000: Dimap_Document XML + # 65000-65112: Photoshop Camera RAW EXIF tags + # 65000: 'OwnerName', + # 65001: 'SerialNumber', + # 65002: 'Lens', + # 65024: 'KDC_IFD', + # 65100: 'RawFile', + # 65101: 'Converter', + # 65102: 'WhiteBalance', + # 65105: 'Exposure', + # 65106: 'Shadows', + # 65107: 'Brightness', + # 65108: 'Contrast', + # 65109: 'Saturation', + # 65110: 'Sharpness', + # 65111: 
'Smoothness', + # 65112: 'MoireFilter', + 65200: "FlexXML", # + 65563: "PerSample", + } + + def TAG_NAMES(): + return {v: c for c, v in TIFF.TAGS.items()} + + def TAG_READERS(): + # Map TIFF tag codes to import functions + return { + 320: read_colormap, + # 700: read_bytes, # read_utf8, + # 34377: read_bytes, + 33723: read_bytes, + # 34675: read_bytes, + 33628: read_uic1tag, # Universal Imaging Corp STK + 33629: read_uic2tag, + 33630: read_uic3tag, + 33631: read_uic4tag, + 34118: read_cz_sem, # Carl Zeiss SEM + 34361: read_mm_header, # Olympus FluoView + 34362: read_mm_stamp, + 34363: read_numpy, # MM_Unknown + 34386: read_numpy, # MM_UserBlock + 34412: read_cz_lsminfo, # Carl Zeiss LSM + 34680: read_fei_metadata, # S-FEG + 34682: read_fei_metadata, # Helios NanoLab + 37706: read_tvips_header, # TVIPS EMMENU + 37724: read_bytes, # ImageSourceData + 33923: read_bytes, # read_leica_magic + 43314: read_nih_image_header, + # 40001: read_bytes, + 40100: read_bytes, + 50288: read_bytes, + 50296: read_bytes, + 50839: read_bytes, + 51123: read_json, + 34665: read_exif_ifd, + 34853: read_gps_ifd, + 40965: read_interoperability_ifd, + } + + def TAG_TUPLE(): + # Tags whose values must be stored as tuples + return frozenset((273, 279, 324, 325, 530, 531, 34736)) + + def TAG_ATTRIBUTES(): + # Map tag codes to TiffPage attribute names + return { + "ImageWidth": "imagewidth", + "ImageLength": "imagelength", + "BitsPerSample": "bitspersample", + "Compression": "compression", + "PlanarConfiguration": "planarconfig", + "FillOrder": "fillorder", + "PhotometricInterpretation": "photometric", + "ColorMap": "colormap", + "ImageDescription": "description", + "ImageDescription1": "description1", + "SamplesPerPixel": "samplesperpixel", + "RowsPerStrip": "rowsperstrip", + "Software": "software", + "Predictor": "predictor", + "TileWidth": "tilewidth", + "TileLength": "tilelength", + "ExtraSamples": "extrasamples", + "SampleFormat": "sampleformat", + "ImageDepth": "imagedepth", + "TileDepth": 
"tiledepth", + } + + def TAG_ENUM(): + return { + # 254: TIFF.FILETYPE, + 255: TIFF.OFILETYPE, + 259: TIFF.COMPRESSION, + 262: TIFF.PHOTOMETRIC, + 263: TIFF.THRESHHOLD, + 266: TIFF.FILLORDER, + 274: TIFF.ORIENTATION, + 284: TIFF.PLANARCONFIG, + 290: TIFF.GRAYRESPONSEUNIT, + # 292: TIFF.GROUP3OPT, + # 293: TIFF.GROUP4OPT, + 296: TIFF.RESUNIT, + 300: TIFF.COLORRESPONSEUNIT, + 317: TIFF.PREDICTOR, + 338: TIFF.EXTRASAMPLE, + 339: TIFF.SAMPLEFORMAT, + # 512: TIFF.JPEGPROC, + # 531: TIFF.YCBCRPOSITION, + } + + def FILETYPE(): + class FILETYPE(enum.IntFlag): + # Python 3.6 only + UNDEFINED = 0 + REDUCEDIMAGE = 1 + PAGE = 2 + MASK = 4 + + return FILETYPE + + def OFILETYPE(): + class OFILETYPE(enum.IntEnum): + UNDEFINED = 0 + IMAGE = 1 + REDUCEDIMAGE = 2 + PAGE = 3 + + return OFILETYPE + + def COMPRESSION(): + class COMPRESSION(enum.IntEnum): + NONE = 1 # Uncompressed + CCITTRLE = 2 # CCITT 1D + CCITT_T4 = 3 # 'T4/Group 3 Fax', + CCITT_T6 = 4 # 'T6/Group 4 Fax', + LZW = 5 + OJPEG = 6 # old-style JPEG + JPEG = 7 + ADOBE_DEFLATE = 8 + JBIG_BW = 9 + JBIG_COLOR = 10 + JPEG_99 = 99 + KODAK_262 = 262 + NEXT = 32766 + SONY_ARW = 32767 + PACKED_RAW = 32769 + SAMSUNG_SRW = 32770 + CCIRLEW = 32771 + SAMSUNG_SRW2 = 32772 + PACKBITS = 32773 + THUNDERSCAN = 32809 + IT8CTPAD = 32895 + IT8LW = 32896 + IT8MP = 32897 + IT8BL = 32898 + PIXARFILM = 32908 + PIXARLOG = 32909 + DEFLATE = 32946 + DCS = 32947 + APERIO_JP2000_YCBC = 33003 # Leica Aperio + APERIO_JP2000_RGB = 33005 # Leica Aperio + JBIG = 34661 + SGILOG = 34676 + SGILOG24 = 34677 + JPEG2000 = 34712 + NIKON_NEF = 34713 + JBIG2 = 34715 + MDI_BINARY = 34718 # 'Microsoft Document Imaging + MDI_PROGRESSIVE = 34719 # 'Microsoft Document Imaging + MDI_VECTOR = 34720 # 'Microsoft Document Imaging + JPEG_LOSSY = 34892 + LZMA = 34925 + ZSTD = 34926 + OPS_PNG = 34933 # Objective Pathology Services + OPS_JPEGXR = 34934 # Objective Pathology Services + PIXTIFF = 50013 + KODAK_DCR = 65000 + PENTAX_PEF = 65535 + # def __bool__(self): return self 
!= 1 # Python 3.6 only + + return COMPRESSION + + def PHOTOMETRIC(): + class PHOTOMETRIC(enum.IntEnum): + MINISWHITE = 0 + MINISBLACK = 1 + RGB = 2 + PALETTE = 3 + MASK = 4 + SEPARATED = 5 # CMYK + YCBCR = 6 + CIELAB = 8 + ICCLAB = 9 + ITULAB = 10 + CFA = 32803 # Color Filter Array + LOGL = 32844 + LOGLUV = 32845 + LINEAR_RAW = 34892 + + return PHOTOMETRIC + + def THRESHHOLD(): + class THRESHHOLD(enum.IntEnum): + BILEVEL = 1 + HALFTONE = 2 + ERRORDIFFUSE = 3 + + return THRESHHOLD + + def FILLORDER(): + class FILLORDER(enum.IntEnum): + MSB2LSB = 1 + LSB2MSB = 2 + + return FILLORDER + + def ORIENTATION(): + class ORIENTATION(enum.IntEnum): + TOPLEFT = 1 + TOPRIGHT = 2 + BOTRIGHT = 3 + BOTLEFT = 4 + LEFTTOP = 5 + RIGHTTOP = 6 + RIGHTBOT = 7 + LEFTBOT = 8 + + return ORIENTATION + + def PLANARCONFIG(): + class PLANARCONFIG(enum.IntEnum): + CONTIG = 1 + SEPARATE = 2 + + return PLANARCONFIG + + def GRAYRESPONSEUNIT(): + class GRAYRESPONSEUNIT(enum.IntEnum): + _10S = 1 + _100S = 2 + _1000S = 3 + _10000S = 4 + _100000S = 5 + + return GRAYRESPONSEUNIT + + def GROUP4OPT(): + class GROUP4OPT(enum.IntEnum): + UNCOMPRESSED = 2 + + return GROUP4OPT + + def RESUNIT(): + class RESUNIT(enum.IntEnum): + NONE = 1 + INCH = 2 + CENTIMETER = 3 + # def __bool__(self): return self != 1 # Python 3.6 only + + return RESUNIT + + def COLORRESPONSEUNIT(): + class COLORRESPONSEUNIT(enum.IntEnum): + _10S = 1 + _100S = 2 + _1000S = 3 + _10000S = 4 + _100000S = 5 + + return COLORRESPONSEUNIT + + def PREDICTOR(): + class PREDICTOR(enum.IntEnum): + NONE = 1 + HORIZONTAL = 2 + FLOATINGPOINT = 3 + # def __bool__(self): return self != 1 # Python 3.6 only + + return PREDICTOR + + def EXTRASAMPLE(): + class EXTRASAMPLE(enum.IntEnum): + UNSPECIFIED = 0 + ASSOCALPHA = 1 + UNASSALPHA = 2 + + return EXTRASAMPLE + + def SAMPLEFORMAT(): + class SAMPLEFORMAT(enum.IntEnum): + UINT = 1 + INT = 2 + IEEEFP = 3 + VOID = 4 + COMPLEXINT = 5 + COMPLEXIEEEFP = 6 + + return SAMPLEFORMAT + + def DATATYPES(): + class 
DATATYPES(enum.IntEnum): + NOTYPE = 0 + BYTE = 1 + ASCII = 2 + SHORT = 3 + LONG = 4 + RATIONAL = 5 + SBYTE = 6 + UNDEFINED = 7 + SSHORT = 8 + SLONG = 9 + SRATIONAL = 10 + FLOAT = 11 + DOUBLE = 12 + IFD = 13 + UNICODE = 14 + COMPLEX = 15 + LONG8 = 16 + SLONG8 = 17 + IFD8 = 18 + + return DATATYPES + + def DATA_FORMATS(): + # Map TIFF DATATYPES to Python struct formats + return { + 1: "1B", # BYTE 8-bit unsigned integer. + 2: "1s", # ASCII 8-bit byte that contains a 7-bit ASCII code; + # the last byte must be NULL (binary zero). + 3: "1H", # SHORT 16-bit (2-byte) unsigned integer + 4: "1I", # LONG 32-bit (4-byte) unsigned integer. + 5: "2I", # RATIONAL Two LONGs: the first represents the numerator + # of a fraction; the second, the denominator. + 6: "1b", # SBYTE An 8-bit signed (twos-complement) integer. + 7: "1B", # UNDEFINED An 8-bit byte that may contain anything, + # depending on the definition of the field. + 8: "1h", # SSHORT A 16-bit (2-byte) signed (twos-complement) + # integer. + 9: "1i", # SLONG A 32-bit (4-byte) signed (twos-complement) + # integer. + 10: "2i", # SRATIONAL Two SLONGs: the first represents the + # numerator of a fraction, the second the denominator. + 11: "1f", # FLOAT Single precision (4-byte) IEEE format. + 12: "1d", # DOUBLE Double precision (8-byte) IEEE format. + 13: "1I", # IFD unsigned 4 byte IFD offset. 
+ # 14: '', # UNICODE + # 15: '', # COMPLEX + 16: "1Q", # LONG8 unsigned 8 byte integer (BigTiff) + 17: "1q", # SLONG8 signed 8 byte integer (BigTiff) + 18: "1Q", # IFD8 unsigned 8 byte IFD offset (BigTiff) + } + + def DATA_DTYPES(): + # Map numpy dtypes to TIFF DATATYPES + return { + "B": 1, + "s": 2, + "H": 3, + "I": 4, + "2I": 5, + "b": 6, + "h": 8, + "i": 9, + "2i": 10, + "f": 11, + "d": 12, + "Q": 16, + "q": 17, + } + + def SAMPLE_DTYPES(): + # Map TIFF SampleFormats and BitsPerSample to numpy dtype + return { + (1, 1): "?", # bitmap + (1, 2): "B", + (1, 3): "B", + (1, 4): "B", + (1, 5): "B", + (1, 6): "B", + (1, 7): "B", + (1, 8): "B", + (1, 9): "H", + (1, 10): "H", + (1, 11): "H", + (1, 12): "H", + (1, 13): "H", + (1, 14): "H", + (1, 15): "H", + (1, 16): "H", + (1, 17): "I", + (1, 18): "I", + (1, 19): "I", + (1, 20): "I", + (1, 21): "I", + (1, 22): "I", + (1, 23): "I", + (1, 24): "I", + (1, 25): "I", + (1, 26): "I", + (1, 27): "I", + (1, 28): "I", + (1, 29): "I", + (1, 30): "I", + (1, 31): "I", + (1, 32): "I", + (1, 64): "Q", + (2, 8): "b", + (2, 16): "h", + (2, 32): "i", + (2, 64): "q", + (3, 16): "e", + (3, 32): "f", + (3, 64): "d", + (6, 64): "F", + (6, 128): "D", + (1, (5, 6, 5)): "B", + } + + def COMPESSORS(): + # Map COMPRESSION to compress functions and default compression levels + + class Compressors(object): + """Delay import compressor functions.""" + + def __init__(self): + self._compressors = {8: (zlib.compress, 6), 32946: (zlib.compress, 6)} + + def __getitem__(self, key): + if key in self._compressors: + return self._compressors[key] + + if key == 34925: + try: + import lzma # delayed import + except ImportError: + try: + import backports.lzma as lzma # delayed import + except ImportError: + raise KeyError + + def lzma_compress(x, level): + return lzma.compress(x) + + self._compressors[key] = lzma_compress, 0 + return lzma_compress, 0 + + if key == 34926: + try: + import zstd # delayed import + except ImportError: + raise KeyError + 
def COMPESSORS():
    """Map COMPRESSION codes to (compress function, default level).

    Optional codecs (lzma, zstd) are imported lazily on first lookup.
    Name spelling is preserved for compatibility with existing callers.
    """

    class Compressors(object):
        """Delay import compressor functions."""

        def __init__(self):
            self._compressors = {8: (zlib.compress, 6), 32946: (zlib.compress, 6)}

        def __getitem__(self, key):
            try:
                return self._compressors[key]
            except KeyError:
                pass
            if key == 34925:
                try:
                    import lzma  # delayed import
                except ImportError:
                    try:
                        import backports.lzma as lzma  # delayed import
                    except ImportError:
                        raise KeyError

                def lzma_compress(x, level):
                    # LZMA ignores the level argument
                    return lzma.compress(x)

                self._compressors[key] = lzma_compress, 0
                return lzma_compress, 0
            if key == 34926:
                try:
                    import zstd  # delayed import
                except ImportError:
                    raise KeyError
                self._compressors[key] = zstd.compress, 9
                return zstd.compress, 9
            raise KeyError

        def __contains__(self, key):
            try:
                self[key]
            except KeyError:
                return False
            return True

    return Compressors()

def DECOMPESSORS():
    """Map COMPRESSION codes to decompress functions.

    Optional codecs (jpeg, lzma, zstd) are imported lazily on first lookup.
    Name spelling is preserved for compatibility with existing callers.
    """

    class Decompressors(object):
        """Delay import decompressor functions."""

        def __init__(self):
            self._decompressors = {
                None: identityfunc,
                1: identityfunc,
                5: decode_lzw,
                8: zlib.decompress,
                32773: decode_packbits,
                32946: zlib.decompress,
            }

        def __getitem__(self, key):
            try:
                return self._decompressors[key]
            except KeyError:
                pass
            if key == 7:
                try:
                    from imagecodecs import jpeg, jpeg_12
                except ImportError:
                    raise KeyError

                def decode_jpeg(x, table, bps, colorspace=None):
                    if bps == 8:
                        return jpeg.decode_jpeg(x, table, colorspace)
                    elif bps == 12:
                        return jpeg_12.decode_jpeg_12(x, table, colorspace)
                    else:
                        raise ValueError("bitspersample not supported")

                self._decompressors[key] = decode_jpeg
                return decode_jpeg
            if key == 34925:
                try:
                    import lzma  # delayed import
                except ImportError:
                    try:
                        import backports.lzma as lzma  # delayed import
                    except ImportError:
                        raise KeyError
                self._decompressors[key] = lzma.decompress
                return lzma.decompress
            if key == 34926:
                try:
                    import zstd  # delayed import
                except ImportError:
                    raise KeyError
                self._decompressors[key] = zstd.decompress
                return zstd.decompress
            raise KeyError

        def __contains__(self, item):
            try:
                self[item]
            except KeyError:
                return False
            return True

    return Decompressors()

def FRAME_ATTRS():
    """Attributes that a TiffFrame shares with its keyframe."""
    return {"shape", "ndim", "size", "dtype", "axes", "is_final"}

def FILE_FLAGS():
    """TiffFile and TiffPage 'is_' attribute names, minus excluded ones."""
    exclude = {
        "reduced", "final", "memmappable", "contiguous", "tiled",
        "chroma_subsampled",
    }
    return {
        name[3:]
        for name in dir(TiffPage)
        if name.startswith("is_") and name[3:] not in exclude
    }

def FILE_EXTENSIONS():
    """Recognized TIFF file extensions."""
    return (
        "tif", "tiff", "ome.tif", "lsm", "stk", "qptiff", "pcoraw",
        "gel", "seq", "svs", "bif", "tf8", "tf2", "btf",
    )

def FILEOPEN_FILTER():
    """Filter entries for use in a Windows File Open dialog."""
    filters = [
        ("%s files" % ext.upper(), "*.%s" % ext) for ext in TIFF.FILE_EXTENSIONS
    ]
    filters.append(("allfiles", "*"))
    return filters

def AXES_LABELS():
    """Bidirectional map of axis characters and their names."""
    # TODO: is there a standard for character axes labels?
    axes = {
        "X": "width",
        "Y": "height",
        "Z": "depth",
        "S": "sample",  # rgb(a)
        "I": "series",  # general sequence, plane, page, IFD
        "T": "time",
        "C": "channel",  # color, emission wavelength
        "A": "angle",
        "P": "phase",  # formerly F # P is Position in LSM!
        "R": "tile",  # region, point, mosaic
        "H": "lifetime",  # histogram
        "E": "lambda",  # excitation wavelength
        "L": "exposure",  # lux
        "V": "event",
        "Q": "other",
        "M": "mosaic",  # LSM 6
    }
    # make the mapping usable in both directions
    axes.update({name: char for char, name in axes.items()})
    return axes

def ANDOR_TAGS():
    """Andor Technology tag codes #4864 - 5030."""
    return set(range(4864, 5030))

def EXIF_TAGS():
    """EXIF tag codes: standard TIFF tags plus Photoshop Camera RAW."""
    tags = {
        # 65000 - 65112 Photoshop Camera RAW EXIF tags
        65000: "OwnerName",
        65001: "SerialNumber",
        65002: "Lens",
        65100: "RawFile",
        65101: "Converter",
        65102: "WhiteBalance",
        65105: "Exposure",
        65106: "Shadows",
        65107: "Brightness",
        65108: "Contrast",
        65109: "Saturation",
        65110: "Sharpness",
        65111: "Smoothness",
        65112: "MoireFilter",
    }
    tags.update(TIFF.TAGS)
    return tags
"GPSDestLongitudeRef", + 22: "GPSDestLongitude", + 23: "GPSDestBearingRef", + 24: "GPSDestBearing", + 25: "GPSDestDistanceRef", + 26: "GPSDestDistance", + 27: "GPSProcessingMethod", + 28: "GPSAreaInformation", + 29: "GPSDateStamp", + 30: "GPSDifferential", + 31: "GPSHPositioningError", + } + + def IOP_TAGS(): + return { + 1: "InteroperabilityIndex", + 2: "InteroperabilityVersion", + 4096: "RelatedImageFileFormat", + 4097: "RelatedImageWidth", + 4098: "RelatedImageLength", + } + + def GEO_KEYS(): + return { + 1024: "GTModelTypeGeoKey", + 1025: "GTRasterTypeGeoKey", + 1026: "GTCitationGeoKey", + 2048: "GeographicTypeGeoKey", + 2049: "GeogCitationGeoKey", + 2050: "GeogGeodeticDatumGeoKey", + 2051: "GeogPrimeMeridianGeoKey", + 2052: "GeogLinearUnitsGeoKey", + 2053: "GeogLinearUnitSizeGeoKey", + 2054: "GeogAngularUnitsGeoKey", + 2055: "GeogAngularUnitsSizeGeoKey", + 2056: "GeogEllipsoidGeoKey", + 2057: "GeogSemiMajorAxisGeoKey", + 2058: "GeogSemiMinorAxisGeoKey", + 2059: "GeogInvFlatteningGeoKey", + 2060: "GeogAzimuthUnitsGeoKey", + 2061: "GeogPrimeMeridianLongGeoKey", + 2062: "GeogTOWGS84GeoKey", + 3059: "ProjLinearUnitsInterpCorrectGeoKey", # GDAL + 3072: "ProjectedCSTypeGeoKey", + 3073: "PCSCitationGeoKey", + 3074: "ProjectionGeoKey", + 3075: "ProjCoordTransGeoKey", + 3076: "ProjLinearUnitsGeoKey", + 3077: "ProjLinearUnitSizeGeoKey", + 3078: "ProjStdParallel1GeoKey", + 3079: "ProjStdParallel2GeoKey", + 3080: "ProjNatOriginLongGeoKey", + 3081: "ProjNatOriginLatGeoKey", + 3082: "ProjFalseEastingGeoKey", + 3083: "ProjFalseNorthingGeoKey", + 3084: "ProjFalseOriginLongGeoKey", + 3085: "ProjFalseOriginLatGeoKey", + 3086: "ProjFalseOriginEastingGeoKey", + 3087: "ProjFalseOriginNorthingGeoKey", + 3088: "ProjCenterLongGeoKey", + 3089: "ProjCenterLatGeoKey", + 3090: "ProjCenterEastingGeoKey", + 3091: "ProjFalseOriginNorthingGeoKey", + 3092: "ProjScaleAtNatOriginGeoKey", + 3093: "ProjScaleAtCenterGeoKey", + 3094: "ProjAzimuthAngleGeoKey", + 3095: 
"ProjStraightVertPoleLongGeoKey", + 3096: "ProjRectifiedGridAngleGeoKey", + 4096: "VerticalCSTypeGeoKey", + 4097: "VerticalCitationGeoKey", + 4098: "VerticalDatumGeoKey", + 4099: "VerticalUnitsGeoKey", + } + + def GEO_CODES(): + try: + from .tifffile_geodb import GEO_CODES # delayed import + except (ImportError, ValueError): + try: + from tifffile_geodb import GEO_CODES # delayed import + except (ImportError, ValueError): + GEO_CODES = {} + return GEO_CODES + + def CZ_LSMINFO(): + return [ + ("MagicNumber", "u4"), + ("StructureSize", "i4"), + ("DimensionX", "i4"), + ("DimensionY", "i4"), + ("DimensionZ", "i4"), + ("DimensionChannels", "i4"), + ("DimensionTime", "i4"), + ("DataType", "i4"), # DATATYPES + ("ThumbnailX", "i4"), + ("ThumbnailY", "i4"), + ("VoxelSizeX", "f8"), + ("VoxelSizeY", "f8"), + ("VoxelSizeZ", "f8"), + ("OriginX", "f8"), + ("OriginY", "f8"), + ("OriginZ", "f8"), + ("ScanType", "u2"), + ("SpectralScan", "u2"), + ("TypeOfData", "u4"), # TYPEOFDATA + ("OffsetVectorOverlay", "u4"), + ("OffsetInputLut", "u4"), + ("OffsetOutputLut", "u4"), + ("OffsetChannelColors", "u4"), + ("TimeIntervall", "f8"), + ("OffsetChannelDataTypes", "u4"), + ("OffsetScanInformation", "u4"), # SCANINFO + ("OffsetKsData", "u4"), + ("OffsetTimeStamps", "u4"), + ("OffsetEventList", "u4"), + ("OffsetRoi", "u4"), + ("OffsetBleachRoi", "u4"), + ("OffsetNextRecording", "u4"), + # LSM 2.0 ends here + ("DisplayAspectX", "f8"), + ("DisplayAspectY", "f8"), + ("DisplayAspectZ", "f8"), + ("DisplayAspectTime", "f8"), + ("OffsetMeanOfRoisOverlay", "u4"), + ("OffsetTopoIsolineOverlay", "u4"), + ("OffsetTopoProfileOverlay", "u4"), + ("OffsetLinescanOverlay", "u4"), + ("ToolbarFlags", "u4"), + ("OffsetChannelWavelength", "u4"), + ("OffsetChannelFactors", "u4"), + ("ObjectiveSphereCorrection", "f8"), + ("OffsetUnmixParameters", "u4"), + # LSM 3.2, 4.0 end here + ("OffsetAcquisitionParameters", "u4"), + ("OffsetCharacteristics", "u4"), + ("OffsetPalette", "u4"), + ("TimeDifferenceX", "f8"), + 
("TimeDifferenceY", "f8"), + ("TimeDifferenceZ", "f8"), + ("InternalUse1", "u4"), + ("DimensionP", "i4"), + ("DimensionM", "i4"), + ("DimensionsReserved", "16i4"), + ("OffsetTilePositions", "u4"), + ("", "9u4"), # Reserved + ("OffsetPositions", "u4"), + # ('', '21u4'), # must be 0 + ] + + def CZ_LSMINFO_READERS(): + # Import functions for CZ_LSMINFO sub-records + # TODO: read more CZ_LSMINFO sub-records + return { + "ScanInformation": read_lsm_scaninfo, + "TimeStamps": read_lsm_timestamps, + "EventList": read_lsm_eventlist, + "ChannelColors": read_lsm_channelcolors, + "Positions": read_lsm_floatpairs, + "TilePositions": read_lsm_floatpairs, + "VectorOverlay": None, + "InputLut": None, + "OutputLut": None, + "TimeIntervall": None, + "ChannelDataTypes": None, + "KsData": None, + "Roi": None, + "BleachRoi": None, + "NextRecording": None, + "MeanOfRoisOverlay": None, + "TopoIsolineOverlay": None, + "TopoProfileOverlay": None, + "ChannelWavelength": None, + "SphereCorrection": None, + "ChannelFactors": None, + "UnmixParameters": None, + "AcquisitionParameters": None, + "Characteristics": None, + } + + def CZ_LSMINFO_SCANTYPE(): + # Map CZ_LSMINFO.ScanType to dimension order + return { + 0: "XYZCT", # 'Stack' normal x-y-z-scan + 1: "XYZCT", # 'Z-Scan' x-z-plane Y=1 + 2: "XYZCT", # 'Line' + 3: "XYTCZ", # 'Time Series Plane' time series x-y XYCTZ ? 
Z=1 + 4: "XYZTC", # 'Time Series z-Scan' time series x-z + 5: "XYTCZ", # 'Time Series Mean-of-ROIs' + 6: "XYZTC", # 'Time Series Stack' time series x-y-z + 7: "XYCTZ", # Spline Scan + 8: "XYCZT", # Spline Plane x-z + 9: "XYTCZ", # Time Series Spline Plane x-z + 10: "XYZCT", # 'Time Series Point' point mode + } + + def CZ_LSMINFO_DIMENSIONS(): + # Map dimension codes to CZ_LSMINFO attribute + return { + "X": "DimensionX", + "Y": "DimensionY", + "Z": "DimensionZ", + "C": "DimensionChannels", + "T": "DimensionTime", + "P": "DimensionP", + "M": "DimensionM", + } + + def CZ_LSMINFO_DATATYPES(): + # Description of CZ_LSMINFO.DataType + return { + 0: "varying data types", + 1: "8 bit unsigned integer", + 2: "12 bit unsigned integer", + 5: "32 bit float", + } + + def CZ_LSMINFO_TYPEOFDATA(): + # Description of CZ_LSMINFO.TypeOfData + return { + 0: "Original scan data", + 1: "Calculated data", + 2: "3D reconstruction", + 3: "Topography height map", + } + + def CZ_LSMINFO_SCANINFO_ARRAYS(): + return { + 0x20000000: "Tracks", + 0x30000000: "Lasers", + 0x60000000: "DetectionChannels", + 0x80000000: "IlluminationChannels", + 0xA0000000: "BeamSplitters", + 0xC0000000: "DataChannels", + 0x11000000: "Timers", + 0x13000000: "Markers", + } + + def CZ_LSMINFO_SCANINFO_STRUCTS(): + return { + # 0x10000000: 'Recording', + 0x40000000: "Track", + 0x50000000: "Laser", + 0x70000000: "DetectionChannel", + 0x90000000: "IlluminationChannel", + 0xB0000000: "BeamSplitter", + 0xD0000000: "DataChannel", + 0x12000000: "Timer", + 0x14000000: "Marker", + } + + def CZ_LSMINFO_SCANINFO_ATTRIBUTES(): + return { + # Recording + 0x10000001: "Name", + 0x10000002: "Description", + 0x10000003: "Notes", + 0x10000004: "Objective", + 0x10000005: "ProcessingSummary", + 0x10000006: "SpecialScanMode", + 0x10000007: "ScanType", + 0x10000008: "ScanMode", + 0x10000009: "NumberOfStacks", + 0x1000000A: "LinesPerPlane", + 0x1000000B: "SamplesPerLine", + 0x1000000C: "PlanesPerVolume", + 0x1000000D: "ImagesWidth", + 
0x1000000E: "ImagesHeight", + 0x1000000F: "ImagesNumberPlanes", + 0x10000010: "ImagesNumberStacks", + 0x10000011: "ImagesNumberChannels", + 0x10000012: "LinscanXySize", + 0x10000013: "ScanDirection", + 0x10000014: "TimeSeries", + 0x10000015: "OriginalScanData", + 0x10000016: "ZoomX", + 0x10000017: "ZoomY", + 0x10000018: "ZoomZ", + 0x10000019: "Sample0X", + 0x1000001A: "Sample0Y", + 0x1000001B: "Sample0Z", + 0x1000001C: "SampleSpacing", + 0x1000001D: "LineSpacing", + 0x1000001E: "PlaneSpacing", + 0x1000001F: "PlaneWidth", + 0x10000020: "PlaneHeight", + 0x10000021: "VolumeDepth", + 0x10000023: "Nutation", + 0x10000034: "Rotation", + 0x10000035: "Precession", + 0x10000036: "Sample0time", + 0x10000037: "StartScanTriggerIn", + 0x10000038: "StartScanTriggerOut", + 0x10000039: "StartScanEvent", + 0x10000040: "StartScanTime", + 0x10000041: "StopScanTriggerIn", + 0x10000042: "StopScanTriggerOut", + 0x10000043: "StopScanEvent", + 0x10000044: "StopScanTime", + 0x10000045: "UseRois", + 0x10000046: "UseReducedMemoryRois", + 0x10000047: "User", + 0x10000048: "UseBcCorrection", + 0x10000049: "PositionBcCorrection1", + 0x10000050: "PositionBcCorrection2", + 0x10000051: "InterpolationY", + 0x10000052: "CameraBinning", + 0x10000053: "CameraSupersampling", + 0x10000054: "CameraFrameWidth", + 0x10000055: "CameraFrameHeight", + 0x10000056: "CameraOffsetX", + 0x10000057: "CameraOffsetY", + 0x10000059: "RtBinning", + 0x1000005A: "RtFrameWidth", + 0x1000005B: "RtFrameHeight", + 0x1000005C: "RtRegionWidth", + 0x1000005D: "RtRegionHeight", + 0x1000005E: "RtOffsetX", + 0x1000005F: "RtOffsetY", + 0x10000060: "RtZoom", + 0x10000061: "RtLinePeriod", + 0x10000062: "Prescan", + 0x10000063: "ScanDirectionZ", + # Track + 0x40000001: "MultiplexType", # 0 After Line; 1 After Frame + 0x40000002: "MultiplexOrder", + 0x40000003: "SamplingMode", # 0 Sample; 1 Line Avg; 2 Frame Avg + 0x40000004: "SamplingMethod", # 1 Mean; 2 Sum + 0x40000005: "SamplingNumber", + 0x40000006: "Acquire", + 0x40000007: 
"SampleObservationTime", + 0x4000000B: "TimeBetweenStacks", + 0x4000000C: "Name", + 0x4000000D: "Collimator1Name", + 0x4000000E: "Collimator1Position", + 0x4000000F: "Collimator2Name", + 0x40000010: "Collimator2Position", + 0x40000011: "IsBleachTrack", + 0x40000012: "IsBleachAfterScanNumber", + 0x40000013: "BleachScanNumber", + 0x40000014: "TriggerIn", + 0x40000015: "TriggerOut", + 0x40000016: "IsRatioTrack", + 0x40000017: "BleachCount", + 0x40000018: "SpiCenterWavelength", + 0x40000019: "PixelTime", + 0x40000021: "CondensorFrontlens", + 0x40000023: "FieldStopValue", + 0x40000024: "IdCondensorAperture", + 0x40000025: "CondensorAperture", + 0x40000026: "IdCondensorRevolver", + 0x40000027: "CondensorFilter", + 0x40000028: "IdTransmissionFilter1", + 0x40000029: "IdTransmission1", + 0x40000030: "IdTransmissionFilter2", + 0x40000031: "IdTransmission2", + 0x40000032: "RepeatBleach", + 0x40000033: "EnableSpotBleachPos", + 0x40000034: "SpotBleachPosx", + 0x40000035: "SpotBleachPosy", + 0x40000036: "SpotBleachPosz", + 0x40000037: "IdTubelens", + 0x40000038: "IdTubelensPosition", + 0x40000039: "TransmittedLight", + 0x4000003A: "ReflectedLight", + 0x4000003B: "SimultanGrabAndBleach", + 0x4000003C: "BleachPixelTime", + # Laser + 0x50000001: "Name", + 0x50000002: "Acquire", + 0x50000003: "Power", + # DetectionChannel + 0x70000001: "IntegrationMode", + 0x70000002: "SpecialMode", + 0x70000003: "DetectorGainFirst", + 0x70000004: "DetectorGainLast", + 0x70000005: "AmplifierGainFirst", + 0x70000006: "AmplifierGainLast", + 0x70000007: "AmplifierOffsFirst", + 0x70000008: "AmplifierOffsLast", + 0x70000009: "PinholeDiameter", + 0x7000000A: "CountingTrigger", + 0x7000000B: "Acquire", + 0x7000000C: "PointDetectorName", + 0x7000000D: "AmplifierName", + 0x7000000E: "PinholeName", + 0x7000000F: "FilterSetName", + 0x70000010: "FilterName", + 0x70000013: "IntegratorName", + 0x70000014: "ChannelName", + 0x70000015: "DetectorGainBc1", + 0x70000016: "DetectorGainBc2", + 0x70000017: 
"AmplifierGainBc1", + 0x70000018: "AmplifierGainBc2", + 0x70000019: "AmplifierOffsetBc1", + 0x70000020: "AmplifierOffsetBc2", + 0x70000021: "SpectralScanChannels", + 0x70000022: "SpiWavelengthStart", + 0x70000023: "SpiWavelengthStop", + 0x70000026: "DyeName", + 0x70000027: "DyeFolder", + # IlluminationChannel + 0x90000001: "Name", + 0x90000002: "Power", + 0x90000003: "Wavelength", + 0x90000004: "Aquire", + 0x90000005: "DetchannelName", + 0x90000006: "PowerBc1", + 0x90000007: "PowerBc2", + # BeamSplitter + 0xB0000001: "FilterSet", + 0xB0000002: "Filter", + 0xB0000003: "Name", + # DataChannel + 0xD0000001: "Name", + 0xD0000003: "Acquire", + 0xD0000004: "Color", + 0xD0000005: "SampleType", + 0xD0000006: "BitsPerSample", + 0xD0000007: "RatioType", + 0xD0000008: "RatioTrack1", + 0xD0000009: "RatioTrack2", + 0xD000000A: "RatioChannel1", + 0xD000000B: "RatioChannel2", + 0xD000000C: "RatioConst1", + 0xD000000D: "RatioConst2", + 0xD000000E: "RatioConst3", + 0xD000000F: "RatioConst4", + 0xD0000010: "RatioConst5", + 0xD0000011: "RatioConst6", + 0xD0000012: "RatioFirstImages1", + 0xD0000013: "RatioFirstImages2", + 0xD0000014: "DyeName", + 0xD0000015: "DyeFolder", + 0xD0000016: "Spectrum", + 0xD0000017: "Acquire", + # Timer + 0x12000001: "Name", + 0x12000002: "Description", + 0x12000003: "Interval", + 0x12000004: "TriggerIn", + 0x12000005: "TriggerOut", + 0x12000006: "ActivationTime", + 0x12000007: "ActivationNumber", + # Marker + 0x14000001: "Name", + 0x14000002: "Description", + 0x14000003: "TriggerIn", + 0x14000004: "TriggerOut", + } + + def NIH_IMAGE_HEADER(): + return [ + ("FileID", "a8"), + ("nLines", "i2"), + ("PixelsPerLine", "i2"), + ("Version", "i2"), + ("OldLutMode", "i2"), + ("OldnColors", "i2"), + ("Colors", "u1", (3, 32)), + ("OldColorStart", "i2"), + ("ColorWidth", "i2"), + ("ExtraColors", "u2", (6, 3)), + ("nExtraColors", "i2"), + ("ForegroundIndex", "i2"), + ("BackgroundIndex", "i2"), + ("XScale", "f8"), + ("Unused2", "i2"), + ("Unused3", "i2"), + ("UnitsID", 
"i2"), # NIH_UNITS_TYPE + ("p1", [("x", "i2"), ("y", "i2")]), + ("p2", [("x", "i2"), ("y", "i2")]), + ("CurveFitType", "i2"), # NIH_CURVEFIT_TYPE + ("nCoefficients", "i2"), + ("Coeff", "f8", 6), + ("UMsize", "u1"), + ("UM", "a15"), + ("UnusedBoolean", "u1"), + ("BinaryPic", "b1"), + ("SliceStart", "i2"), + ("SliceEnd", "i2"), + ("ScaleMagnification", "f4"), + ("nSlices", "i2"), + ("SliceSpacing", "f4"), + ("CurrentSlice", "i2"), + ("FrameInterval", "f4"), + ("PixelAspectRatio", "f4"), + ("ColorStart", "i2"), + ("ColorEnd", "i2"), + ("nColors", "i2"), + ("Fill1", "3u2"), + ("Fill2", "3u2"), + ("Table", "u1"), # NIH_COLORTABLE_TYPE + ("LutMode", "u1"), # NIH_LUTMODE_TYPE + ("InvertedTable", "b1"), + ("ZeroClip", "b1"), + ("XUnitSize", "u1"), + ("XUnit", "a11"), + ("StackType", "i2"), # NIH_STACKTYPE_TYPE + # ('UnusedBytes', 'u1', 200) + ] + + def NIH_COLORTABLE_TYPE(): + return ( + "CustomTable", + "AppleDefault", + "Pseudo20", + "Pseudo32", + "Rainbow", + "Fire1", + "Fire2", + "Ice", + "Grays", + "Spectrum", + ) + + def NIH_LUTMODE_TYPE(): + return ( + "PseudoColor", + "OldAppleDefault", + "OldSpectrum", + "GrayScale", + "ColorLut", + "CustomGrayscale", + ) + + def NIH_CURVEFIT_TYPE(): + return ( + "StraightLine", + "Poly2", + "Poly3", + "Poly4", + "Poly5", + "ExpoFit", + "PowerFit", + "LogFit", + "RodbardFit", + "SpareFit1", + "Uncalibrated", + "UncalibratedOD", + ) + + def NIH_UNITS_TYPE(): + return ( + "Nanometers", + "Micrometers", + "Millimeters", + "Centimeters", + "Meters", + "Kilometers", + "Inches", + "Feet", + "Miles", + "Pixels", + "OtherUnits", + ) + + def NIH_STACKTYPE_TYPE(): + return ("VolumeStack", "RGBStack", "MovieStack", "HSVStack") + + def TVIPS_HEADER_V1(): + # TVIPS TemData structure from EMMENU Help file + return [ + ("Version", "i4"), + ("CommentV1", "a80"), + ("HighTension", "i4"), + ("SphericalAberration", "i4"), + ("IlluminationAperture", "i4"), + ("Magnification", "i4"), + ("PostMagnification", "i4"), + ("FocalLength", "i4"), + 
("Defocus", "i4"), + ("Astigmatism", "i4"), + ("AstigmatismDirection", "i4"), + ("BiprismVoltage", "i4"), + ("SpecimenTiltAngle", "i4"), + ("SpecimenTiltDirection", "i4"), + ("IlluminationTiltDirection", "i4"), + ("IlluminationTiltAngle", "i4"), + ("ImageMode", "i4"), + ("EnergySpread", "i4"), + ("ChromaticAberration", "i4"), + ("ShutterType", "i4"), + ("DefocusSpread", "i4"), + ("CcdNumber", "i4"), + ("CcdSize", "i4"), + ("OffsetXV1", "i4"), + ("OffsetYV1", "i4"), + ("PhysicalPixelSize", "i4"), + ("Binning", "i4"), + ("ReadoutSpeed", "i4"), + ("GainV1", "i4"), + ("SensitivityV1", "i4"), + ("ExposureTimeV1", "i4"), + ("FlatCorrected", "i4"), + ("DeadPxCorrected", "i4"), + ("ImageMean", "i4"), + ("ImageStd", "i4"), + ("DisplacementX", "i4"), + ("DisplacementY", "i4"), + ("DateV1", "i4"), + ("TimeV1", "i4"), + ("ImageMin", "i4"), + ("ImageMax", "i4"), + ("ImageStatisticsQuality", "i4"), + ] + + def TVIPS_HEADER_V2(): + return [ + ("ImageName", "V160"), # utf16 + ("ImageFolder", "V160"), + ("ImageSizeX", "i4"), + ("ImageSizeY", "i4"), + ("ImageSizeZ", "i4"), + ("ImageSizeE", "i4"), + ("ImageDataType", "i4"), + ("Date", "i4"), + ("Time", "i4"), + ("Comment", "V1024"), + ("ImageHistory", "V1024"), + ("Scaling", "16f4"), + ("ImageStatistics", "16c16"), + ("ImageType", "i4"), + ("ImageDisplaType", "i4"), + ("PixelSizeX", "f4"), # distance between two px in x, [nm] + ("PixelSizeY", "f4"), # distance between two px in y, [nm] + ("ImageDistanceZ", "f4"), + ("ImageDistanceE", "f4"), + ("ImageMisc", "32f4"), + ("TemType", "V160"), + ("TemHighTension", "f4"), + ("TemAberrations", "32f4"), + ("TemEnergy", "32f4"), + ("TemMode", "i4"), + ("TemMagnification", "f4"), + ("TemMagnificationCorrection", "f4"), + ("PostMagnification", "f4"), + ("TemStageType", "i4"), + ("TemStagePosition", "5f4"), # x, y, z, a, b + ("TemImageShift", "2f4"), + ("TemBeamShift", "2f4"), + ("TemBeamTilt", "2f4"), + ("TilingParameters", "7f4"), # 0: tiling? 
1:x 2:y 3: max x + # 4: max y 5: overlap x 6: overlap y + ("TemIllumination", "3f4"), # 0: spotsize 1: intensity + ("TemShutter", "i4"), + ("TemMisc", "32f4"), + ("CameraType", "V160"), + ("PhysicalPixelSizeX", "f4"), + ("PhysicalPixelSizeY", "f4"), + ("OffsetX", "i4"), + ("OffsetY", "i4"), + ("BinningX", "i4"), + ("BinningY", "i4"), + ("ExposureTime", "f4"), + ("Gain", "f4"), + ("ReadoutRate", "f4"), + ("FlatfieldDescription", "V160"), + ("Sensitivity", "f4"), + ("Dose", "f4"), + ("CamMisc", "32f4"), + ("FeiMicroscopeInformation", "V1024"), + ("FeiSpecimenInformation", "V1024"), + ("Magic", "u4"), + ] + + def MM_HEADER(): + # Olympus FluoView MM_Header + MM_DIMENSION = [ + ("Name", "a16"), + ("Size", "i4"), + ("Origin", "f8"), + ("Resolution", "f8"), + ("Unit", "a64"), + ] + return [ + ("HeaderFlag", "i2"), + ("ImageType", "u1"), + ("ImageName", "a257"), + ("OffsetData", "u4"), + ("PaletteSize", "i4"), + ("OffsetPalette0", "u4"), + ("OffsetPalette1", "u4"), + ("CommentSize", "i4"), + ("OffsetComment", "u4"), + ("Dimensions", MM_DIMENSION, 10), + ("OffsetPosition", "u4"), + ("MapType", "i2"), + ("MapMin", "f8"), + ("MapMax", "f8"), + ("MinValue", "f8"), + ("MaxValue", "f8"), + ("OffsetMap", "u4"), + ("Gamma", "f8"), + ("Offset", "f8"), + ("GrayChannel", MM_DIMENSION), + ("OffsetThumbnail", "u4"), + ("VoiceField", "i4"), + ("OffsetVoiceField", "u4"), + ] + + def MM_DIMENSIONS(): + # Map FluoView MM_Header.Dimensions to axes characters + return { + "X": "X", + "Y": "Y", + "Z": "Z", + "T": "T", + "CH": "C", + "WAVELENGTH": "C", + "TIME": "T", + "XY": "R", + "EVENT": "V", + "EXPOSURE": "L", + } + + def UIC_TAGS(): + # Map Universal Imaging Corporation MetaMorph internal tag ids to + # name and type + from fractions import Fraction # delayed import + + return [ + ("AutoScale", int), + ("MinScale", int), + ("MaxScale", int), + ("SpatialCalibration", int), + ("XCalibration", Fraction), + ("YCalibration", Fraction), + ("CalibrationUnits", str), + ("Name", str), + 
("ThreshState", int), + ("ThreshStateRed", int), + ("tagid_10", None), # undefined + ("ThreshStateGreen", int), + ("ThreshStateBlue", int), + ("ThreshStateLo", int), + ("ThreshStateHi", int), + ("Zoom", int), + ("CreateTime", julian_datetime), + ("LastSavedTime", julian_datetime), + ("currentBuffer", int), + ("grayFit", None), + ("grayPointCount", None), + ("grayX", Fraction), + ("grayY", Fraction), + ("grayMin", Fraction), + ("grayMax", Fraction), + ("grayUnitName", str), + ("StandardLUT", int), + ("wavelength", int), + ("StagePosition", "(%i,2,2)u4"), # N xy positions as fract + ("CameraChipOffset", "(%i,2,2)u4"), # N xy offsets as fract + ("OverlayMask", None), + ("OverlayCompress", None), + ("Overlay", None), + ("SpecialOverlayMask", None), + ("SpecialOverlayCompress", None), + ("SpecialOverlay", None), + ("ImageProperty", read_uic_image_property), + ("StageLabel", "%ip"), # N str + ("AutoScaleLoInfo", Fraction), + ("AutoScaleHiInfo", Fraction), + ("AbsoluteZ", "(%i,2)u4"), # N fractions + ("AbsoluteZValid", "(%i,)u4"), # N long + ("Gamma", "I"), # 'I' uses offset + ("GammaRed", "I"), + ("GammaGreen", "I"), + ("GammaBlue", "I"), + ("CameraBin", "2I"), + ("NewLUT", int), + ("ImagePropertyEx", None), + ("PlaneProperty", int), + ("UserLutTable", "(256,3)u1"), + ("RedAutoScaleInfo", int), + ("RedAutoScaleLoInfo", Fraction), + ("RedAutoScaleHiInfo", Fraction), + ("RedMinScaleInfo", int), + ("RedMaxScaleInfo", int), + ("GreenAutoScaleInfo", int), + ("GreenAutoScaleLoInfo", Fraction), + ("GreenAutoScaleHiInfo", Fraction), + ("GreenMinScaleInfo", int), + ("GreenMaxScaleInfo", int), + ("BlueAutoScaleInfo", int), + ("BlueAutoScaleLoInfo", Fraction), + ("BlueAutoScaleHiInfo", Fraction), + ("BlueMinScaleInfo", int), + ("BlueMaxScaleInfo", int), + # ('OverlayPlaneColor', read_uic_overlay_plane_color), + ] + + def PILATUS_HEADER(): + # PILATUS CBF Header Specification, Version 1.4 + # Map key to [value_indices], type + return { + "Detector": ([slice(1, None)], str), + 
"Pixel_size": ([1, 4], float), + "Silicon": ([3], float), + "Exposure_time": ([1], float), + "Exposure_period": ([1], float), + "Tau": ([1], float), + "Count_cutoff": ([1], int), + "Threshold_setting": ([1], float), + "Gain_setting": ([1, 2], str), + "N_excluded_pixels": ([1], int), + "Excluded_pixels": ([1], str), + "Flat_field": ([1], str), + "Trim_file": ([1], str), + "Image_path": ([1], str), + # optional + "Wavelength": ([1], float), + "Energy_range": ([1, 2], float), + "Detector_distance": ([1], float), + "Detector_Voffset": ([1], float), + "Beam_xy": ([1, 2], float), + "Flux": ([1], str), + "Filter_transmission": ([1], float), + "Start_angle": ([1], float), + "Angle_increment": ([1], float), + "Detector_2theta": ([1], float), + "Polarization": ([1], float), + "Alpha": ([1], float), + "Kappa": ([1], float), + "Phi": ([1], float), + "Phi_increment": ([1], float), + "Chi": ([1], float), + "Chi_increment": ([1], float), + "Oscillation_axis": ([slice(1, None)], str), + "N_oscillations": ([1], int), + "Start_position": ([1], float), + "Position_increment": ([1], float), + "Shutter_time": ([1], float), + "Omega": ([1], float), + "Omega_increment": ([1], float), + } + + def REVERSE_BITORDER_BYTES(): + # Bytes with reversed bitorder + return ( + b"\x00\x80@\xc0 \xa0`\xe0\x10\x90P\xd00\xb0p\xf0\x08\x88H\xc8(" + b"\xa8h\xe8\x18\x98X\xd88\xb8x\xf8\x04\x84D\xc4$\xa4d\xe4\x14" + b"\x94T\xd44\xb4t\xf4\x0c\x8cL\xcc,\xacl\xec\x1c\x9c\\\xdc<\xbc|" + b'\xfc\x02\x82B\xc2"\xa2b\xe2\x12\x92R\xd22\xb2r\xf2\n\x8aJ\xca*' + b"\xaaj\xea\x1a\x9aZ\xda:\xbaz\xfa\x06\x86F\xc6&\xa6f\xe6\x16" + b"\x96V\xd66\xb6v\xf6\x0e\x8eN\xce.\xaen\xee\x1e\x9e^\xde>\xbe~" + b"\xfe\x01\x81A\xc1!\xa1a\xe1\x11\x91Q\xd11\xb1q\xf1\t\x89I\xc9)" + b"\xa9i\xe9\x19\x99Y\xd99\xb9y\xf9\x05\x85E\xc5%\xa5e\xe5\x15" + b"\x95U\xd55\xb5u\xf5\r\x8dM\xcd-\xadm\xed\x1d\x9d]\xdd=\xbd}" + b"\xfd\x03\x83C\xc3#\xa3c\xe3\x13\x93S\xd33\xb3s\xf3\x0b\x8bK" + b"\xcb+\xabk\xeb\x1b\x9b[\xdb;\xbb{\xfb\x07\x87G\xc7'\xa7g\xe7" + 
b"\x17\x97W\xd77\xb7w\xf7\x0f\x8fO\xcf/\xafo\xef\x1f\x9f_" + b"\xdf?\xbf\x7f\xff" + ) + + def REVERSE_BITORDER_ARRAY(): + # Numpy array of bytes with reversed bitorder + return numpy.frombuffer(TIFF.REVERSE_BITORDER_BYTES, dtype="uint8") + + def ALLOCATIONGRANULARITY(): + # alignment for writing contiguous data to TIFF + import mmap # delayed import + + return mmap.ALLOCATIONGRANULARITY + + +def read_tags(fh, byteorder, offsetsize, tagnames, customtags=None, maxifds=None): + """Read tags from chain of IFDs and return as list of dicts. + + The file handle position must be at a valid IFD header. + + """ + if offsetsize == 4: + offsetformat = byteorder + "I" + tagnosize = 2 + tagnoformat = byteorder + "H" + tagsize = 12 + tagformat1 = byteorder + "HH" + tagformat2 = byteorder + "I4s" + elif offsetsize == 8: + offsetformat = byteorder + "Q" + tagnosize = 8 + tagnoformat = byteorder + "Q" + tagsize = 20 + tagformat1 = byteorder + "HH" + tagformat2 = byteorder + "Q8s" + else: + raise ValueError("invalid offset size") + + if customtags is None: + customtags = {} + if maxifds is None: + maxifds = 2**32 + + result = [] + unpack = struct.unpack + offset = fh.tell() + while len(result) < maxifds: + # loop over IFDs + try: + tagno = unpack(tagnoformat, fh.read(tagnosize))[0] + if tagno > 4096: + raise ValueError("suspicious number of tags") + except Exception: + warnings.warn("corrupted tag list at offset %i" % offset) + break + + tags = {} + data = fh.read(tagsize * tagno) + pos = fh.tell() + index = 0 + for _ in range(tagno): + code, type_ = unpack(tagformat1, data[index : index + 4]) + count, value = unpack(tagformat2, data[index + 4 : index + tagsize]) + index += tagsize + name = tagnames.get(code, str(code)) + try: + dtype = TIFF.DATA_FORMATS[type_] + except KeyError: + raise TiffTag.Error("unknown tag data type %i" % type_) + + fmt = "%s%i%s" % (byteorder, count * int(dtype[0]), dtype[1]) + size = struct.calcsize(fmt) + if size > offsetsize or code in customtags: + 
offset = unpack(offsetformat, value)[0] + if offset < 8 or offset > fh.size - size: + raise TiffTag.Error("invalid tag value offset %i" % offset) + fh.seek(offset) + if code in customtags: + readfunc = customtags[code][1] + value = readfunc(fh, byteorder, dtype, count, offsetsize) + elif type_ == 7 or (count > 1 and dtype[-1] == "B"): + value = read_bytes(fh, byteorder, dtype, count, offsetsize) + elif code in tagnames or dtype[-1] == "s": + value = unpack(fmt, fh.read(size)) + else: + value = read_numpy(fh, byteorder, dtype, count, offsetsize) + elif dtype[-1] == "B" or type_ == 7: + value = value[:size] + else: + value = unpack(fmt, value[:size]) + + if code not in customtags and code not in TIFF.TAG_TUPLE: + if len(value) == 1: + value = value[0] + if type_ != 7 and dtype[-1] == "s" and isinstance(value, bytes): + # TIFF ASCII fields can contain multiple strings, + # each terminated with a NUL + try: + value = bytes2str(stripascii(value).strip()) + except UnicodeDecodeError: + warnings.warn("tag %i: coercing invalid ASCII to bytes" % code) + + tags[name] = value + + result.append(tags) + # read offset to next page + fh.seek(pos) + offset = unpack(offsetformat, fh.read(offsetsize))[0] + if offset == 0: + break + if offset >= fh.size: + warnings.warn("invalid page offset %i" % offset) + break + fh.seek(offset) + + if result and maxifds == 1: + result = result[0] + return result + + +def read_exif_ifd(fh, byteorder, dtype, count, offsetsize): + """Read EXIF tags from file and return as dict.""" + exif = read_tags(fh, byteorder, offsetsize, TIFF.EXIF_TAGS, maxifds=1) + for name in ("ExifVersion", "FlashpixVersion"): + try: + exif[name] = bytes2str(exif[name]) + except Exception: + pass + if "UserComment" in exif: + idcode = exif["UserComment"][:8] + try: + if idcode == b"ASCII\x00\x00\x00": + exif["UserComment"] = bytes2str(exif["UserComment"][8:]) + elif idcode == b"UNICODE\x00": + exif["UserComment"] = exif["UserComment"][8:].decode("utf-16") + except Exception: + 
pass + return exif + + +def read_gps_ifd(fh, byteorder, dtype, count, offsetsize): + """Read GPS tags from file and return as dict.""" + return read_tags(fh, byteorder, offsetsize, TIFF.GPS_TAGS, maxifds=1) + + +def read_interoperability_ifd(fh, byteorder, dtype, count, offsetsize): + """Read Interoperability tags from file and return as dict.""" + tag_names = {1: "InteroperabilityIndex"} + return read_tags(fh, byteorder, offsetsize, tag_names, maxifds=1) + + +def read_bytes(fh, byteorder, dtype, count, offsetsize): + """Read tag data from file and return as byte string.""" + dtype = "B" if dtype[-1] == "s" else byteorder + dtype[-1] + count *= numpy.dtype(dtype).itemsize + data = fh.read(count) + if len(data) != count: + warnings.warn("failed to read all bytes: %i, %i" % (len(data), count)) + return data + + +def read_utf8(fh, byteorder, dtype, count, offsetsize): + """Read tag data from file and return as unicode string.""" + return fh.read(count).decode("utf-8") + + +def read_numpy(fh, byteorder, dtype, count, offsetsize): + """Read tag data from file and return as numpy array.""" + dtype = "b" if dtype[-1] == "s" else byteorder + dtype[-1] + return fh.read_array(dtype, count) + + +def read_colormap(fh, byteorder, dtype, count, offsetsize): + """Read ColorMap data from file and return as numpy array.""" + cmap = fh.read_array(byteorder + dtype[-1], count) + cmap.shape = (3, -1) + return cmap + + +def read_json(fh, byteorder, dtype, count, offsetsize): + """Read JSON tag data from file and return as object.""" + data = fh.read(count) + try: + return json.loads(unicode(stripnull(data), "utf-8")) + except ValueError: + warnings.warn("invalid JSON '%s'" % data) + + +def read_mm_header(fh, byteorder, dtype, count, offsetsize): + """Read FluoView mm_header tag from file and return as dict.""" + mmh = fh.read_record(TIFF.MM_HEADER, byteorder=byteorder) + mmh = recarray2dict(mmh) + mmh["Dimensions"] = [ + (bytes2str(d[0]).strip(), d[1], d[2], d[3], 
bytes2str(d[4]).strip()) + for d in mmh["Dimensions"] + ] + d = mmh["GrayChannel"] + mmh["GrayChannel"] = ( + bytes2str(d[0]).strip(), + d[1], + d[2], + d[3], + bytes2str(d[4]).strip(), + ) + return mmh + + +def read_mm_stamp(fh, byteorder, dtype, count, offsetsize): + """Read FluoView mm_stamp tag from file and return as numpy.ndarray.""" + return fh.read_array(byteorder + "f8", 8) + + +def read_uic1tag(fh, byteorder, dtype, count, offsetsize, planecount=None): + """Read MetaMorph STK UIC1Tag from file and return as dict. + + Return empty dictionary if planecount is unknown. + + """ + assert dtype in ("2I", "1I") and byteorder == "<" + result = {} + if dtype == "2I": + # pre MetaMorph 2.5 (not tested) + values = fh.read_array(" structure_size: + break + lsminfo.append((name, dtype)) + else: + lsminfo = TIFF.CZ_LSMINFO + + lsminfo = fh.read_record(lsminfo, byteorder=byteorder) + lsminfo = recarray2dict(lsminfo) + + # read LSM info subrecords at offsets + for name, reader in TIFF.CZ_LSMINFO_READERS.items(): + if reader is None: + continue + offset = lsminfo.get("Offset" + name, 0) + if offset < 8: + continue + fh.seek(offset) + try: + lsminfo[name] = reader(fh) + except ValueError: + pass + return lsminfo + + +def read_lsm_floatpairs(fh): + """Read LSM sequence of float pairs from file and return as list.""" + size = struct.unpack(" 0: + esize, etime, etype = struct.unpack(" 4: + size = struct.unpack(" 1 else {} + return frame_data, roi_data + + +def read_micromanager_metadata(fh): + """Read MicroManager non-TIFF settings from open file and return as dict. + + The settings can be used to read image data without parsing the TIFF file. + + Raise ValueError if the file does not contain valid MicroManager metadata. 
+ + """ + fh.seek(0) + try: + byteorder = {b"II": "<", b"MM": ">"}[fh.read(2)] + except IndexError: + raise ValueError("not a MicroManager TIFF file") + + result = {} + fh.seek(8) + ( + index_header, + index_offset, + display_header, + display_offset, + comments_header, + comments_offset, + summary_header, + summary_length, + ) = struct.unpack(byteorder + "IIIIIIII", fh.read(32)) + + if summary_header != 2355492: + raise ValueError("invalid MicroManager summary header") + result["Summary"] = read_json(fh, byteorder, None, summary_length, None) + + if index_header != 54773648: + raise ValueError("invalid MicroManager index header") + fh.seek(index_offset) + header, count = struct.unpack(byteorder + "II", fh.read(8)) + if header != 3453623: + raise ValueError("invalid MicroManager index header") + data = struct.unpack(byteorder + "IIIII" * count, fh.read(20 * count)) + result["IndexMap"] = { + "Channel": data[::5], + "Slice": data[1::5], + "Frame": data[2::5], + "Position": data[3::5], + "Offset": data[4::5], + } + + if display_header != 483765892: + raise ValueError("invalid MicroManager display header") + fh.seek(display_offset) + header, count = struct.unpack(byteorder + "II", fh.read(8)) + if header != 347834724: + raise ValueError("invalid MicroManager display header") + result["DisplaySettings"] = read_json(fh, byteorder, None, count, None) + + if comments_header != 99384722: + raise ValueError("invalid MicroManager comments header") + fh.seek(comments_offset) + header, count = struct.unpack(byteorder + "II", fh.read(8)) + if header != 84720485: + raise ValueError("invalid MicroManager comments header") + result["Comments"] = read_json(fh, byteorder, None, count, None) + + return result + + +def read_metaseries_catalog(fh): + """Read MetaSeries non-TIFF hint catalog from file. + + Raise ValueError if the file does not contain a valid hint catalog. 

    """
    # TODO: implement read_metaseries_catalog
    raise NotImplementedError()


def imagej_metadata_tags(metadata, byteorder):
    """Return IJMetadata and IJMetadataByteCounts tags from metadata dict.

    The tags can be passed to the TiffWriter.save function as extratags.

    The metadata dict may contain the following keys and values:

    Info : str
        Human-readable information as string.
    Labels : sequence of str
        Human-readable labels for each channel.
    Ranges : sequence of doubles
        Lower and upper values for each channel.
    LUTs : sequence of (3, 256) uint8 ndarrays
        Color palettes for each channel.
    Plot : bytes
        Undocumented ImageJ internal format.
    ROI: bytes
        Undocumented ImageJ internal region of interest format.
    Overlays : bytes
        Undocumented ImageJ internal format.

    """
    # magic marker; byte order of the marker mirrors the file's byte order
    header = [{">": b"IJIJ", "<": b"JIJI"}[byteorder]]
    bytecounts = [0]
    body = []

    def _string(data, byteorder):
        # ImageJ stores strings as UTF-16 in the file's byte order
        return data.encode("utf-16" + {">": "be", "<": "le"}[byteorder])

    def _doubles(data, byteorder):
        return struct.pack(byteorder + ("d" * len(data)), *data)

    def _ndarray(data, byteorder):
        return data.tobytes()

    def _bytes(data, byteorder):
        return data

    # (key, 4-byte type code, count (None: one entry per value), encoder)
    metadata_types = (
        ("Info", b"info", 1, _string),
        ("Labels", b"labl", None, _string),
        ("Ranges", b"rang", 1, _doubles),
        ("LUTs", b"luts", None, _ndarray),
        ("Plot", b"plot", 1, _bytes),
        ("ROI", b"roi ", 1, _bytes),
        ("Overlays", b"over", None, _bytes),
    )

    for key, mtype, count, func in metadata_types:
        # accept lowercase variants of the documented keys
        if key.lower() in metadata:
            key = key.lower()
        elif key not in metadata:
            continue
        if byteorder == "<":
            # type codes are stored byte-reversed on little-endian
            mtype = mtype[::-1]
        values = metadata[key]
        if count is None:
            count = len(values)
        else:
            values = [values]
        header.append(mtype + struct.pack(byteorder + "I", count))
        for value in values:
            data = func(value, byteorder)
            body.append(data)
            bytecounts.append(len(data))

    if not body:
        return ()
    body = b"".join(body)
    header = b"".join(header)
    data = header + body
    # first byte count covers the header itself
    bytecounts[0] = len(header)
    bytecounts = struct.pack(byteorder + ("I" * len(bytecounts)), *bytecounts)
    # 50839: IJMetadata; 50838: IJMetadataByteCounts
    return (
        (50839, "B", len(data), data, True),
        (50838, "I", len(bytecounts) // 4, bytecounts, True),
    )


def imagej_metadata(data, bytecounts, byteorder):
    """Return IJMetadata tag value as dict.

    The 'Info' string can have multiple formats, e.g. OIF or ScanImage,
    that might be parsed into dicts using the matlabstr2py or
    oiffile.SettingsFile functions.

    """

    def _string(data, byteorder):
        return data.decode("utf-16" + {">": "be", "<": "le"}[byteorder])

    def _doubles(data, byteorder):
        return struct.unpack(byteorder + ("d" * (len(data) // 8)), data)

    def _lut(data, byteorder):
        return numpy.frombuffer(data, "uint8").reshape(-1, 256)

    def _bytes(data, byteorder):
        return data

    metadata_types = {  # big-endian
        b"info": ("Info", _string),
        b"labl": ("Labels", _string),
        b"rang": ("Ranges", _doubles),
        b"luts": ("LUTs", _lut),
        b"plot": ("Plots", _bytes),
        b"roi ": ("ROI", _bytes),
        b"over": ("Overlays", _bytes),
    }
    metadata_types.update(  # little-endian
        dict((k[::-1], v) for k, v in metadata_types.items())
    )

    if not bytecounts:
        raise ValueError("no ImageJ metadata")

    if data[:4] not in (b"IJIJ", b"JIJI"):
        raise ValueError("invalid ImageJ metadata")

    # bytecounts[0] is the header size; sanity check it (4-byte magic plus
    # 1 to 100 (code, count) entries of 8 bytes each)
    header_size = bytecounts[0]
    if header_size < 12 or header_size > 804:
        raise ValueError("invalid ImageJ metadata header size")

    ntypes = (header_size - 4) // 8
    header = struct.unpack(byteorder + "4sI" * ntypes, data[4 : 4 + ntypes * 8])
    pos = 4 + ntypes * 8
    counter = 0
    result = {}
    for mtype, count in zip(header[::2], header[1::2]):
        values = []
        # NOTE(review): the fallback decoder for unknown type codes is
        # read_bytes, whose signature expects a file handle, not a slice —
        # presumably unreachable for known codes; confirm before relying on
        # unknown-type decoding
        name, func = metadata_types.get(mtype, (bytes2str(mtype), read_bytes))
        for _ in range(count):
            counter += 1
            pos1 = pos + bytecounts[counter]
            values.append(func(data[pos:pos1], byteorder))
            pos = pos1
        result[name.strip()] = values[0] if count == 1 else values
return result + + +def imagej_description_metadata(description): + """Return metatata from ImageJ image description as dict. + + Raise ValueError if not a valid ImageJ description. + + >>> description = 'ImageJ=1.11a\\nimages=510\\nhyperstack=true\\n' + >>> imagej_description_metadata(description) # doctest: +SKIP + {'ImageJ': '1.11a', 'images': 510, 'hyperstack': True} + + """ + + def _bool(val): + return {"true": True, "false": False}[val.lower()] + + result = {} + for line in description.splitlines(): + try: + key, val = line.split("=") + except Exception: + continue + key = key.strip() + val = val.strip() + for dtype in (int, float, _bool): + try: + val = dtype(val) + break + except Exception: + pass + result[key] = val + + if "ImageJ" not in result: + raise ValueError("not a ImageJ image description") + return result + + +def imagej_description( + shape, + rgb=None, + colormaped=False, + version="1.11a", + hyperstack=None, + mode=None, + loop=None, + **kwargs +): + """Return ImageJ image description from data shape. + + ImageJ can handle up to 6 dimensions in order TZCYXS. 
+ + >>> imagej_description((51, 5, 2, 196, 171)) # doctest: +SKIP + ImageJ=1.11a + images=510 + channels=2 + slices=5 + frames=51 + hyperstack=true + mode=grayscale + loop=false + + """ + if colormaped: + raise NotImplementedError("ImageJ colormapping not supported") + shape = imagej_shape(shape, rgb=rgb) + rgb = shape[-1] in (3, 4) + + result = ["ImageJ=%s" % version] + append = [] + result.append("images=%i" % product(shape[:-3])) + if hyperstack is None: + hyperstack = True + append.append("hyperstack=true") + else: + append.append("hyperstack=%s" % bool(hyperstack)) + if shape[2] > 1: + result.append("channels=%i" % shape[2]) + if mode is None and not rgb: + mode = "grayscale" + if hyperstack and mode: + append.append("mode=%s" % mode) + if shape[1] > 1: + result.append("slices=%i" % shape[1]) + if shape[0] > 1: + result.append("frames=%i" % shape[0]) + if loop is None: + append.append("loop=false") + if loop is not None: + append.append("loop=%s" % bool(loop)) + for key, value in kwargs.items(): + append.append("%s=%s" % (key.lower(), value)) + + return "\n".join(result + append + [""]) + + +def imagej_shape(shape, rgb=None): + """Return shape normalized to 6D ImageJ hyperstack TZCYXS. + + Raise ValueError if not a valid ImageJ hyperstack shape. + + >>> imagej_shape((2, 3, 4, 5, 3), False) + (2, 3, 4, 5, 3, 1) + + """ + shape = tuple(int(i) for i in shape) + ndim = len(shape) + if 1 > ndim > 6: + raise ValueError("invalid ImageJ hyperstack: not 2 to 6 dimensional") + if rgb is None: + rgb = shape[-1] in (3, 4) and ndim > 2 + if rgb and shape[-1] not in (3, 4): + raise ValueError("invalid ImageJ hyperstack: not a RGB image") + if not rgb and ndim == 6 and shape[-1] != 1: + raise ValueError("invalid ImageJ hyperstack: not a non-RGB image") + if rgb or shape[-1] == 1: + return (1,) * (6 - ndim) + shape + return (1,) * (5 - ndim) + shape + (1,) + + +def json_description(shape, **metadata): + """Return JSON image description from data shape and other meta data. 

    Return UTF-8 encoded JSON.

    >>> json_description((256, 256, 3), axes='YXS')  # doctest: +SKIP
    b'{"shape": [256, 256, 3], "axes": "YXS"}'

    """
    metadata.update(shape=shape)
    return json.dumps(metadata)  # .encode('utf-8')


def json_description_metadata(description):
    """Return metadata from JSON formatted image description as dict.

    Raise ValueError if description is of unknown format.

    >>> description = '{"shape": [256, 256, 3], "axes": "YXS"}'
    >>> json_description_metadata(description)  # doctest: +SKIP
    {'shape': [256, 256, 3], 'axes': 'YXS'}
    >>> json_description_metadata('shape=(256, 256, 3)')
    {'shape': (256, 256, 3)}

    """
    if description[:6] == "shape=":
        # old style 'shaped' description; not JSON
        # [7:-1] skips the opening '(' and closing ')'
        shape = tuple(int(i) for i in description[7:-1].split(","))
        return dict(shape=shape)
    if description[:1] == "{" and description[-1:] == "}":
        # JSON description
        return json.loads(description)
    raise ValueError("invalid JSON image description", description)


def fluoview_description_metadata(description, ignoresections=None):
    """Return metadata from FluoView image description as dict.

    The FluoView image description format is unspecified. Expect failures.

    >>> descr = ('[Intensity Mapping]\\nMap Ch0: Range=00000 to 02047\\n'
    ...
    ...          '[Intensity Mapping End]')
    >>> fluoview_description_metadata(descr)
    {'Intensity Mapping': {'Map Ch0: Range': '00000 to 02047'}}

    """
    if not description.startswith("["):
        raise ValueError("invalid FluoView image description")
    if ignoresections is None:
        # these sections are kept as raw text, not parsed into dicts
        ignoresections = {"Region Info (Fields)", "Protocol Description"}

    result = {}
    # stack of currently open sections; result is the root
    sections = [result]
    comment = False
    for line in description.splitlines():
        if not comment:
            line = line.strip()
        if not line:
            continue
        if line[0] == "[":
            if line[-5:] == " End]":
                # close section
                del sections[-1]
                section = sections[-1]
                name = line[1:-5]
                if comment:
                    # join raw lines collected for an ignored section
                    section[name] = "\n".join(section[name])
                if name[:4] == "LUT ":
                    # LUT sections become (n, 3) uint8 arrays
                    a = numpy.array(section[name], dtype="uint8")
                    a.shape = -1, 3
                    section[name] = a
                continue
            # new section
            comment = False
            name = line[1:-1]
            if name[:4] == "LUT ":
                section = []
            elif name in ignoresections:
                section = []
                comment = True
            else:
                section = {}
            sections.append(section)
            result[name] = section
            continue
        # add entry
        if comment:
            section.append(line)
            continue
        line = line.split("=", 1)
        if len(line) == 1:
            section[line[0].strip()] = None
            continue
        key, value = line
        if key[:4] == "RGB ":
            section.extend(int(rgb) for rgb in value.split())
        else:
            section[key.strip()] = astype(value.strip())
    return result


def pilatus_description_metadata(description):
    """Return metadata from Pilatus image description as dict.

    Return metadata from Pilatus pixel array detectors by Dectris, created
    by camserver or TVX software.

    >>> pilatus_description_metadata('# Pixel_size 172e-6 m x 172e-6 m')
    {'Pixel_size': (0.000172, 0.000172)}

    """
    result = {}
    if not description.startswith("# "):
        return result
    # normalize punctuation to whitespace before tokenizing
    for c in "#:=,()":
        description = description.replace(c, " ")
    for line in description.split("\n"):
        # NOTE(review): comparing a 2-char slice against a 1-char string can
        # never be equal, so every line is skipped; upstream tifffile uses a
        # two-space string here — looks like whitespace lost in transit;
        # confirm against upstream before relying on this parser
        if line[:2] != " ":
            continue
        line = line.split()
        name = line[0]
        if line[0] not in TIFF.PILATUS_HEADER:
            try:
                result["DateTime"] = datetime.datetime.strptime(
                    " ".join(line), "%Y-%m-%dT%H %M %S.%f"
                )
            except Exception:
                result[name] = " ".join(line[1:])
            continue
        indices, dtype = TIFF.PILATUS_HEADER[line[0]]
        if isinstance(indices[0], slice):
            # assumes one slice
            values = line[indices[0]]
        else:
            values = [line[i] for i in indices]
        if dtype is float and values[0] == "not":
            # 'not measured' and similar become NaN
            values = ["NaN"]
        values = tuple(dtype(v) for v in values)
        if dtype == str:
            values = " ".join(values)
        elif len(values) == 1:
            values = values[0]
        result[name] = values
    return result


def svs_description_metadata(description):
    """Return metadata from Aperio image description as dict.

    The Aperio image description format is unspecified. Expect failures.

    >>> svs_description_metadata('Aperio Image Library v1.0')
    {'Aperio Image Library': 'v1.0'}

    """
    if not description.startswith("Aperio Image Library "):
        raise ValueError("invalid Aperio image description")
    result = {}
    lines = description.split("\n")
    key, value = lines[0].strip().rsplit(None, 1)  # 'Aperio Image Library'
    result[key.strip()] = value.strip()
    if len(lines) == 1:
        return result
    items = lines[1].split("|")
    result[""] = items[0].strip()  # TODO: parse this?
    for item in items[1:]:
        key, value = item.split(" = ")
        result[key.strip()] = astype(value.strip())
    return result


def stk_description_metadata(description):
    """Return metadata from MetaMorph image description as list of dict.

    The MetaMorph image description format is unspecified. Expect failures.
+ + """ + description = description.strip() + if not description: + return [] + try: + description = bytes2str(description) + except UnicodeDecodeError: + warnings.warn("failed to parse MetaMorph image description") + return [] + result = [] + for plane in description.split("\x00"): + d = {} + for line in plane.split("\r\n"): + line = line.split(":", 1) + if len(line) > 1: + name, value = line + d[name.strip()] = astype(value.strip()) + else: + value = line[0].strip() + if value: + if "" in d: + d[""].append(value) + else: + d[""] = [value] + result.append(d) + return result + + +def metaseries_description_metadata(description): + """Return metatata from MetaSeries image description as dict.""" + if not description.startswith(""): + raise ValueError("invalid MetaSeries image description") + + from xml.etree import cElementTree as etree # delayed import + + root = etree.fromstring(description) + types = {"float": float, "int": int, "bool": lambda x: asbool(x, "on", "off")} + + def parse(root, result): + # recursive + for child in root: + attrib = child.attrib + if not attrib: + result[child.tag] = parse(child, {}) + continue + if "id" in attrib: + i = attrib["id"] + t = attrib["type"] + v = attrib["value"] + if t in types: + result[i] = types[t](v) + else: + result[i] = v + return result + + adict = parse(root, {}) + if "Description" in adict: + adict["Description"] = adict["Description"].replace(" ", "\n") + return adict + + +def scanimage_description_metadata(description): + """Return metatata from ScanImage image description as dict.""" + return matlabstr2py(description) + + +def scanimage_artist_metadata(artist): + """Return metatata from ScanImage artist tag as dict.""" + try: + return json.loads(artist) + except ValueError: + warnings.warn("invalid JSON '%s'" % artist) + + +def _replace_by(module_function, package=__package__, warn=None, prefix="_"): + """Try replace decorated function by module.function.""" + return lambda f: f # imageio: just use what's in 
here + + def _warn(e, warn): + if warn is None: + warn = "\n Functionality might be degraded or be slow.\n" + elif warn is True: + warn = "" + elif not warn: + return + warnings.warn("%s%s" % (e, warn)) + + try: + from importlib import import_module + except ImportError as e: + _warn(e, warn) + return identityfunc + + def decorate(func, module_function=module_function, warn=warn): + module, function = module_function.split(".") + try: + if package: + module = import_module("." + module, package=package) + else: + module = import_module(module) + except Exception as e: + _warn(e, warn) + return func + try: + func, oldfunc = getattr(module, function), func + except Exception as e: + _warn(e, warn) + return func + globals()[prefix + func.__name__] = oldfunc + return func + + return decorate + + +def decode_floats(data): + """Decode floating point horizontal differencing. + + The TIFF predictor type 3 reorders the bytes of the image values and + applies horizontal byte differencing to improve compression of floating + point images. The ordering of interleaved color channels is preserved. + + Parameters + ---------- + data : numpy.ndarray + The image to be decoded. The dtype must be a floating point. + The shape must include the number of contiguous samples per pixel + even if 1. 
+ + """ + shape = data.shape + dtype = data.dtype + if len(shape) < 3: + raise ValueError("invalid data shape") + if dtype.char not in "dfe": + raise ValueError("not a floating point image") + littleendian = data.dtype.byteorder == "<" or ( + sys.byteorder == "little" and data.dtype.byteorder == "=" + ) + # undo horizontal byte differencing + data = data.view("uint8") + data.shape = shape[:-2] + (-1,) + shape[-1:] + numpy.cumsum(data, axis=-2, dtype="uint8", out=data) + # reorder bytes + if littleendian: + data.shape = shape[:-2] + (-1,) + shape[-2:] + data = numpy.swapaxes(data, -3, -2) + data = numpy.swapaxes(data, -2, -1) + data = data[..., ::-1] + # back to float + data = numpy.ascontiguousarray(data) + data = data.view(dtype) + data.shape = shape + return data + + +@_replace_by("_tifffile.decode_packbits") +def decode_packbits(encoded): + """Decompress PackBits encoded byte string. + + PackBits is a simple byte-oriented run-length compression scheme. + + """ + func = ord if sys.version[0] == "2" else identityfunc + result = [] + result_extend = result.extend + i = 0 + try: + while True: + n = func(encoded[i]) + 1 + i += 1 + if n < 129: + result_extend(encoded[i : i + n]) + i += n + elif n > 129: + result_extend(encoded[i : i + 1] * (258 - n)) + i += 1 + except IndexError: + pass + return b"".join(result) if sys.version[0] == "2" else bytes(result) + + +@_replace_by("_tifffile.decode_lzw") +def decode_lzw(encoded): + """Decompress LZW (Lempel-Ziv-Welch) encoded TIFF strip (byte string). + + The strip must begin with a CLEAR code and end with an EOI code. + + This implementation of the LZW decoding algorithm is described in (1) and + is not compatible with old style LZW compressed files like quad-lzw.tif. 

    """
    len_encoded = len(encoded)
    bitcount_max = len_encoded * 8
    unpack = struct.unpack

    # initial string table: codes 0-255 are single bytes; 256 (CLEAR) and
    # 257 (EOI) are placeholders
    if sys.version[0] == "2":
        newtable = [chr(i) for i in range(256)]
    else:
        newtable = [bytes([i]) for i in range(256)]
    newtable.extend((0, 0))

    def next_code():
        """Return integer of 'bitw' bits at 'bitcount' position in encoded."""
        start = bitcount // 8
        s = encoded[start : start + 4]
        try:
            code = unpack(">I", s)[0]
        except Exception:
            # near end of strip: zero-pad to 4 bytes
            code = unpack(">I", s + b"\x00" * (4 - len(s)))[0]
        code <<= bitcount % 8
        code &= mask
        return code >> shr

    # table size at which the code width grows: bit-width, shr-bits, bit-mask
    switchbitch = {  # code: bit-width, shr-bits, bit-mask
        255: (9, 23, int(9 * "1" + "0" * 23, 2)),
        511: (10, 22, int(10 * "1" + "0" * 22, 2)),
        1023: (11, 21, int(11 * "1" + "0" * 21, 2)),
        2047: (12, 20, int(12 * "1" + "0" * 20, 2)),
    }
    bitw, shr, mask = switchbitch[255]
    bitcount = 0

    if len_encoded < 4:
        raise ValueError("strip must be at least 4 characters long")

    if next_code() != 256:
        raise ValueError("strip must begin with CLEAR code")

    code = 0
    oldcode = 0
    result = []
    result_append = result.append
    while True:
        code = next_code()  # ~5% faster when inlining this function
        bitcount += bitw
        if code == 257 or bitcount >= bitcount_max:  # EOI
            break
        if code == 256:  # CLEAR
            # reset string table and code width
            table = newtable[:]
            table_append = table.append
            lentable = 258
            bitw, shr, mask = switchbitch[255]
            code = next_code()
            bitcount += bitw
            if code == 257:  # EOI
                break
            result_append(table[code])
        else:
            if code < lentable:
                decoded = table[code]
                newcode = table[oldcode] + decoded[:1]
            else:
                # KwKwK case: code not yet in table
                newcode = table[oldcode]
                newcode += newcode[:1]
                decoded = newcode
            result_append(decoded)
            table_append(newcode)
            lentable += 1
        oldcode = code
        if lentable in switchbitch:
            bitw, shr, mask = switchbitch[lentable]

    if code != 257:
        warnings.warn("unexpected end of LZW stream (code %i)" % code)

    return b"".join(result)


@_replace_by("_tifffile.unpack_ints")
def unpack_ints(data, dtype, itemsize, runlen=0):
    """Decompress byte string to array of integers of any bit size <= 32.

    This Python implementation is slow and only handles itemsizes 1, 2, 4, 8,
    16, 32, and 64.

    Parameters
    ----------
    data : byte str
        Data to decompress.
    dtype : numpy.dtype or str
        A numpy boolean or integer type.
    itemsize : int
        Number of bits per integer.
    runlen : int
        Number of consecutive integers, after which to start at next byte.

    Examples
    --------
    >>> unpack_ints(b'a', 'B', 1)
    array([0, 1, 1, 0, 0, 0, 0, 1], dtype=uint8)
    >>> unpack_ints(b'ab', 'B', 2)
    array([1, 2, 0, 1, 1, 2, 0, 2], dtype=uint8)

    """
    if itemsize == 1:  # bitarray
        data = numpy.frombuffer(data, "|B")
        data = numpy.unpackbits(data)
        if runlen % 8:
            # drop the pad bits at the end of each row
            data = data.reshape(-1, runlen + (8 - runlen % 8))
            data = data[:, :runlen].reshape(-1)
        return data.astype(dtype)

    dtype = numpy.dtype(dtype)
    if itemsize in (8, 16, 32, 64):
        # byte-aligned sizes need no bit twiddling
        return numpy.frombuffer(data, dtype)
    if itemsize not in (1, 2, 4, 8, 16, 32):
        raise ValueError("itemsize not supported: %i" % itemsize)
    if dtype.kind not in "biu":
        raise ValueError("invalid dtype")

    # smallest power-of-two byte size that holds itemsize bits
    itembytes = next(i for i in (1, 2, 4, 8) if 8 * i >= itemsize)
    if itembytes != dtype.itemsize:
        raise ValueError("dtype.itemsize too small")
    if runlen == 0:
        runlen = (8 * len(data)) // itemsize
    skipbits = runlen * itemsize % 8
    if skipbits:
        skipbits = 8 - skipbits
    shrbits = itembytes * 8 - itemsize
    bitmask = int(itemsize * "1" + "0" * shrbits, 2)
    dtypestr = ">" + dtype.char  # dtype always big-endian?
+ + unpack = struct.unpack + size = runlen * (len(data) * 8 // (runlen * itemsize + skipbits)) + result = numpy.empty((size,), dtype) + bitcount = 0 + for i in range(size): + start = bitcount // 8 + s = data[start : start + itembytes] + try: + code = unpack(dtypestr, s)[0] + except Exception: + code = unpack(dtypestr, s + b"\x00" * (itembytes - len(s)))[0] + code <<= bitcount % 8 + code &= bitmask + result[i] = code >> shrbits + bitcount += itemsize + if (i + 1) % runlen == 0: + bitcount += skipbits + return result + + +def unpack_rgb(data, dtype=">> data = struct.pack('BBBB', 0x21, 0x08, 0xff, 0xff) + >>> print(unpack_rgb(data, '>> print(unpack_rgb(data, '>> print(unpack_rgb(data, '= bits) + data = numpy.frombuffer(data, dtype.byteorder + dt) + result = numpy.empty((data.size, len(bitspersample)), dtype.char) + for i, bps in enumerate(bitspersample): + t = data >> int(numpy.sum(bitspersample[i + 1 :])) + t &= int("0b" + "1" * bps, 2) + if rescale: + o = ((dtype.itemsize * 8) // bps + 1) * bps + if o > data.dtype.itemsize * 8: + t = t.astype("I") + t *= (2**o - 1) // (2**bps - 1) + t //= 2 ** (o - (dtype.itemsize * 8)) + result[:, i] = t + return result.reshape(-1) + + +@_replace_by("_tifffile.reverse_bitorder") +def reverse_bitorder(data): + """Reverse bits in each byte of byte string or numpy array. + + Decode data where pixels with lower column values are stored in the + lower-order bits of the bytes (FillOrder is LSB2MSB). + + Parameters + ---------- + data : byte string or ndarray + The data to be bit reversed. If byte string, a new bit-reversed byte + string is returned. Numpy arrays are bit-reversed in-place. 

    Examples
    --------
    >>> reverse_bitorder(b'\\x01\\x64')
    b'\\x80&'
    >>> data = numpy.array([1, 666], dtype='uint16')
    >>> reverse_bitorder(data)
    >>> data
    array([  128, 16473], dtype=uint16)

    """
    try:
        # ndarray input: reverse in-place via the precomputed lookup table
        view = data.view("uint8")
        numpy.take(TIFF.REVERSE_BITORDER_ARRAY, view, out=view)
    except AttributeError:
        # bytes-like input has no .view: translate to a new byte string
        return data.translate(TIFF.REVERSE_BITORDER_BYTES)
    except ValueError:
        raise NotImplementedError("slices of arrays not supported")


def apply_colormap(image, colormap, contig=True):
    """Return palette-colored image.

    The image values are used to index the colormap on axis 1. The returned
    image is of shape image.shape+colormap.shape[0] and dtype colormap.dtype.

    Parameters
    ----------
    image : numpy.ndarray
        Indexes into the colormap.
    colormap : numpy.ndarray
        RGB lookup table aka palette of shape (3, 2**bits_per_sample).
    contig : bool
        If True, return a contiguous array.

    Examples
    --------
    >>> image = numpy.arange(256, dtype='uint8')
    >>> colormap = numpy.vstack([image, image, image]).astype('uint16') * 256
    >>> apply_colormap(image, colormap)[-1]
    array([65280, 65280, 65280], dtype=uint16)

    """
    image = numpy.take(colormap, image, axis=1)
    # move the color axis (3) to the end
    image = numpy.rollaxis(image, 0, image.ndim)
    if contig:
        image = numpy.ascontiguousarray(image)
    return image


def reorient(image, orientation):
    """Return reoriented view of image array.

    Parameters
    ----------
    image : numpy.ndarray
        Non-squeezed output of asarray() functions.
        Axes -3 and -2 must be image length and width respectively.
    orientation : int or str
        One of TIFF.ORIENTATION names or values.

    """
    ORIENTATION = TIFF.ORIENTATION
    orientation = enumarg(ORIENTATION, orientation)

    if orientation == ORIENTATION.TOPLEFT:
        return image
    elif orientation == ORIENTATION.TOPRIGHT:
        return image[..., ::-1, :]
    elif orientation == ORIENTATION.BOTLEFT:
        return image[..., ::-1, :, :]
    elif orientation == ORIENTATION.BOTRIGHT:
        return image[..., ::-1, ::-1, :]
    elif orientation == ORIENTATION.LEFTTOP:
        return numpy.swapaxes(image, -3, -2)
    elif orientation == ORIENTATION.RIGHTTOP:
        return numpy.swapaxes(image, -3, -2)[..., ::-1, :]
    elif orientation == ORIENTATION.RIGHTBOT:
        return numpy.swapaxes(image, -3, -2)[..., ::-1, :, :]
    elif orientation == ORIENTATION.LEFTBOT:
        return numpy.swapaxes(image, -3, -2)[..., ::-1, ::-1, :]


def repeat_nd(a, repeats):
    """Return read-only view into input array with elements repeated.

    Zoom nD image by integer factors using nearest neighbor interpolation
    (box filter).

    Parameters
    ----------
    a : array_like
        Input array.
    repeats : sequence of int
        The number of repetitions to apply along each dimension of input array.

    Example
    -------
    >>> repeat_nd([[1, 2], [3, 4]], (2, 2))
    array([[1, 1, 2, 2],
           [1, 1, 2, 2],
           [3, 3, 4, 4],
           [3, 3, 4, 4]])

    """
    a = numpy.asarray(a)
    reshape = []
    shape = []
    strides = []
    # insert a zero-stride axis of length k after each original axis
    for i, j, k in zip(a.strides, a.shape, repeats):
        shape.extend((j, k))
        strides.extend((i, 0))
        reshape.append(j * k)
    return numpy.lib.stride_tricks.as_strided(
        a, shape, strides, writeable=False
    ).reshape(reshape)


def reshape_nd(data_or_shape, ndim):
    """Return image array or shape with at least ndim dimensions.

    Prepend 1s to image shape as necessary.

    >>> reshape_nd(numpy.empty(0), 1).shape
    (0,)
    >>> reshape_nd(numpy.empty(1), 2).shape
    (1, 1)
    >>> reshape_nd(numpy.empty((2, 3)), 3).shape
    (1, 2, 3)
    >>> reshape_nd(numpy.empty((3, 4, 5)), 3).shape
    (3, 4, 5)
    >>> reshape_nd((2, 3), 3)
    (1, 2, 3)

    """
    # accepts either an array or a plain shape tuple
    is_shape = isinstance(data_or_shape, tuple)
    shape = data_or_shape if is_shape else data_or_shape.shape
    if len(shape) >= ndim:
        return data_or_shape
    shape = (1,) * (ndim - len(shape)) + shape
    return shape if is_shape else data_or_shape.reshape(shape)


def squeeze_axes(shape, axes, skip="XY"):
    """Return shape and axes with single-dimensional entries removed.

    Remove unused dimensions unless their axes are listed in 'skip'.

    >>> squeeze_axes((5, 1, 2, 1, 1), 'TZYXC')
    ((5, 2, 1), 'TYX')

    """
    if len(shape) != len(axes):
        raise ValueError("dimensions of axes and shape do not match")
    # keep axes of length > 1 and any axis named in 'skip'
    shape, axes = zip(*(i for i in zip(shape, axes) if i[0] > 1 or i[1] in skip))
    return tuple(shape), "".join(axes)


def transpose_axes(image, axes, asaxes="CTZYX"):
    """Return image with its axes permuted to match specified axes.

    A view is returned if possible.

    >>> transpose_axes(numpy.zeros((2, 3, 4, 5)), 'TYXC', asaxes='CTZYX').shape
    (5, 2, 1, 3, 4)

    """
    for ax in axes:
        if ax not in asaxes:
            raise ValueError("unknown axis %s" % ax)
    # add missing axes to image
    shape = image.shape
    for ax in reversed(asaxes):
        if ax not in axes:
            axes = ax + axes
            shape = (1,) + shape
    image = image.reshape(shape)
    # transpose axes
    image = image.transpose([axes.index(ax) for ax in asaxes])
    return image


def reshape_axes(axes, shape, newshape, unknown="Q"):
    """Return axes matching new shape.

    Unknown dimensions are labelled 'Q'.

    >>> reshape_axes('YXS', (219, 301, 1), (219, 301))
    'YX'
    >>> reshape_axes('IYX', (12, 219, 301), (3, 4, 219, 1, 301, 1))
    'QQYQXQ'

    """
    shape = tuple(shape)
    newshape = tuple(newshape)
    if len(axes) != len(shape):
        raise ValueError("axes do not match shape")

    size = product(shape)
    newsize = product(newshape)
    if size != newsize:
        raise ValueError("cannot reshape %s to %s" % (shape, newshape))
    if not axes or not newshape:
        return ""

    lendiff = max(0, len(shape) - len(newshape))
    if lendiff:
        newshape = newshape + (1,) * lendiff

    # walk both shapes from the right, matching dimensions where possible
    i = len(shape) - 1
    prodns = 1
    prods = 1
    result = []
    for ns in newshape[::-1]:
        prodns *= ns
        while i > 0 and shape[i] == 1 and ns != 1:
            i -= 1
        if ns == shape[i] and prodns == prods * shape[i]:
            prods *= shape[i]
            result.append(axes[i])
            i -= 1
        else:
            result.append(unknown)

    return "".join(reversed(result[lendiff:]))


def stack_pages(pages, out=None, maxworkers=1, *args, **kwargs):
    """Read data from sequence of TiffPage and stack them vertically.

    Additional parameters are passed to the TiffPage.asarray function.
+ + """ + npages = len(pages) + if npages == 0: + raise ValueError("no pages") + + if npages == 1: + return pages[0].asarray(out=out, *args, **kwargs) + + page0 = next(p for p in pages if p is not None) + page0.asarray(validate=None) # ThreadPoolExecutor swallows exceptions + shape = (npages,) + page0.keyframe.shape + dtype = page0.keyframe.dtype + out = create_output(out, shape, dtype) + + if maxworkers is None: + maxworkers = multiprocessing.cpu_count() // 2 + page0.parent.filehandle.lock = maxworkers > 1 + + filecache = OpenFileCache( + size=max(4, maxworkers), lock=page0.parent.filehandle.lock + ) + + def func(page, index, out=out, filecache=filecache, args=args, kwargs=kwargs): + """Read, decode, and copy page data.""" + if page is not None: + filecache.open(page.parent.filehandle) + out[index] = page.asarray( + lock=filecache.lock, reopen=False, validate=False, *args, **kwargs + ) + filecache.close(page.parent.filehandle) + + if maxworkers < 2: + for i, page in enumerate(pages): + func(page, i) + else: + with concurrent.futures.ThreadPoolExecutor(maxworkers) as executor: + executor.map(func, pages, range(npages)) + + filecache.clear() + page0.parent.filehandle.lock = None + + return out + + +def clean_offsets_counts(offsets, counts): + """Return cleaned offsets and byte counts. + + Remove zero offsets and counts. Use to sanitize _offsets and _bytecounts + tag values for strips or tiles. 
+ + """ + offsets = list(offsets) + counts = list(counts) + assert len(offsets) == len(counts) + j = 0 + for i, (o, b) in enumerate(zip(offsets, counts)): + if o > 0 and b > 0: + if i > j: + offsets[j] = o + counts[j] = b + j += 1 + elif b > 0 and o <= 0: + raise ValueError("invalid offset") + else: + warnings.warn("empty byte count") + if j == 0: + j = 1 + return offsets[:j], counts[:j] + + +def buffered_read(fh, lock, offsets, bytecounts, buffersize=2**26): + """Return iterator over blocks read from file.""" + length = len(offsets) + i = 0 + while i < length: + data = [] + with lock: + size = 0 + while size < buffersize and i < length: + fh.seek(offsets[i]) + bytecount = bytecounts[i] + data.append(fh.read(bytecount)) + size += bytecount + i += 1 + for block in data: + yield block + + +def create_output(out, shape, dtype, mode="w+", suffix=".memmap"): + """Return numpy array where image data of shape and dtype can be copied. + + The 'out' parameter may have the following values or types: + + None + An empty array of shape and dtype is created and returned. + numpy.ndarray + An existing writable array of compatible dtype and shape. A view of + the same array is returned after verification. + 'memmap' or 'memmap:tempdir' + A memory-map to an array stored in a temporary binary file on disk + is created and returned. + str or open file + The file name or file object used to create a memory-map to an array + stored in a binary file on disk. The created memory-mapped array is + returned. 
+ + """ + if out is None: + return numpy.zeros(shape, dtype) + if isinstance(out, str) and out[:6] == "memmap": + tempdir = out[7:] if len(out) > 7 else None + with tempfile.NamedTemporaryFile(dir=tempdir, suffix=suffix) as fh: + return numpy.memmap(fh, shape=shape, dtype=dtype, mode=mode) + if isinstance(out, numpy.ndarray): + if product(shape) != product(out.shape): + raise ValueError("incompatible output shape") + if not numpy.can_cast(dtype, out.dtype): + raise ValueError("incompatible output dtype") + return out.reshape(shape) + if isinstance(out, pathlib.Path): + out = str(out) + return numpy.memmap(out, shape=shape, dtype=dtype, mode=mode) + + +def matlabstr2py(string): + """Return Python object from Matlab string representation. + + Return str, bool, int, float, list (Matlab arrays or cells), or + dict (Matlab structures) types. + + Use to access ScanImage metadata. + + >>> matlabstr2py('1') + 1 + >>> matlabstr2py("['x y z' true false; 1 2.0 -3e4; NaN Inf @class]") + [['x y z', True, False], [1, 2.0, -30000.0], [nan, inf, '@class']] + >>> d = matlabstr2py("SI.hChannels.channelType = {'stripe' 'stripe'}\\n" + ... 
"SI.hChannels.channelsActive = 2") + >>> d['SI.hChannels.channelType'] + ['stripe', 'stripe'] + + """ + # TODO: handle invalid input + # TODO: review unboxing of multidimensional arrays + + def lex(s): + # return sequence of tokens from matlab string representation + tokens = ["["] + while True: + t, i = next_token(s) + if t is None: + break + if t == ";": + tokens.extend(("]", "[")) + elif t == "[": + tokens.extend(("[", "[")) + elif t == "]": + tokens.extend(("]", "]")) + else: + tokens.append(t) + s = s[i:] + tokens.append("]") + return tokens + + def next_token(s): + # return next token in matlab string + length = len(s) + if length == 0: + return None, 0 + i = 0 + while i < length and s[i] == " ": + i += 1 + if i == length: + return None, i + if s[i] in "{[;]}": + return s[i], i + 1 + if s[i] == "'": + j = i + 1 + while j < length and s[j] != "'": + j += 1 + return s[i : j + 1], j + 1 + if s[i] == "<": + j = i + 1 + while j < length and s[j] != ">": + j += 1 + return s[i : j + 1], j + 1 + j = i + while j < length and s[j] not in " {[;]}": + j += 1 + return s[i:j], j + + def value(s, fail=False): + # return Python value of token + s = s.strip() + if not s: + return s + if len(s) == 1: + try: + return int(s) + except Exception: + if fail: + raise ValueError() + return s + if s[0] == "'": + if fail and s[-1] != "'" or "'" in s[1:-1]: + raise ValueError() + return s[1:-1] + if s[0] == "<": + if fail and s[-1] != ">" or "<" in s[1:-1]: + raise ValueError() + return s + if fail and any(i in s for i in " ';[]{}"): + raise ValueError() + if s[0] == "@": + return s + if s in ("true", "True"): + return True + if s in ("false", "False"): + return False + if s[:6] == "zeros(": + return numpy.zeros([int(i) for i in s[6:-1].split(",")]).tolist() + if s[:5] == "ones(": + return numpy.ones([int(i) for i in s[5:-1].split(",")]).tolist() + if "." 
in s or "e" in s: + try: + return float(s) + except Exception: + pass + try: + return int(s) + except Exception: + pass + try: + return float(s) # nan, inf + except Exception: + if fail: + raise ValueError() + return s + + def parse(s): + # return Python value from string representation of Matlab value + s = s.strip() + try: + return value(s, fail=True) + except ValueError: + pass + result = add2 = [] + levels = [add2] + for t in lex(s): + if t in "[{": + add2 = [] + levels.append(add2) + elif t in "]}": + x = levels.pop() + if len(x) == 1 and isinstance(x[0], (list, str)): + x = x[0] + add2 = levels[-1] + add2.append(x) + else: + add2.append(value(t)) + if len(result) == 1 and isinstance(result[0], (list, str)): + result = result[0] + return result + + if "\r" in string or "\n" in string: + # structure + d = {} + for line in string.splitlines(): + line = line.strip() + if not line or line[0] == "%": + continue + k, v = line.split("=", 1) + k = k.strip() + if any(c in k for c in " ';[]{}<>"): + continue + d[k] = parse(v) + return d + return parse(string) + + +def stripnull(string, null=b"\x00"): + """Return string truncated at first null character. + + Clean NULL terminated C strings. For unicode strings use null='\\0'. + + >>> stripnull(b'string\\x00') + b'string' + >>> stripnull('string\\x00', null='\\0') + 'string' + + """ + i = string.find(null) + return string if (i < 0) else string[:i] + + +def stripascii(string): + """Return string truncated at last byte that is 7-bit ASCII. + + Clean NULL separated and terminated TIFF strings. + + >>> stripascii(b'string\\x00string\\n\\x01\\x00') + b'string\\x00string\\n' + >>> stripascii(b'\\x00') + b'' + + """ + # TODO: pythonize this + i = len(string) + while i: + i -= 1 + if 8 < byte2int(string[i]) < 127: + break + else: + i = -1 + return string[: i + 1] + + +def asbool(value, true=(b"true", "true"), false=(b"false", "false")): + """Return string as bool if possible, else raise TypeError. 
+ + >>> asbool(b' False ') + False + + """ + value = value.strip().lower() + if value in true: # might raise UnicodeWarning/BytesWarning + return True + if value in false: + return False + raise TypeError() + + +def astype(value, types=None): + """Return argument as one of types if possible. + + >>> astype('42') + 42 + >>> astype('3.14') + 3.14 + >>> astype('True') + True + >>> astype(b'Neee-Wom') + 'Neee-Wom' + + """ + if types is None: + types = int, float, asbool, bytes2str + for typ in types: + try: + return typ(value) + except (ValueError, AttributeError, TypeError, UnicodeEncodeError): + pass + return value + + +def format_size(size, threshold=1536): + """Return file size as string from byte size. + + >>> format_size(1234) + '1234 B' + >>> format_size(12345678901) + '11.50 GiB' + + """ + if size < threshold: + return "%i B" % size + for unit in ("KiB", "MiB", "GiB", "TiB", "PiB"): + size /= 1024.0 + if size < threshold: + return "%.2f %s" % (size, unit) + + +def identityfunc(arg): + """Single argument identity function. + + >>> identityfunc('arg') + 'arg' + + """ + return arg + + +def nullfunc(*args, **kwargs): + """Null function. + + >>> nullfunc('arg', kwarg='kwarg') + + """ + return + + +def sequence(value): + """Return tuple containing value if value is not a sequence. + + >>> sequence(1) + (1,) + >>> sequence([1]) + [1] + + """ + try: + len(value) + return value + except TypeError: + return (value,) + + +def product(iterable): + """Return product of sequence of numbers. + + Equivalent of functools.reduce(operator.mul, iterable, 1). + Multiplying numpy integers might overflow. + + >>> product([2**8, 2**30]) + 274877906944 + >>> product([]) + 1 + + """ + prod = 1 + for i in iterable: + prod *= i + return prod + + +def natural_sorted(iterable): + """Return human sorted list of strings. + + E.g. for sorting file names. 
    >>> natural_sorted(['f1', 'f2', 'f10'])
    ['f1', 'f2', 'f10']

    """

    def sortkey(x):
        # split into alternating non-digit/digit runs; digit runs compare
        # numerically so 'f10' sorts after 'f2'
        return [(int(c) if c.isdigit() else c) for c in re.split(numbers, x)]

    numbers = re.compile(r"(\d+)")
    return sorted(iterable, key=sortkey)


def excel_datetime(timestamp, epoch=datetime.datetime.fromordinal(693594)):
    """Return datetime object from timestamp in Excel serial format.

    Convert LSM time stamps.

    >>> excel_datetime(40237.029999999795)
    datetime.datetime(2010, 2, 28, 0, 43, 11, 999982)

    """
    # ordinal 693594 is 1899-12-30, Excel's day-zero epoch;
    # the fractional part of 'timestamp' becomes the time of day
    return epoch + datetime.timedelta(timestamp)


def julian_datetime(julianday, milisecond=0):
    """Return datetime from days since 1/1/4713 BC and ms since midnight.

    Convert Julian dates according to MetaMorph.
    Returns None for dates before year 1 (julianday <= 1721423).

    >>> julian_datetime(2451576, 54362783)
    datetime.datetime(2000, 2, 2, 15, 6, 2, 783)

    """
    if julianday <= 1721423:
        # no datetime before year 1
        return None

    a = julianday + 1
    if a > 2299160:
        # Gregorian calendar correction for dates after 1582-10-15
        alpha = math.trunc((a - 1867216.25) / 36524.25)
        a += 1 + alpha - alpha // 4
    b = a + (1524 if a > 1721423 else 1158)
    c = math.trunc((b - 122.1) / 365.25)
    d = math.trunc(365.25 * c)
    e = math.trunc((b - d) / 30.6001)

    day = b - d - math.trunc(30.6001 * e)
    month = e - (1 if e < 13.5 else 13)
    year = c - (4716 if month > 2.5 else 4715)

    # successively split ms-since-midnight into h:m:s and leftover ms;
    # the leftover is passed to datetime as microseconds-field 'milisecond'
    hour, milisecond = divmod(milisecond, 1000 * 60 * 60)
    minute, milisecond = divmod(milisecond, 1000 * 60)
    second, milisecond = divmod(milisecond, 1000)

    return datetime.datetime(year, month, day, hour, minute, second, milisecond)


def byteorder_isnative(byteorder):
    """Return if byteorder matches the system's byteorder.
+ + >>> byteorder_isnative('=') + True + + """ + if byteorder == "=" or byteorder == sys.byteorder: + return True + keys = {"big": ">", "little": "<"} + return keys.get(byteorder, byteorder) == keys[sys.byteorder] + + +def recarray2dict(recarray): + """Return numpy.recarray as dict.""" + # TODO: subarrays + result = {} + for descr, value in zip(recarray.dtype.descr, recarray): + name, dtype = descr[:2] + if dtype[1] == "S": + value = bytes2str(stripnull(value)) + elif value.ndim < 2: + value = value.tolist() + result[name] = value + return result + + +def xml2dict(xml, sanitize=True, prefix=None): + """Return XML as dict. + + >>> xml2dict('1') + {'root': {'key': 1, 'attr': 'name'}} + + """ + from xml.etree import cElementTree as etree # delayed import + + at = tx = "" + if prefix: + at, tx = prefix + + def astype(value): + # return value as int, float, bool, or str + for t in (int, float, asbool): + try: + return t(value) + except Exception: + pass + return value + + def etree2dict(t): + # adapted from https://stackoverflow.com/a/10077069/453463 + key = t.tag + if sanitize: + key = key.rsplit("}", 1)[-1] + d = {key: {} if t.attrib else None} + children = list(t) + if children: + dd = collections.defaultdict(list) + for dc in map(etree2dict, children): + for k, v in dc.items(): + dd[k].append(astype(v)) + d = { + key: { + k: astype(v[0]) if len(v) == 1 else astype(v) for k, v in dd.items() + } + } + if t.attrib: + d[key].update((at + k, astype(v)) for k, v in t.attrib.items()) + if t.text: + text = t.text.strip() + if children or t.attrib: + if text: + d[key][tx + "value"] = astype(text) + else: + d[key] = astype(text) + return d + + return etree2dict(etree.fromstring(xml)) + + +def hexdump(bytestr, width=75, height=24, snipat=-2, modulo=2, ellipsis="..."): + """Return hexdump representation of byte string. + + >>> hexdump(binascii.unhexlify('49492a00080000000e00fe0004000100')) + '49 49 2a 00 08 00 00 00 0e 00 fe 00 04 00 01 00 II*.............' 
+ + """ + size = len(bytestr) + if size < 1 or width < 2 or height < 1: + return "" + if height == 1: + addr = b"" + bytesperline = min(modulo * (((width - len(addr)) // 4) // modulo), size) + if bytesperline < 1: + return "" + nlines = 1 + else: + addr = b"%%0%ix: " % len(b"%x" % size) + bytesperline = min(modulo * (((width - len(addr % 1)) // 4) // modulo), size) + if bytesperline < 1: + return "" + width = 3 * bytesperline + len(addr % 1) + nlines = (size - 1) // bytesperline + 1 + + if snipat is None or snipat == 1: + snipat = height + elif 0 < abs(snipat) < 1: + snipat = int(math.floor(height * snipat)) + if snipat < 0: + snipat += height + + if height == 1 or nlines == 1: + blocks = [(0, bytestr[:bytesperline])] + addr = b"" + height = 1 + width = 3 * bytesperline + elif height is None or nlines <= height: + blocks = [(0, bytestr)] + elif snipat <= 0: + start = bytesperline * (nlines - height) + blocks = [(start, bytestr[start:])] # (start, None) + elif snipat >= height or height < 3: + end = bytesperline * height + blocks = [(0, bytestr[:end])] # (end, None) + else: + end1 = bytesperline * snipat + end2 = bytesperline * (height - snipat - 1) + blocks = [ + (0, bytestr[:end1]), + (size - end1 - end2, None), + (size - end2, bytestr[size - end2 :]), + ] + + ellipsis = str2bytes(ellipsis) + result = [] + for start, bytestr in blocks: + if bytestr is None: + result.append(ellipsis) # 'skip %i bytes' % start) + continue + hexstr = binascii.hexlify(bytestr) + strstr = re.sub(rb"[^\x20-\x7f]", b".", bytestr) + for i in range(0, len(bytestr), bytesperline): + h = hexstr[2 * i : 2 * i + bytesperline * 2] + r = (addr % (i + start)) if height > 1 else addr + r += b" ".join(h[i : i + 2] for i in range(0, 2 * bytesperline, 2)) + r += b" " * (width - len(r)) + r += strstr[i : i + bytesperline] + result.append(r) + result = b"\n".join(result) + if sys.version_info[0] == 3: + result = result.decode("ascii") + return result + + +def isprintable(string): + """Return if all 
characters in string are printable. + + >>> isprintable('abc') + True + >>> isprintable(b'\01') + False + + """ + string = string.strip() + if len(string) < 1: + return True + if sys.version_info[0] == 3: + try: + return string.isprintable() + except Exception: + pass + try: + return string.decode("utf-8").isprintable() + except Exception: + pass + else: + if string.isalnum(): + return True + printable = ( + "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRST" + "UVWXYZ!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ \t\n\r\x0b\x0c" + ) + return all(c in printable for c in string) + + +def clean_whitespace(string, compact=False): + """Return string with compressed whitespace.""" + for a, b in ( + ("\r\n", "\n"), + ("\r", "\n"), + ("\n\n", "\n"), + ("\t", " "), + (" ", " "), + ): + string = string.replace(a, b) + if compact: + for a, b in (("\n", " "), ("[ ", "["), (" ", " "), (" ", " "), (" ", " ")): + string = string.replace(a, b) + return string.strip() + + +def pformat_xml(xml): + """Return pretty formatted XML.""" + try: + import lxml.etree as etree # delayed import + + if not isinstance(xml, bytes): + xml = xml.encode("utf-8") + xml = etree.parse(io.BytesIO(xml)) + xml = etree.tostring( + xml, pretty_print=True, xml_declaration=True, encoding=xml.docinfo.encoding + ) + xml = bytes2str(xml) + except Exception: + if isinstance(xml, bytes): + xml = bytes2str(xml) + xml = xml.replace("><", ">\n<") + return xml.replace(" ", " ").replace("\t", " ") + + +def pformat(arg, width=79, height=24, compact=True): + """Return pretty formatted representation of object as string. + + Whitespace might be altered. 
+ + """ + if height is None or height < 1: + height = 1024 + if width is None or width < 1: + width = 256 + + npopt = numpy.get_printoptions() + numpy.set_printoptions(threshold=100, linewidth=width) + + if isinstance(arg, basestring): + if arg[:5].lower() in (" height: + arg = "\n".join(argl[: height // 2] + ["..."] + argl[-height // 2 :]) + return arg + + +def snipstr(string, width=79, snipat=0.5, ellipsis="..."): + """Return string cut to specified length. + + >>> snipstr('abcdefghijklmnop', 8) + 'abc...op' + + """ + if ellipsis is None: + if isinstance(string, bytes): + ellipsis = b"..." + else: + ellipsis = "\u2026" # does not print on win-py3.5 + esize = len(ellipsis) + + splitlines = string.splitlines() + # TODO: finish and test multiline snip + + result = [] + for line in splitlines: + if line is None: + result.append(ellipsis) + continue + linelen = len(line) + if linelen <= width: + result.append(string) + continue + + split = snipat + if split is None or split == 1: + split = linelen + elif 0 < abs(split) < 1: + split = int(math.floor(linelen * split)) + if split < 0: + split += linelen + if split < 0: + split = 0 + + if esize == 0 or width < esize + 1: + if split <= 0: + result.append(string[-width:]) + else: + result.append(string[:width]) + elif split <= 0: + result.append(ellipsis + string[esize - width :]) + elif split >= linelen or width < esize + 4: + result.append(string[: width - esize] + ellipsis) + else: + splitlen = linelen - width + esize + end1 = split - splitlen // 2 + end2 = end1 + splitlen + result.append(string[:end1] + ellipsis + string[end2:]) + + if isinstance(string, bytes): + return b"\n".join(result) + else: + return "\n".join(result) + + +def enumarg(enum, arg): + """Return enum member from its name or value. 
+ + >>> enumarg(TIFF.PHOTOMETRIC, 2) + + >>> enumarg(TIFF.PHOTOMETRIC, 'RGB') + + + """ + try: + return enum(arg) + except Exception: + try: + return enum[arg.upper()] + except Exception: + raise ValueError("invalid argument %s" % arg) + + +def parse_kwargs(kwargs, *keys, **keyvalues): + """Return dict with keys from keys|keyvals and values from kwargs|keyvals. + + Existing keys are deleted from kwargs. + + >>> kwargs = {'one': 1, 'two': 2, 'four': 4} + >>> kwargs2 = parse_kwargs(kwargs, 'two', 'three', four=None, five=5) + >>> kwargs == {'one': 1} + True + >>> kwargs2 == {'two': 2, 'four': 4, 'five': 5} + True + + """ + result = {} + for key in keys: + if key in kwargs: + result[key] = kwargs[key] + del kwargs[key] + for key, value in keyvalues.items(): + if key in kwargs: + result[key] = kwargs[key] + del kwargs[key] + else: + result[key] = value + return result + + +def update_kwargs(kwargs, **keyvalues): + """Update dict with keys and values if keys do not already exist. + + >>> kwargs = {'one': 1, } + >>> update_kwargs(kwargs, one=None, two=2) + >>> kwargs == {'one': 1, 'two': 2} + True + + """ + for key, value in keyvalues.items(): + if key not in kwargs: + kwargs[key] = value + + +def validate_jhove(filename, jhove="jhove", ignore=("More than 50 IFDs",)): + """Validate TIFF file using jhove -m TIFF-hul. + + Raise ValueError if jhove outputs an error message unless the message + contains one of the strings in 'ignore'. + + JHOVE does not support bigtiff or more than 50 IFDs. 
+ + See `JHOVE TIFF-hul Module `_ + + """ + import subprocess # noqa: delayed import + + out = subprocess.check_output([jhove, filename, "-m", "TIFF-hul"]) + if b"ErrorMessage: " in out: + for line in out.splitlines(): + line = line.strip() + if line.startswith(b"ErrorMessage: "): + error = line[14:].decode("utf8") + for i in ignore: + if i in error: + break + else: + raise ValueError(error) + break + + +def lsm2bin(lsmfile, binfile=None, tile=(256, 256), verbose=True): + """Convert [MP]TZCYX LSM file to series of BIN files. + + One BIN file containing 'ZCYX' data are created for each position, time, + and tile. The position, time, and tile indices are encoded at the end + of the filenames. + + """ + verbose = print_ if verbose else nullfunc + + if binfile is None: + binfile = lsmfile + elif binfile.lower() == "none": + binfile = None + if binfile: + binfile += "_(z%ic%iy%ix%i)_m%%ip%%it%%03iy%%ix%%i.bin" + + verbose("\nOpening LSM file... ", end="", flush=True) + start_time = time.time() + + with TiffFile(lsmfile) as lsm: + if not lsm.is_lsm: + verbose("\n", lsm, flush=True) + raise ValueError("not a LSM file") + series = lsm.series[0] # first series contains the image data + shape = series.shape + axes = series.axes + dtype = series.dtype + size = product(shape) * dtype.itemsize + + verbose("%.3f s" % (time.time() - start_time)) + # verbose(lsm, flush=True) + verbose( + "Image\n axes: %s\n shape: %s\n dtype: %s\n size: %s" + % (axes, shape, dtype, format_size(size)), + flush=True, + ) + if not series.axes.endswith("TZCYX"): + raise ValueError("not a *TZCYX LSM file") + + verbose("Copying image from LSM to BIN files", end="", flush=True) + start_time = time.time() + tiles = shape[-2] // tile[-2], shape[-1] // tile[-1] + if binfile: + binfile = binfile % (shape[-4], shape[-3], tile[0], tile[1]) + shape = (1,) * (7 - len(shape)) + shape + # cache for ZCYX stacks and output files + data = numpy.empty(shape[3:], dtype=dtype) + out = numpy.empty((shape[-4], shape[-3], 
tile[0], tile[1]), dtype=dtype) + # iterate over Tiff pages containing data + pages = iter(series.pages) + for m in range(shape[0]): # mosaic axis + for p in range(shape[1]): # position axis + for t in range(shape[2]): # time axis + for z in range(shape[3]): # z slices + data[z] = next(pages).asarray() + for y in range(tiles[0]): # tile y + for x in range(tiles[1]): # tile x + out[:] = data[ + ..., + y * tile[0] : (y + 1) * tile[0], + x * tile[1] : (x + 1) * tile[1], + ] + if binfile: + out.tofile(binfile % (m, p, t, y, x)) + verbose(".", end="", flush=True) + verbose(" %.3f s" % (time.time() - start_time)) + + +def imshow( + data, + title=None, + vmin=0, + vmax=None, + cmap=None, + bitspersample=None, + photometric="RGB", + interpolation=None, + dpi=96, + figure=None, + subplot=111, + maxdim=32768, + **kwargs +): + """Plot n-dimensional images using matplotlib.pyplot. + + Return figure, subplot and plot axis. + Requires pyplot already imported C{from matplotlib import pyplot}. + + Parameters + ---------- + bitspersample : int or None + Number of bits per channel in integer RGB images. + photometric : {'MINISWHITE', 'MINISBLACK', 'RGB', or 'PALETTE'} + The color space of the image data. + title : str + Window and subplot title. + figure : matplotlib.figure.Figure (optional). + Matplotlib to use for plotting. + subplot : int + A matplotlib.pyplot.subplot axis. + maxdim : int + maximum image width and length. + kwargs : optional + Arguments for matplotlib.pyplot.imshow. 
+ + """ + isrgb = photometric in ("RGB",) # 'PALETTE', 'YCBCR' + if data.dtype.kind == "b": + isrgb = False + if isrgb and not ( + data.shape[-1] in (3, 4) or (data.ndim > 2 and data.shape[-3] in (3, 4)) + ): + isrgb = False + photometric = "MINISBLACK" + + data = data.squeeze() + if photometric in ("MINISWHITE", "MINISBLACK", None): + data = reshape_nd(data, 2) + else: + data = reshape_nd(data, 3) + + dims = data.ndim + if dims < 2: + raise ValueError("not an image") + elif dims == 2: + dims = 0 + isrgb = False + else: + if isrgb and data.shape[-3] in (3, 4): + data = numpy.swapaxes(data, -3, -2) + data = numpy.swapaxes(data, -2, -1) + elif not isrgb and ( + data.shape[-1] < data.shape[-2] // 8 + and data.shape[-1] < data.shape[-3] // 8 + and data.shape[-1] < 5 + ): + data = numpy.swapaxes(data, -3, -1) + data = numpy.swapaxes(data, -2, -1) + isrgb = isrgb and data.shape[-1] in (3, 4) + dims -= 3 if isrgb else 2 + + if isrgb: + data = data[..., :maxdim, :maxdim, :maxdim] + else: + data = data[..., :maxdim, :maxdim] + + if photometric == "PALETTE" and isrgb: + datamax = data.max() + if datamax > 255: + data = data >> 8 # possible precision loss + data = data.astype("B") + elif data.dtype.kind in "ui": + if not (isrgb and data.dtype.itemsize <= 1) or bitspersample is None: + try: + bitspersample = int(math.ceil(math.log(data.max(), 2))) + except Exception: + bitspersample = data.dtype.itemsize * 8 + elif not isinstance(bitspersample, inttypes): + # bitspersample can be tuple, e.g. 
(5, 6, 5) + bitspersample = data.dtype.itemsize * 8 + datamax = 2**bitspersample + if isrgb: + if bitspersample < 8: + data = data << (8 - bitspersample) + elif bitspersample > 8: + data = data >> (bitspersample - 8) # precision loss + data = data.astype("B") + elif data.dtype.kind == "f": + datamax = data.max() + if isrgb and datamax > 1.0: + if data.dtype.char == "d": + data = data.astype("f") + data /= datamax + else: + data = data / datamax + elif data.dtype.kind == "b": + datamax = 1 + elif data.dtype.kind == "c": + data = numpy.absolute(data) + datamax = data.max() + + if not isrgb: + if vmax is None: + vmax = datamax + if vmin is None: + if data.dtype.kind == "i": + dtmin = numpy.iinfo(data.dtype).min + vmin = numpy.min(data) + if vmin == dtmin: + vmin = numpy.min(data > dtmin) + if data.dtype.kind == "f": + dtmin = numpy.finfo(data.dtype).min + vmin = numpy.min(data) + if vmin == dtmin: + vmin = numpy.min(data > dtmin) + else: + vmin = 0 + + pyplot = sys.modules["matplotlib.pyplot"] + + if figure is None: + pyplot.rc("font", family="sans-serif", weight="normal", size=8) + figure = pyplot.figure( + dpi=dpi, figsize=(10.3, 6.3), frameon=True, facecolor="1.0", edgecolor="w" + ) + try: + figure.canvas.manager.window.title(title) + except Exception: + pass + size = len(title.splitlines()) if title else 1 + pyplot.subplots_adjust( + bottom=0.03 * (dims + 2), + top=0.98 - size * 0.03, + left=0.1, + right=0.95, + hspace=0.05, + wspace=0.0, + ) + subplot = pyplot.subplot(subplot) + + if title: + try: + title = unicode(title, "Windows-1252") + except TypeError: + pass + pyplot.title(title, size=11) + + if cmap is None: + if data.dtype.char == "?": + cmap = "gray" + elif data.dtype.kind in "buf" or vmin == 0: + cmap = "viridis" + else: + cmap = "coolwarm" + if photometric == "MINISWHITE": + cmap += "_r" + + image = pyplot.imshow( + numpy.atleast_2d(data[(0,) * dims].squeeze()), + vmin=vmin, + vmax=vmax, + cmap=cmap, + interpolation=interpolation, + **kwargs + ) + + if 
not isrgb: + pyplot.colorbar() # panchor=(0.55, 0.5), fraction=0.05 + + def format_coord(x, y): + # callback function to format coordinate display in toolbar + x = int(x + 0.5) + y = int(y + 0.5) + try: + if dims: + return "%s @ %s [%4i, %4i]" % (curaxdat[1][y, x], current, y, x) + return "%s @ [%4i, %4i]" % (data[y, x], y, x) + except IndexError: + return "" + + def none(event): + return "" + + subplot.format_coord = format_coord + image.get_cursor_data = none + image.format_cursor_data = none + + if dims: + current = list((0,) * dims) + curaxdat = [0, data[tuple(current)].squeeze()] + sliders = [ + pyplot.Slider( + pyplot.axes([0.125, 0.03 * (axis + 1), 0.725, 0.025]), + "Dimension %i" % axis, + 0, + data.shape[axis] - 1, + 0, + facecolor="0.5", + valfmt="%%.0f [%i]" % data.shape[axis], + ) + for axis in range(dims) + ] + for slider in sliders: + slider.drawon = False + + def set_image(current, sliders=sliders, data=data): + # change image and redraw canvas + curaxdat[1] = data[tuple(current)].squeeze() + image.set_data(curaxdat[1]) + for ctrl, index in zip(sliders, current): + ctrl.eventson = False + ctrl.set_val(index) + ctrl.eventson = True + figure.canvas.draw() + + def on_changed(index, axis, data=data, current=current): + # callback function for slider change event + index = int(round(index)) + curaxdat[0] = axis + if index == current[axis]: + return + if index >= data.shape[axis]: + index = 0 + elif index < 0: + index = data.shape[axis] - 1 + current[axis] = index + set_image(current) + + def on_keypressed(event, data=data, current=current): + # callback function for key press event + key = event.key + axis = curaxdat[0] + if str(key) in "0123456789": + on_changed(key, axis) + elif key == "right": + on_changed(current[axis] + 1, axis) + elif key == "left": + on_changed(current[axis] - 1, axis) + elif key == "up": + curaxdat[0] = 0 if axis == len(data.shape) - 1 else axis + 1 + elif key == "down": + curaxdat[0] = len(data.shape) - 1 if axis == 0 else axis - 
1 + elif key == "end": + on_changed(data.shape[axis] - 1, axis) + elif key == "home": + on_changed(0, axis) + + figure.canvas.mpl_connect("key_press_event", on_keypressed) + for axis, ctrl in enumerate(sliders): + ctrl.on_changed(lambda k, a=axis: on_changed(k, a)) + + return figure, subplot, image + + +def _app_show(): + """Block the GUI. For use as skimage plugin.""" + pyplot = sys.modules["matplotlib.pyplot"] + pyplot.show() + + +def askopenfilename(**kwargs): + """Return file name(s) from Tkinter's file open dialog.""" + try: + from Tkinter import Tk + import tkFileDialog as filedialog + except ImportError: + from tkinter import Tk, filedialog + root = Tk() + root.withdraw() + root.update() + filenames = filedialog.askopenfilename(**kwargs) + root.destroy() + return filenames + + +def main(argv=None): + """Command line usage main function.""" + if float(sys.version[0:3]) < 2.7: + print("This script requires Python version 2.7 or better.") + print("This is Python version %s" % sys.version) + return 0 + if argv is None: + argv = sys.argv + + import optparse # TODO: use argparse + + parser = optparse.OptionParser( + usage="usage: %prog [options] path", + description="Display image data in TIFF files.", + version="%%prog %s" % __version__, + ) + opt = parser.add_option + opt("-p", "--page", dest="page", type="int", default=-1, help="display single page") + opt( + "-s", + "--series", + dest="series", + type="int", + default=-1, + help="display series of pages of same shape", + ) + opt( + "--nomultifile", + dest="nomultifile", + action="store_true", + default=False, + help="do not read OME series from multiple files", + ) + opt( + "--noplots", + dest="noplots", + type="int", + default=8, + help="maximum number of plots", + ) + opt( + "--interpol", + dest="interpol", + metavar="INTERPOL", + default="bilinear", + help="image interpolation method", + ) + opt("--dpi", dest="dpi", type="int", default=96, help="plot resolution") + opt( + "--vmin", + dest="vmin", + 
type="int", + default=None, + help="minimum value for colormapping", + ) + opt( + "--vmax", + dest="vmax", + type="int", + default=None, + help="maximum value for colormapping", + ) + opt( + "--debug", + dest="debug", + action="store_true", + default=False, + help="raise exception on failures", + ) + opt( + "--doctest", + dest="doctest", + action="store_true", + default=False, + help="runs the docstring examples", + ) + opt("-v", "--detail", dest="detail", type="int", default=2) + opt("-q", "--quiet", dest="quiet", action="store_true") + + settings, path = parser.parse_args() + path = " ".join(path) + + if settings.doctest: + import doctest + + doctest.testmod(optionflags=doctest.ELLIPSIS) + return 0 + if not path: + path = askopenfilename( + title="Select a TIFF file", filetypes=TIFF.FILEOPEN_FILTER + ) + if not path: + parser.error("No file specified") + + if any(i in path for i in "?*"): + path = glob.glob(path) + if not path: + print("no files match the pattern") + return 0 + # TODO: handle image sequences + path = path[0] + + if not settings.quiet: + print("\nReading file structure...", end=" ") + start = time.time() + try: + tif = TiffFile(path, multifile=not settings.nomultifile) + except Exception as e: + if settings.debug: + raise + else: + print("\n", e) + sys.exit(0) + if not settings.quiet: + print("%.3f ms" % ((time.time() - start) * 1e3)) + + if tif.is_ome: + settings.norgb = True + + images = [] + if settings.noplots > 0: + if not settings.quiet: + print("Reading image data... 
", end=" ") + + def notnone(x): + return next(i for i in x if i is not None) + + start = time.time() + try: + if settings.page >= 0: + images = [(tif.asarray(key=settings.page), tif[settings.page], None)] + elif settings.series >= 0: + images = [ + ( + tif.asarray(series=settings.series), + notnone(tif.series[settings.series]._pages), + tif.series[settings.series], + ) + ] + else: + images = [] + for i, s in enumerate(tif.series[: settings.noplots]): + try: + images.append( + (tif.asarray(series=i), notnone(s._pages), tif.series[i]) + ) + except ValueError as e: + images.append((None, notnone(s.pages), None)) + if settings.debug: + raise + else: + print("\nSeries %i failed: %s... " % (i, e), end="") + if not settings.quiet: + print("%.3f ms" % ((time.time() - start) * 1e3)) + except Exception as e: + if settings.debug: + raise + else: + print(e) + + if not settings.quiet: + print() + print(TiffFile.__str__(tif, detail=int(settings.detail))) + print() + tif.close() + + if images and settings.noplots > 0: + try: + import matplotlib + + matplotlib.use("TkAgg") + from matplotlib import pyplot + except ImportError as e: + warnings.warn("failed to import matplotlib.\n%s" % e) + else: + for img, page, series in images: + if img is None: + continue + vmin, vmax = settings.vmin, settings.vmax + if "GDAL_NODATA" in page.tags: + try: + vmin = numpy.min( + img[img > float(page.tags["GDAL_NODATA"].value)] + ) + except ValueError: + pass + if tif.is_stk: + try: + vmin = tif.stk_metadata["MinScale"] + vmax = tif.stk_metadata["MaxScale"] + except KeyError: + pass + else: + if vmax <= vmin: + vmin, vmax = settings.vmin, settings.vmax + if series: + title = "%s\n%s\n%s" % (str(tif), str(page), str(series)) + else: + title = "%s\n %s" % (str(tif), str(page)) + photometric = "MINISBLACK" + if page.photometric not in (3,): + photometric = TIFF.PHOTOMETRIC(page.photometric).name + imshow( + img, + title=title, + vmin=vmin, + vmax=vmax, + bitspersample=page.bitspersample, + 
photometric=photometric, + interpolation=settings.interpol, + dpi=settings.dpi, + ) + pyplot.show() + + +if sys.version_info[0] == 2: + inttypes = int, long # noqa + + def print_(*args, **kwargs): + """Print function with flush support.""" + flush = kwargs.pop("flush", False) + print(*args, **kwargs) + if flush: + sys.stdout.flush() + + def bytes2str(b, encoding=None, errors=None): + """Return string from bytes.""" + return b + + def str2bytes(s, encoding=None): + """Return bytes from string.""" + return s + + def byte2int(b): + """Return value of byte as int.""" + return ord(b) + + class FileNotFoundError(IOError): + pass + + TiffFrame = TiffPage # noqa +else: + inttypes = int + basestring = str, bytes + unicode = str + print_ = print + + def bytes2str(b, encoding=None, errors="strict"): + """Return unicode string from encoded bytes.""" + if encoding is not None: + return b.decode(encoding, errors) + try: + return b.decode("utf-8", errors) + except UnicodeDecodeError: + return b.decode("cp1252", errors) + + def str2bytes(s, encoding="cp1252"): + """Return bytes from unicode string.""" + return s.encode(encoding) + + def byte2int(b): + """Return value of byte as int.""" + return b + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/.venv/Lib/site-packages/imageio/plugins/bsdf.py b/.venv/Lib/site-packages/imageio/plugins/bsdf.py new file mode 100644 index 00000000..041d7e52 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/bsdf.py @@ -0,0 +1,324 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +""" Read/Write BSDF files. + +Backend Library: internal + +The BSDF format enables reading and writing of image data in the +BSDF serialization format. This format allows storage of images, volumes, +and series thereof. Data can be of any numeric data type, and can +optionally be compressed. Each image/volume can have associated +meta data, which can consist of any data type supported by BSDF. 
+ +By default, image data is lazily loaded; the actual image data is +not read until it is requested. This allows storing multiple images +in a single file and still have fast access to individual images. +Alternatively, a series of images can be read in streaming mode, reading +images as they are read (e.g. from http). + +BSDF is a simple generic binary format. It is easy to extend and there +are standard extension definitions for 2D and 3D image data. +Read more at http://bsdf.io. + + +Parameters +---------- +random_access : bool + Whether individual images in the file can be read in random order. + Defaults to True for normal files, and to False when reading from HTTP. + If False, the file is read in "streaming mode", allowing reading + files as they are read, but without support for "rewinding". + Note that setting this to True when reading from HTTP, the whole file + is read upon opening it (since lazy loading is not possible over HTTP). + +compression : int + Use ``0`` or "no" for no compression, ``1`` or "zlib" for Zlib + compression (same as zip files and PNG), and ``2`` or "bz2" for Bz2 + compression (more compact but slower). Default 1 (zlib). + Note that some BSDF implementations may not support compression + (e.g. JavaScript). + +""" + +import numpy as np + +from ..core import Format + + +def get_bsdf_serializer(options): + from . 
import _bsdf as bsdf + + class NDArrayExtension(bsdf.Extension): + """Copy of BSDF's NDArrayExtension but deal with lazy blobs.""" + + name = "ndarray" + cls = np.ndarray + + def encode(self, s, v): + return dict(shape=v.shape, dtype=str(v.dtype), data=v.tobytes()) + + def decode(self, s, v): + return v # return as dict, because of lazy blobs, decode in Image + + class ImageExtension(bsdf.Extension): + """We implement two extensions that trigger on the Image classes.""" + + def encode(self, s, v): + return dict(array=v.array, meta=v.meta) + + def decode(self, s, v): + return Image(v["array"], v["meta"]) + + class Image2DExtension(ImageExtension): + name = "image2d" + cls = Image2D + + class Image3DExtension(ImageExtension): + name = "image3d" + cls = Image3D + + exts = [NDArrayExtension, Image2DExtension, Image3DExtension] + serializer = bsdf.BsdfSerializer(exts, **options) + + return bsdf, serializer + + +class Image: + """Class in which we wrap the array and meta data. By using an extension + we can make BSDF trigger on these classes and thus encode the images. + as actual images. + """ + + def __init__(self, array, meta): + self.array = array + self.meta = meta + + def get_array(self): + if not isinstance(self.array, np.ndarray): + v = self.array + blob = v["data"] + if not isinstance(blob, bytes): # then it's a lazy bsdf.Blob + blob = blob.get_bytes() + self.array = np.frombuffer(blob, dtype=v["dtype"]) + self.array.shape = v["shape"] + return self.array + + def get_meta(self): + return self.meta + + +class Image2D(Image): + pass + + +class Image3D(Image): + pass + + +class BsdfFormat(Format): + """The BSDF format enables reading and writing of image data in the + BSDF serialization format. This format allows storage of images, volumes, + and series thereof. Data can be of any numeric data type, and can + optionally be compressed. Each image/volume can have associated + meta data, which can consist of any data type supported by BSDF. 
+ + By default, image data is lazily loaded; the actual image data is + not read until it is requested. This allows storing multiple images + in a single file and still have fast access to individual images. + Alternatively, a series of images can be read in streaming mode, reading + images as they are read (e.g. from http). + + BSDF is a simple generic binary format. It is easy to extend and there + are standard extension definitions for 2D and 3D image data. + Read more at http://bsdf.io. + + Parameters for reading + ---------------------- + random_access : bool + Whether individual images in the file can be read in random order. + Defaults to True for normal files, and to False when reading from HTTP. + If False, the file is read in "streaming mode", allowing reading + files as they are read, but without support for "rewinding". + Note that setting this to True when reading from HTTP, the whole file + is read upon opening it (since lazy loading is not possible over HTTP). + + Parameters for saving + --------------------- + compression : {0, 1, 2} + Use ``0`` or "no" for no compression, ``1`` or "zlib" for Zlib + compression (same as zip files and PNG), and ``2`` or "bz2" for Bz2 + compression (more compact but slower). Default 1 (zlib). + Note that some BSDF implementations may not support compression + (e.g. JavaScript). + + """ + + def _can_read(self, request): + if request.mode[1] in (self.modes + "?"): + # if request.extension in self.extensions: + # return True + if request.firstbytes.startswith(b"BSDF"): + return True + + def _can_write(self, request): + if request.mode[1] in (self.modes + "?"): + if request.extension in self.extensions: + return True + + # -- reader + + class Reader(Format.Reader): + def _open(self, random_access=None): + # Validate - we need a BSDF file consisting of a list of images + # The list is typically a stream, but does not have to be. 
+ assert self.request.firstbytes[:4] == b"BSDF", "Not a BSDF file" + # self.request.firstbytes[5:6] == major and minor version + if not ( + self.request.firstbytes[6:15] == b"M\x07image2D" + or self.request.firstbytes[6:15] == b"M\x07image3D" + or self.request.firstbytes[6:7] == b"l" + ): + pass # Actually, follow a more duck-type approach ... + # raise RuntimeError('BSDF file does not look like an ' + # 'image container.') + # Set options. If we think that seeking is allowed, we lazily load + # blobs, and set streaming to False (i.e. the whole file is read, + # but we skip over binary blobs), so that we subsequently allow + # random access to the images. + # If seeking is not allowed (e.g. with a http request), we cannot + # lazily load blobs, but we can still load streaming from the web. + options = {} + if self.request.filename.startswith(("http://", "https://")): + ra = False if random_access is None else bool(random_access) + options["lazy_blob"] = False # Because we cannot seek now + options["load_streaming"] = not ra # Load as a stream? + else: + ra = True if random_access is None else bool(random_access) + options["lazy_blob"] = ra # Don't read data until needed + options["load_streaming"] = not ra + + file = self.request.get_file() + bsdf, self._serializer = get_bsdf_serializer(options) + self._stream = self._serializer.load(file) + # Another validation + if ( + isinstance(self._stream, dict) + and "meta" in self._stream + and "array" in self._stream + ): + self._stream = Image(self._stream["array"], self._stream["meta"]) + if not isinstance(self._stream, (Image, list, bsdf.ListStream)): + raise RuntimeError( + "BSDF file does not look seem to have an " "image container." 
+ ) + + def _close(self): + pass + + def _get_length(self): + if isinstance(self._stream, Image): + return 1 + elif isinstance(self._stream, list): + return len(self._stream) + elif self._stream.count < 0: + return np.inf + return self._stream.count + + def _get_data(self, index): + # Validate + if index < 0 or index >= self.get_length(): + raise IndexError( + "Image index %i not in [0 %i]." % (index, self.get_length()) + ) + # Get Image object + if isinstance(self._stream, Image): + image_ob = self._stream # singleton + elif isinstance(self._stream, list): + # Easy when we have random access + image_ob = self._stream[index] + else: + # For streaming, we need to skip over frames + if index < self._stream.index: + raise IndexError( + "BSDF file is being read in streaming " + "mode, thus does not allow rewinding." + ) + while index > self._stream.index: + self._stream.next() + image_ob = self._stream.next() # Can raise StopIteration + # Is this an image? + if ( + isinstance(image_ob, dict) + and "meta" in image_ob + and "array" in image_ob + ): + image_ob = Image(image_ob["array"], image_ob["meta"]) + if isinstance(image_ob, Image): + # Return as array (if we have lazy blobs, they are read now) + return image_ob.get_array(), image_ob.get_meta() + else: + r = repr(image_ob) + r = r if len(r) < 200 else r[:197] + "..." 
+ raise RuntimeError("BSDF file contains non-image " + r) + + def _get_meta_data(self, index): # pragma: no cover + return {} # This format does not support global meta data + + # -- writer + + class Writer(Format.Writer): + def _open(self, compression=1): + options = {"compression": compression} + bsdf, self._serializer = get_bsdf_serializer(options) + if self.request.mode[1] in "iv": + self._stream = None # Singleton image + self._written = False + else: + # Series (stream) of images + file = self.request.get_file() + self._stream = bsdf.ListStream() + self._serializer.save(file, self._stream) + + def _close(self): + # We close the stream here, which will mark the number of written + # elements. If we would not close it, the file would be fine, it's + # just that upon reading it would not be known how many items are + # in there. + if self._stream is not None: + self._stream.close(False) # False says "keep this a stream" + + def _append_data(self, im, meta): + # Determine dimension + ndim = None + if self.request.mode[1] in "iI": + ndim = 2 + elif self.request.mode[1] in "vV": + ndim = 3 + else: + ndim = 3 # Make an educated guess + if im.ndim == 2 or (im.ndim == 3 and im.shape[-1] <= 4): + ndim = 2 + # Validate shape + assert ndim in (2, 3) + if ndim == 2: + assert im.ndim == 2 or (im.ndim == 3 and im.shape[-1] <= 4) + else: + assert im.ndim == 3 or (im.ndim == 4 and im.shape[-1] <= 4) + # Wrap data and meta data in our special class that will trigger + # the BSDF image2D or image3D extension. 
+ if ndim == 2: + ob = Image2D(im, meta) + else: + ob = Image3D(im, meta) + # Write directly or to stream + if self._stream is None: + assert not self._written, "Cannot write singleton image twice" + self._written = True + file = self.request.get_file() + self._serializer.save(file, ob) + else: + self._stream.append(ob) + + def set_meta_data(self, meta): # pragma: no cover + raise RuntimeError("The BSDF format only supports " "per-image meta data.") diff --git a/.venv/Lib/site-packages/imageio/plugins/dicom.py b/.venv/Lib/site-packages/imageio/plugins/dicom.py new file mode 100644 index 00000000..c5f36644 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/dicom.py @@ -0,0 +1,333 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +"""Read DICOM files. + +Backend Library: internal + +A format for reading DICOM images: a common format used to store +medical image data, such as X-ray, CT and MRI. + +This format borrows some code (and ideas) from the pydicom project. However, +only a predefined subset of tags are extracted from the file. This allows +for great simplifications allowing us to make a stand-alone reader, and +also results in a much faster read time. + +By default, only uncompressed and deflated transfer syntaxes are supported. +If gdcm or dcmtk is installed, these will be used to automatically convert +the data. See https://github.com/malaterre/GDCM/releases for installing GDCM. + +This format provides functionality to group images of the same +series together, thus extracting volumes (and multiple volumes). +Using volread will attempt to yield a volume. If multiple volumes +are present, the first one is given. Using mimread will simply yield +all images in the given directory (not taking series into account). + +Parameters +---------- +progress : {True, False, BaseProgressIndicator} + Whether to show progress when reading from multiple files. + Default True. 
By passing an object that inherits from + BaseProgressIndicator, the way in which progress is reported + can be costumized. + +""" + +# todo: Use pydicom: +# * Note: is not py3k ready yet +# * Allow reading the full meta info +# I think we can more or less replace the SimpleDicomReader with a +# pydicom.Dataset For series, only ned to read the full info from one +# file: speed still high +# * Perhaps allow writing? + +import os +import sys +import logging +import subprocess + +from ..core import Format, BaseProgressIndicator, StdoutProgressIndicator +from ..core import read_n_bytes + +_dicom = None # lazily loaded in load_lib() + +logger = logging.getLogger(__name__) + + +def load_lib(): + global _dicom + from . import _dicom + + return _dicom + + +# Determine endianity of system +sys_is_little_endian = sys.byteorder == "little" + + +def get_dcmdjpeg_exe(): + fname = "dcmdjpeg" + ".exe" * sys.platform.startswith("win") + for dir in ( + "c:\\dcmtk", + "c:\\Program Files", + "c:\\Program Files\\dcmtk", + "c:\\Program Files (x86)\\dcmtk", + ): + filename = os.path.join(dir, fname) + if os.path.isfile(filename): + return [filename] + + try: + subprocess.check_call([fname, "--version"]) + return [fname] + except Exception: + return None + + +def get_gdcmconv_exe(): + fname = "gdcmconv" + ".exe" * sys.platform.startswith("win") + # Maybe it's on the path + try: + subprocess.check_call([fname, "--version"]) + return [fname, "--raw"] + except Exception: + pass + # Select directories where it could be + candidates = [] + base_dirs = [r"c:\Program Files"] + for base_dir in base_dirs: + if os.path.isdir(base_dir): + for dname in os.listdir(base_dir): + if dname.lower().startswith("gdcm"): + suffix = dname[4:].strip() + candidates.append((suffix, os.path.join(base_dir, dname))) + # Sort, so higher versions are tried earlier + candidates.sort(reverse=True) + # Select executable + filename = None + for _, dirname in candidates: + exe1 = os.path.join(dirname, "gdcmconv.exe") + 
exe2 = os.path.join(dirname, "bin", "gdcmconv.exe") + if os.path.isfile(exe1): + filename = exe1 + break + if os.path.isfile(exe2): + filename = exe2 + break + else: + return None + return [filename, "--raw"] + + +class DicomFormat(Format): + """See :mod:`imageio.plugins.dicom`""" + + def _can_read(self, request): + # If user URI was a directory, we check whether it has a DICOM file + if os.path.isdir(request.filename): + files = os.listdir(request.filename) + for fname in sorted(files): # Sorting make it consistent + filename = os.path.join(request.filename, fname) + if os.path.isfile(filename) and "DICOMDIR" not in fname: + with open(filename, "rb") as f: + first_bytes = read_n_bytes(f, 140) + return first_bytes[128:132] == b"DICM" + else: + return False + # Check + return request.firstbytes[128:132] == b"DICM" + + def _can_write(self, request): + # We cannot save yet. May be possible if we will used pydicom as + # a backend. + return False + + # -- + + class Reader(Format.Reader): + _compressed_warning_dirs = set() + + def _open(self, progress=True): + if not _dicom: + load_lib() + if os.path.isdir(self.request.filename): + # A dir can be given if the user used the format explicitly + self._info = {} + self._data = None + else: + # Read the given dataset now ... + try: + dcm = _dicom.SimpleDicomReader(self.request.get_file()) + except _dicom.CompressedDicom as err: + # We cannot do this on our own. Perhaps with some help ... 
+ cmd = get_gdcmconv_exe() + if not cmd and "JPEG" in str(err): + cmd = get_dcmdjpeg_exe() + if not cmd: + msg = err.args[0].replace("using", "installing") + msg = msg.replace("convert", "auto-convert") + err.args = (msg,) + raise + else: + fname1 = self.request.get_local_filename() + fname2 = fname1 + ".raw" + try: + subprocess.check_call(cmd + [fname1, fname2]) + except Exception: + raise err + d = os.path.dirname(fname1) + if d not in self._compressed_warning_dirs: + self._compressed_warning_dirs.add(d) + logger.warning( + "DICOM file contained compressed data. " + + "Autoconverting with " + + cmd[0] + + " (this warning is shown once for each directory)" + ) + dcm = _dicom.SimpleDicomReader(fname2) + + self._info = dcm._info + self._data = dcm.get_numpy_array() + + # Initialize series, list of DicomSeries objects + self._series = None # only created if needed + + # Set progress indicator + if isinstance(progress, BaseProgressIndicator): + self._progressIndicator = progress + elif progress is True: + p = StdoutProgressIndicator("Reading DICOM") + self._progressIndicator = p + elif progress in (None, False): + self._progressIndicator = BaseProgressIndicator("Dummy") + else: + raise ValueError("Invalid value for progress.") + + def _close(self): + # Clean up + self._info = None + self._data = None + self._series = None + + @property + def series(self): + if self._series is None: + pi = self._progressIndicator + self._series = _dicom.process_directory(self.request, pi) + return self._series + + def _get_length(self): + if self._data is None: + dcm = self.series[0][0] + self._info = dcm._info + self._data = dcm.get_numpy_array() + + nslices = self._data.shape[0] if (self._data.ndim == 3) else 1 + + if self.request.mode[1] == "i": + # User expects one, but lets be honest about this file + return nslices + elif self.request.mode[1] == "I": + # User expects multiple, if this file has multiple slices, ok. + # Otherwise we have to check the series. 
+ if nslices > 1: + return nslices + else: + return sum([len(serie) for serie in self.series]) + elif self.request.mode[1] == "v": + # User expects a volume, if this file has one, ok. + # Otherwise we have to check the series + if nslices > 1: + return 1 + else: + return len(self.series) # We assume one volume per series + elif self.request.mode[1] == "V": + # User expects multiple volumes. We have to check the series + return len(self.series) # We assume one volume per series + else: + raise RuntimeError("DICOM plugin should know what to expect.") + + def _get_slice_data(self, index): + nslices = self._data.shape[0] if (self._data.ndim == 3) else 1 + + # Allow index >1 only if this file contains >1 + if nslices > 1: + return self._data[index], self._info + elif index == 0: + return self._data, self._info + else: + raise IndexError("Dicom file contains only one slice.") + + def _get_data(self, index): + if self._data is None: + dcm = self.series[0][0] + self._info = dcm._info + self._data = dcm.get_numpy_array() + + nslices = self._data.shape[0] if (self._data.ndim == 3) else 1 + + if self.request.mode[1] == "i": + return self._get_slice_data(index) + elif self.request.mode[1] == "I": + # Return slice from volume, or return item from series + if index == 0 and nslices > 1: + return self._data[index], self._info + else: + L = [] + for serie in self.series: + L.extend([dcm_ for dcm_ in serie]) + return L[index].get_numpy_array(), L[index].info + elif self.request.mode[1] in "vV": + # Return volume or series + if index == 0 and nslices > 1: + return self._data, self._info + else: + return ( + self.series[index].get_numpy_array(), + self.series[index].info, + ) + # mode is `?` (typically because we are using V3). If there is a + # series (multiple files), index referrs to the element of the + # series and we read volumes. If there is no series, index + # referrs to the slice in the volume we read "flat" images. 
+ elif len(self.series) > 1: + # mode is `?` and there are multiple series. Each series is a ndimage. + return ( + self.series[index].get_numpy_array(), + self.series[index].info, + ) + else: + # mode is `?` and there is only one series. Each slice is an ndimage. + return self._get_slice_data(index) + + def _get_meta_data(self, index): + if self._data is None: + dcm = self.series[0][0] + self._info = dcm._info + self._data = dcm.get_numpy_array() + + nslices = self._data.shape[0] if (self._data.ndim == 3) else 1 + + # Default is the meta data of the given file, or the "first" file. + if index is None: + return self._info + + if self.request.mode[1] == "i": + return self._info + elif self.request.mode[1] == "I": + # Return slice from volume, or return item from series + if index == 0 and nslices > 1: + return self._info + else: + L = [] + for serie in self.series: + L.extend([dcm_ for dcm_ in serie]) + return L[index].info + elif self.request.mode[1] in "vV": + # Return volume or series + if index == 0 and nslices > 1: + return self._info + else: + return self.series[index].info + else: # pragma: no cover + raise ValueError("DICOM plugin should know what to expect.") diff --git a/.venv/Lib/site-packages/imageio/plugins/example.py b/.venv/Lib/site-packages/imageio/plugins/example.py new file mode 100644 index 00000000..b7cf8b9b --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/example.py @@ -0,0 +1,145 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +""" Example plugin. You can use this as a template for your own plugin. +""" + +import numpy as np + +from .. import formats +from ..core import Format + + +class DummyFormat(Format): + """The dummy format is an example format that does nothing. + It will never indicate that it can read or write a file. When + explicitly asked to read, it will simply read the bytes. When + explicitly asked to write, it will raise an error. 
+ + This documentation is shown when the user does ``help('thisformat')``. + + Parameters for reading + ---------------------- + Specify arguments in numpy doc style here. + + Parameters for saving + --------------------- + Specify arguments in numpy doc style here. + + """ + + def _can_read(self, request): + # This method is called when the format manager is searching + # for a format to read a certain image. Return True if this format + # can do it. + # + # The format manager is aware of the extensions and the modes + # that each format can handle. It will first ask all formats + # that *seem* to be able to read it whether they can. If none + # can, it will ask the remaining formats if they can: the + # extension might be missing, and this allows formats to provide + # functionality for certain extensions, while giving preference + # to other plugins. + # + # If a format says it can, it should live up to it. The format + # would ideally check the request.firstbytes and look for a + # header of some kind. + # + # The request object has: + # request.filename: a representation of the source (only for reporting) + # request.firstbytes: the first 256 bytes of the file. + # request.mode[0]: read or write mode + + if request.extension in self.extensions: + return True + + def _can_write(self, request): + # This method is called when the format manager is searching + # for a format to write a certain image. It will first ask all + # formats that *seem* to be able to write it whether they can. + # If none can, it will ask the remaining formats if they can. + # + # Return True if the format can do it. + + # In most cases, this code does suffice: + if request.extension in self.extensions: + return True + + # -- reader + + class Reader(Format.Reader): + def _open(self, some_option=False, length=1): + # Specify kwargs here. Optionally, the user-specified kwargs + # can also be accessed via the request.kwargs object. 
+ # + # The request object provides two ways to get access to the + # data. Use just one: + # - Use request.get_file() for a file object (preferred) + # - Use request.get_local_filename() for a file on the system + self._fp = self.request.get_file() + self._length = length # passed as an arg in this case for testing + self._data = None + + def _close(self): + # Close the reader. + # Note that the request object will close self._fp + pass + + def _get_length(self): + # Return the number of images. Can be np.inf + return self._length + + def _get_data(self, index): + # Return the data and meta data for the given index + if index >= self._length: + raise IndexError("Image index %i > %i" % (index, self._length)) + # Read all bytes + if self._data is None: + self._data = self._fp.read() + # Put in a numpy array + im = np.frombuffer(self._data, "uint8") + im.shape = len(im), 1 + # Return array and dummy meta data + return im, {} + + def _get_meta_data(self, index): + # Get the meta data for the given index. If index is None, it + # should return the global meta data. + return {} # This format does not support meta data + + # -- writer + + class Writer(Format.Writer): + def _open(self, flags=0): + # Specify kwargs here. Optionally, the user-specified kwargs + # can also be accessed via the request.kwargs object. + # + # The request object provides two ways to write the data. + # Use just one: + # - Use request.get_file() for a file object (preferred) + # - Use request.get_local_filename() for a file on the system + self._fp = self.request.get_file() + + def _close(self): + # Close the reader. + # Note that the request object will close self._fp + pass + + def _append_data(self, im, meta): + # Process the given data and meta data. + raise RuntimeError("The dummy format cannot write image data.") + + def set_meta_data(self, meta): + # Process the given meta data (global for all images) + # It is not mandatory to support this. 
+ raise RuntimeError("The dummy format cannot write meta data.") + + +# Register. You register an *instance* of a Format class. Here specify: +format = DummyFormat( + "dummy", # short name + "An example format that does nothing.", # one line descr. + ".foobar .nonexistentext", # list of extensions + "iI", # modes, characters in iIvV +) +formats.add_format(format) diff --git a/.venv/Lib/site-packages/imageio/plugins/feisem.py b/.venv/Lib/site-packages/imageio/plugins/feisem.py new file mode 100644 index 00000000..af50768a --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/feisem.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +"""Read TIFF from FEI SEM microscopes. + +Backend Library: internal + +This format is based on :mod:`TIFF `, and supports the +same parameters. FEI microscopes append metadata as ASCII text at the end of the +file, which this reader correctly extracts. + +Parameters +---------- +discard_watermark : bool + If True (default), discard the bottom rows of the image, which + contain no image data, only a watermark with metadata. +watermark_height : int + The height in pixels of the FEI watermark. The default is 70. + +See Also +-------- + :mod:`imageio.plugins.tifffile` + +""" + + +from .tifffile import TiffFormat + + +class FEISEMFormat(TiffFormat): + """See :mod:`imageio.plugins.feisem`""" + + def _can_write(self, request): + return False # FEI-SEM only supports reading + + class Reader(TiffFormat.Reader): + def _get_data(self, index=0, discard_watermark=True, watermark_height=70): + """Get image and metadata from given index. + + FEI images usually (always?) contain a watermark at the + bottom of the image, 70 pixels high. We discard this by + default as it does not contain any information not present + in the metadata. 
+ """ + im, meta = super(FEISEMFormat.Reader, self)._get_data(index) + if discard_watermark: + im = im[:-watermark_height] + return im, meta + + def _get_meta_data(self, index=None): + """Read the metadata from an FEI SEM TIFF. + + This metadata is included as ASCII text at the end of the file. + + The index, if provided, is ignored. + + Returns + ------- + metadata : dict + Dictionary of metadata. + """ + if hasattr(self, "_fei_meta"): + return self._fei_meta + + md = {"root": {}} + current_tag = "root" + reading_metadata = False + filename = self.request.get_local_filename() + with open(filename, encoding="utf8", errors="ignore") as fin: + for line in fin: + if not reading_metadata: + if not line.startswith("Date="): + continue + else: + reading_metadata = True + line = line.rstrip() + if line.startswith("["): + current_tag = line.lstrip("[").rstrip("]") + md[current_tag] = {} + else: + if "=" in line: # ignore empty and irrelevant lines + key, val = line.split("=", maxsplit=1) + for tag_type in (int, float): + try: + val = tag_type(val) + except ValueError: + continue + else: + break + md[current_tag][key] = val + if not md["root"] and len(md) == 1: + raise ValueError("Input file %s contains no FEI metadata." % filename) + + self._fei_meta = md + return md diff --git a/.venv/Lib/site-packages/imageio/plugins/ffmpeg.py b/.venv/Lib/site-packages/imageio/plugins/ffmpeg.py new file mode 100644 index 00000000..ce47323b --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/ffmpeg.py @@ -0,0 +1,729 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +"""Read/Write video using FFMPEG + +.. note:: + We are in the process of (slowly) replacing this plugin with a new one that + is based on `pyav `_. It is faster and more + flexible than the plugin documented here. Check the :mod:`pyav + plugin's documentation ` for more information about + this plugin. + +Backend Library: https://github.com/imageio/imageio-ffmpeg + +.. 
note:: + To use this plugin you have to install its backend:: + + pip install imageio[ffmpeg] + + +The ffmpeg format provides reading and writing for a wide range of movie formats +such as .avi, .mpeg, .mp4, etc. as well as the ability to read streams from +webcams and USB cameras. It is based on ffmpeg and is inspired by/based `moviepy +`_ by Zulko. + +Parameters for reading +---------------------- +fps : scalar + The number of frames per second of the input stream. Default None (i.e. + read at the file's native fps). One can use this for files with a + variable fps, or in cases where imageio is unable to correctly detect + the fps. In case of trouble opening camera streams, it may help to set an + explicit fps value matching a framerate supported by the camera. +loop : bool + If True, the video will rewind as soon as a frame is requested + beyond the last frame. Otherwise, IndexError is raised. Default False. + Setting this to True will internally call ``count_frames()``, + and set the reader's length to that value instead of inf. +size : str | tuple + The frame size (i.e. resolution) to read the images, e.g. + (100, 100) or "640x480". For camera streams, this allows setting + the capture resolution. For normal video data, ffmpeg will + rescale the data. +dtype : str | type + The dtype for the output arrays. Determines the bit-depth that + is requested from ffmpeg. Supported dtypes: uint8, uint16. + Default: uint8. +pixelformat : str + The pixel format for the camera to use (e.g. "yuyv422" or + "gray"). The camera needs to support the format in order for + this to take effect. Note that the images produced by this + reader are always RGB. +input_params : list + List additional arguments to ffmpeg for input file options. 
+ (Can also be provided as ``ffmpeg_params`` for backwards compatibility) + Example ffmpeg arguments to use aggressive error handling: + ['-err_detect', 'aggressive'] +output_params : list + List additional arguments to ffmpeg for output file options (i.e. the + stream being read by imageio). +print_info : bool + Print information about the video file as reported by ffmpeg. + +Parameters for writing +---------------------- +fps : scalar + The number of frames per second. Default 10. +codec : str + the video codec to use. Default 'libx264', which represents the + widely available mpeg4. Except when saving .wmv files, then the + defaults is 'msmpeg4' which is more commonly supported for windows +quality : float | None + Video output quality. Default is 5. Uses variable bit rate. Highest + quality is 10, lowest is 0. Set to None to prevent variable bitrate + flags to FFMPEG so you can manually specify them using output_params + instead. Specifying a fixed bitrate using 'bitrate' disables this + parameter. +bitrate : int | None + Set a constant bitrate for the video encoding. Default is None causing + 'quality' parameter to be used instead. Better quality videos with + smaller file sizes will result from using the 'quality' variable + bitrate parameter rather than specifying a fixed bitrate with this + parameter. +pixelformat: str + The output video pixel format. Default is 'yuv420p' which most widely + supported by video players. +input_params : list + List additional arguments to ffmpeg for input file options (i.e. the + stream that imageio provides). +output_params : list + List additional arguments to ffmpeg for output file options. + (Can also be provided as ``ffmpeg_params`` for backwards compatibility) + Example ffmpeg arguments to use only intra frames and set aspect ratio: + ['-intra', '-aspect', '16:9'] +ffmpeg_log_level: str + Sets ffmpeg output log level. Default is "warning". 
+ Values can be "quiet", "panic", "fatal", "error", "warning", "info" + "verbose", or "debug". Also prints the FFMPEG command being used by + imageio if "info", "verbose", or "debug". +macro_block_size: int + Size constraint for video. Width and height, must be divisible by this + number. If not divisible by this number imageio will tell ffmpeg to + scale the image up to the next closest size + divisible by this number. Most codecs are compatible with a macroblock + size of 16 (default), some can go smaller (4, 8). To disable this + automatic feature set it to None or 1, however be warned many players + can't decode videos that are odd in size and some codecs will produce + poor results or fail. See https://en.wikipedia.org/wiki/Macroblock. +audio_path : str | None + Audio path of any audio that needs to be written. Defaults to nothing, + so no audio will be written. Please note, when writing shorter video + than the original, ffmpeg will not truncate the audio track; it + will maintain its original length and be longer than the video. +audio_codec : str | None + The audio codec to use. Defaults to nothing, but if an audio_path has + been provided ffmpeg will attempt to set a default codec. + +Notes +----- +If you are using anaconda and ``anaconda/ffmpeg`` you will not be able to +encode/decode H.264 (likely due to licensing concerns). If you need this +format on anaconda install ``conda-forge/ffmpeg`` instead. + +You can use the ``IMAGEIO_FFMPEG_EXE`` environment variable to force using a +specific ffmpeg executable. + +To get the number of frames before having read them all, you can use the +``reader.count_frames()`` method (the reader will then use +``imageio_ffmpeg.count_frames_and_secs()`` to get the exact number of frames, +note that this operation can take a few seconds on large files). Alternatively, +the number of frames can be estimated from the fps and duration in the meta data +(though these values themselves are not always present/reliable). 
+ +""" + +import re +import sys +import time +import logging +import platform +import threading +import subprocess as sp +import imageio_ffmpeg + +import numpy as np + +from ..core import Format, image_as_uint + +logger = logging.getLogger(__name__) + +# Get camera format +if sys.platform.startswith("win"): + CAM_FORMAT = "dshow" # dshow or vfwcap +elif sys.platform.startswith("linux"): + CAM_FORMAT = "video4linux2" +elif sys.platform.startswith("darwin"): + CAM_FORMAT = "avfoundation" +else: # pragma: no cover + CAM_FORMAT = "unknown-cam-format" + + +def download(directory=None, force_download=False): # pragma: no cover + raise RuntimeError( + "imageio.ffmpeg.download() has been deprecated. " + "Use 'pip install imageio-ffmpeg' instead.'" + ) + + +# For backwards compatibility - we dont use this ourselves +def get_exe(): # pragma: no cover + """Wrapper for imageio_ffmpeg.get_ffmpeg_exe()""" + + return imageio_ffmpeg.get_ffmpeg_exe() + + +class FfmpegFormat(Format): + """Read/Write ImageResources using FFMPEG. + + See :mod:`imageio.plugins.ffmpeg` + """ + + def _can_read(self, request): + # Read from video stream? + # Note that we could write the _video flag here, but a user might + # select this format explicitly (and this code is not run) + if re.match(r"", request.filename): + return True + + # Read from file that we know? 
+ if request.extension in self.extensions: + return True + + def _can_write(self, request): + if request.extension in self.extensions: + return True + + # -- + + class Reader(Format.Reader): + _frame_catcher = None + _read_gen = None + + def _get_cam_inputname(self, index): + if sys.platform.startswith("linux"): + return "/dev/" + self.request._video[1:-1] + + elif sys.platform.startswith("win"): + # Ask ffmpeg for list of dshow device names + ffmpeg_api = imageio_ffmpeg + cmd = [ + ffmpeg_api.get_ffmpeg_exe(), + "-list_devices", + "true", + "-f", + CAM_FORMAT, + "-i", + "dummy", + ] + # Set `shell=True` in sp.run to prevent popup of a command + # line window in frozen applications. Note: this would be a + # security vulnerability if user-input goes into the cmd. + # Note that the ffmpeg process returns with exit code 1 when + # using `-list_devices` (or `-list_options`), even if the + # command is successful, so we set `check=False` explicitly. + completed_process = sp.run( + cmd, + stdout=sp.PIPE, + stderr=sp.PIPE, + encoding="utf-8", + shell=True, + check=False, + ) + + # Return device name at index + try: + name = parse_device_names(completed_process.stderr)[index] + except IndexError: + raise IndexError("No ffdshow camera at index %i." % index) + return "video=%s" % name + + elif sys.platform.startswith("darwin"): + # Appears that newer ffmpeg builds don't support -list-devices + # on OS X. But you can directly open the camera by index. + name = str(index) + return name + + else: # pragma: no cover + return "??" 
+ + def _open( + self, + loop=False, + size=None, + dtype=None, + pixelformat=None, + print_info=False, + ffmpeg_params=None, + input_params=None, + output_params=None, + fps=None, + ): + # Get generator functions + self._ffmpeg_api = imageio_ffmpeg + # Process input args + self._arg_loop = bool(loop) + if size is None: + self._arg_size = None + elif isinstance(size, tuple): + self._arg_size = "%ix%i" % size + elif isinstance(size, str) and "x" in size: + self._arg_size = size + else: + raise ValueError('FFMPEG size must be tuple of "NxM"') + if pixelformat is None: + pass + elif not isinstance(pixelformat, str): + raise ValueError("FFMPEG pixelformat must be str") + if dtype is None: + self._dtype = np.dtype("uint8") + else: + self._dtype = np.dtype(dtype) + allowed_dtypes = ["uint8", "uint16"] + if self._dtype.name not in allowed_dtypes: + raise ValueError( + "dtype must be one of: {}".format(", ".join(allowed_dtypes)) + ) + self._arg_pixelformat = pixelformat + self._arg_input_params = input_params or [] + self._arg_output_params = output_params or [] + self._arg_input_params += ffmpeg_params or [] # backward compat + # Write "_video"_arg - indicating webcam support + self.request._video = None + regex_match = re.match(r"", self.request.filename) + if regex_match: + self.request._video = self.request.filename + # Get local filename + if self.request._video: + index = int(regex_match.group(1)) + self._filename = self._get_cam_inputname(index) + else: + self._filename = self.request.get_local_filename() + # When passed to ffmpeg on command line, carets need to be escaped. 
+ self._filename = self._filename.replace("^", "^^") + # Determine pixel format and depth + self._depth = 3 + if self._dtype.name == "uint8": + self._pix_fmt = "rgb24" + self._bytes_per_channel = 1 + else: + self._pix_fmt = "rgb48le" + self._bytes_per_channel = 2 + # Initialize parameters + self._pos = -1 + self._meta = {"plugin": "ffmpeg"} + self._lastread = None + + # Calculating this from fps and duration is not accurate, + # and calculating it exactly with ffmpeg_api.count_frames_and_secs + # takes too long to do for each video. But we need it for looping. + self._nframes = float("inf") + if self._arg_loop and not self.request._video: + self._nframes = self.count_frames() + self._meta["nframes"] = self._nframes + + # Specify input framerate? (only on macOS) + # Ideally we'd get the supported framerate from the metadata, but we get the + # metadata when we boot ffmpeg ... maybe we could refactor this so we can + # get the metadata beforehand, but for now we'll just give it 2 tries on MacOS, + # one with fps 30 and one with fps 15. + need_input_fps = need_output_fps = False + if self.request._video and platform.system().lower() == "darwin": + if "-framerate" not in str(self._arg_input_params): + need_input_fps = True + if not self.request.kwargs.get("fps", None): + need_output_fps = True + if need_input_fps: + self._arg_input_params.extend(["-framerate", str(float(30))]) + if need_output_fps: + self._arg_output_params.extend(["-r", str(float(30))]) + + # Start ffmpeg subprocess and get meta information + try: + self._initialize() + except IndexError: + # Specify input framerate again, this time different. 
+ if need_input_fps: + self._arg_input_params[-1] = str(float(15)) + self._initialize() + else: + raise + + # For cameras, create thread that keeps reading the images + if self.request._video: + self._frame_catcher = FrameCatcher(self._read_gen) + + # For reference - but disabled, because it is inaccurate + # if self._meta["nframes"] == float("inf"): + # if self._meta.get("fps", 0) > 0: + # if self._meta.get("duration", 0) > 0: + # n = round(self._meta["duration"] * self._meta["fps"]) + # self._meta["nframes"] = int(n) + + def _close(self): + # First close the frame catcher, because we cannot close the gen + # if the frame catcher thread is using it + if self._frame_catcher is not None: + self._frame_catcher.stop_me() + self._frame_catcher = None + if self._read_gen is not None: + self._read_gen.close() + self._read_gen = None + + def count_frames(self): + """Count the number of frames. Note that this can take a few + seconds for large files. Also note that it counts the number + of frames in the original video and does not take a given fps + into account. + """ + # This would have been nice, but this does not work :( + # oargs = [] + # if self.request.kwargs.get("fps", None): + # fps = float(self.request.kwargs["fps"]) + # oargs += ["-r", "%.02f" % fps] + cf = self._ffmpeg_api.count_frames_and_secs + return cf(self._filename)[0] + + def _get_length(self): + return self._nframes # only not inf if loop is True + + def _get_data(self, index): + """Reads a frame at index. Note for coders: getting an + arbitrary frame in the video with ffmpeg can be painfully + slow if some decoding has to be done. 
This function tries + to avoid fectching arbitrary frames whenever possible, by + moving between adjacent frames.""" + # Modulo index (for looping) + if self._arg_loop and self._nframes < float("inf"): + index %= self._nframes + + if index == self._pos: + return self._lastread, dict(new=False) + elif index < 0: + raise IndexError("Frame index must be >= 0") + elif index >= self._nframes: + raise IndexError("Reached end of video") + else: + if (index < self._pos) or (index > self._pos + 100): + self._initialize(index) + else: + self._skip_frames(index - self._pos - 1) + result, is_new = self._read_frame() + self._pos = index + return result, dict(new=is_new) + + def _get_meta_data(self, index): + return self._meta + + def _initialize(self, index=0): + # Close the current generator, and thereby terminate its subprocess + if self._read_gen is not None: + self._read_gen.close() + + iargs = [] + oargs = [] + + # Create input args + iargs += self._arg_input_params + if self.request._video: + iargs += ["-f", CAM_FORMAT] + if self._arg_pixelformat: + iargs += ["-pix_fmt", self._arg_pixelformat] + if self._arg_size: + iargs += ["-s", self._arg_size] + elif index > 0: # re-initialize / seek + # Note: only works if we initialized earlier, and now have meta + # Some info here: https://trac.ffmpeg.org/wiki/Seeking + # There are two ways to seek, one before -i (input_params) and + # after (output_params). The former is fast, because it uses + # keyframes, the latter is slow but accurate. According to + # the article above, the fast method should also be accurate + # from ffmpeg version 2.1, however in version 4.1 our tests + # start failing again. Not sure why, but we can solve this + # by combining slow and fast. Seek the long stretch using + # the fast method, and seek the last 10s the slow way. 
+ starttime = index / self._meta["fps"] + seek_slow = min(10, starttime) + seek_fast = starttime - seek_slow + # We used to have this epsilon earlier, when we did not use + # the slow seek. I don't think we need it anymore. + # epsilon = -1 / self._meta["fps"] * 0.1 + iargs += ["-ss", "%.06f" % (seek_fast)] + oargs += ["-ss", "%.06f" % (seek_slow)] + + # Output args, for writing to pipe + if self._arg_size: + oargs += ["-s", self._arg_size] + if self.request.kwargs.get("fps", None): + fps = float(self.request.kwargs["fps"]) + oargs += ["-r", "%.02f" % fps] + oargs += self._arg_output_params + + # Get pixelformat and bytes per pixel + pix_fmt = self._pix_fmt + bpp = self._depth * self._bytes_per_channel + + # Create generator + rf = self._ffmpeg_api.read_frames + self._read_gen = rf( + self._filename, pix_fmt, bpp, input_params=iargs, output_params=oargs + ) + + # Read meta data. This start the generator (and ffmpeg subprocess) + if self.request._video: + # With cameras, catch error and turn into IndexError + try: + meta = self._read_gen.__next__() + except IOError as err: + err_text = str(err) + if "darwin" in sys.platform: + if "Unknown input format: 'avfoundation'" in err_text: + err_text += ( + "Try installing FFMPEG using " + "home brew to get a version with " + "support for cameras." 
+ ) + raise IndexError( + "No (working) camera at {}.\n\n{}".format( + self.request._video, err_text + ) + ) + else: + self._meta.update(meta) + elif index == 0: + self._meta.update(self._read_gen.__next__()) + else: + self._read_gen.__next__() # we already have meta data + + def _skip_frames(self, n=1): + """Reads and throws away n frames""" + for i in range(n): + self._read_gen.__next__() + self._pos += n + + def _read_frame(self): + # Read and convert to numpy array + w, h = self._meta["size"] + framesize = w * h * self._depth * self._bytes_per_channel + # t0 = time.time() + + # Read frame + if self._frame_catcher: # pragma: no cover - camera thing + s, is_new = self._frame_catcher.get_frame() + else: + s = self._read_gen.__next__() + is_new = True + + # Check + if len(s) != framesize: + raise RuntimeError( + "Frame is %i bytes, but expected %i." % (len(s), framesize) + ) + + result = np.frombuffer(s, dtype=self._dtype).copy() + result = result.reshape((h, w, self._depth)) + # t1 = time.time() + # print('etime', t1-t0) + + # Store and return + self._lastread = result + return result, is_new + + # -- + + class Writer(Format.Writer): + _write_gen = None + + def _open( + self, + fps=10, + codec="libx264", + bitrate=None, + pixelformat="yuv420p", + ffmpeg_params=None, + input_params=None, + output_params=None, + ffmpeg_log_level="quiet", + quality=5, + macro_block_size=16, + audio_path=None, + audio_codec=None, + ): + self._ffmpeg_api = imageio_ffmpeg + self._filename = self.request.get_local_filename() + self._pix_fmt = None + self._depth = None + self._size = None + + def _close(self): + if self._write_gen is not None: + self._write_gen.close() + self._write_gen = None + + def _append_data(self, im, meta): + # Get props of image + h, w = im.shape[:2] + size = w, h + depth = 1 if im.ndim == 2 else im.shape[2] + + # Ensure that image is in uint8 + im = image_as_uint(im, bitdepth=8) + # To be written efficiently, ie. 
without creating an immutable + # buffer, by calling im.tobytes() the array must be contiguous. + if not im.flags.c_contiguous: + # checkign the flag is a micro optimization. + # the image will be a numpy subclass. See discussion + # https://github.com/numpy/numpy/issues/11804 + im = np.ascontiguousarray(im) + + # Set size and initialize if not initialized yet + if self._size is None: + map = {1: "gray", 2: "gray8a", 3: "rgb24", 4: "rgba"} + self._pix_fmt = map.get(depth, None) + if self._pix_fmt is None: + raise ValueError("Image must have 1, 2, 3 or 4 channels") + self._size = size + self._depth = depth + self._initialize() + + # Check size of image + if size != self._size: + raise ValueError("All images in a movie should have same size") + if depth != self._depth: + raise ValueError( + "All images in a movie should have same " "number of channels" + ) + + assert self._write_gen is not None # Check status + + # Write. Yes, we can send the data in as a numpy array + self._write_gen.send(im) + + def set_meta_data(self, meta): + raise RuntimeError( + "The ffmpeg format does not support setting " "meta data." 
+ ) + + def _initialize(self): + # Close existing generator + if self._write_gen is not None: + self._write_gen.close() + + # Get parameters + # Use None to let imageio-ffmpeg (or ffmpeg) select good results + fps = self.request.kwargs.get("fps", 10) + codec = self.request.kwargs.get("codec", None) + bitrate = self.request.kwargs.get("bitrate", None) + quality = self.request.kwargs.get("quality", None) + input_params = self.request.kwargs.get("input_params") or [] + output_params = self.request.kwargs.get("output_params") or [] + output_params += self.request.kwargs.get("ffmpeg_params") or [] + pixelformat = self.request.kwargs.get("pixelformat", None) + macro_block_size = self.request.kwargs.get("macro_block_size", 16) + ffmpeg_log_level = self.request.kwargs.get("ffmpeg_log_level", None) + audio_path = self.request.kwargs.get("audio_path", None) + audio_codec = self.request.kwargs.get("audio_codec", None) + + macro_block_size = macro_block_size or 1 # None -> 1 + + # Create generator + self._write_gen = self._ffmpeg_api.write_frames( + self._filename, + self._size, + pix_fmt_in=self._pix_fmt, + pix_fmt_out=pixelformat, + fps=fps, + quality=quality, + bitrate=bitrate, + codec=codec, + macro_block_size=macro_block_size, + ffmpeg_log_level=ffmpeg_log_level, + input_params=input_params, + output_params=output_params, + audio_path=audio_path, + audio_codec=audio_codec, + ) + + # Seed the generator (this is where the ffmpeg subprocess starts) + self._write_gen.send(None) + + +class FrameCatcher(threading.Thread): + """Thread to keep reading the frame data from stdout. This is + useful when streaming from a webcam. Otherwise, if the user code + does not grab frames fast enough, the buffer will fill up, leading + to lag, and ffmpeg can also stall (experienced on Linux). The + get_frame() method always returns the last available image. 
+ """ + + def __init__(self, gen): + self._gen = gen + self._frame = None + self._frame_is_new = False + self._lock = threading.RLock() + threading.Thread.__init__(self) + self.daemon = True # do not let this thread hold up Python shutdown + self._should_stop = False + self.start() + + def stop_me(self): + self._should_stop = True + while self.is_alive(): + time.sleep(0.001) + + def get_frame(self): + while self._frame is None: # pragma: no cover - an init thing + time.sleep(0.001) + with self._lock: + is_new = self._frame_is_new + self._frame_is_new = False # reset + return self._frame, is_new + + def run(self): + # This runs in the worker thread + try: + while not self._should_stop: + time.sleep(0) # give control to other threads + frame = self._gen.__next__() + with self._lock: + self._frame = frame + self._frame_is_new = True + except (StopIteration, EOFError): + pass + + +def parse_device_names(ffmpeg_output): + """Parse the output of the ffmpeg -list-devices command""" + # Collect device names - get [friendly_name, alt_name] of each + device_names = [] + in_video_devices = False + for line in ffmpeg_output.splitlines(): + if line.startswith("[dshow"): + logger.debug(line) + line = line.split("]", 1)[1].strip() + if in_video_devices and line.startswith('"'): + friendly_name = line[1:-1] + device_names.append([friendly_name, ""]) + elif in_video_devices and line.lower().startswith("alternative name"): + alt_name = line.split(" name ", 1)[1].strip()[1:-1] + if sys.platform.startswith("win"): + alt_name = alt_name.replace("&", "^&") # Tested to work + else: + alt_name = alt_name.replace("&", "\\&") # Does this work? 
+ device_names[-1][-1] = alt_name + elif "video devices" in line: + in_video_devices = True + elif "devices" in line: + # set False for subsequent "devices" sections + in_video_devices = False + # Post-process, see #441 + # prefer friendly names, use alt name if two cams have same friendly name + device_names2 = [] + for friendly_name, alt_name in device_names: + if friendly_name not in device_names2: + device_names2.append(friendly_name) + elif alt_name: + device_names2.append(alt_name) + else: + device_names2.append(friendly_name) # duplicate, but not much we can do + return device_names2 diff --git a/.venv/Lib/site-packages/imageio/plugins/fits.py b/.venv/Lib/site-packages/imageio/plugins/fits.py new file mode 100644 index 00000000..4617d1ea --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/fits.py @@ -0,0 +1,126 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +"""Read FITS files. + +Backend Library: `Astropy `_ + +.. note:: + To use this plugin you have to install its backend:: + + pip install imageio[fits] + +Flexible Image Transport System (FITS) is an open standard defining a +digital file format useful for storage, transmission and processing of +scientific and other images. FITS is the most commonly used digital +file format in astronomy. + + +Parameters +---------- +cache : bool + If the file name is a URL, `~astropy.utils.data.download_file` is used + to open the file. This specifies whether or not to save the file + locally in Astropy's download cache (default: `True`). +uint : bool + Interpret signed integer data where ``BZERO`` is the + central value and ``BSCALE == 1`` as unsigned integer + data. For example, ``int16`` data with ``BZERO = 32768`` + and ``BSCALE = 1`` would be treated as ``uint16`` data. + + Note, for backward compatibility, the kwarg **uint16** may + be used instead. The kwarg was renamed when support was + added for integers of any size. 
+ignore_missing_end : bool + Do not issue an exception when opening a file that is + missing an ``END`` card in the last header. +checksum : bool or str + If `True`, verifies that both ``DATASUM`` and + ``CHECKSUM`` card values (when present in the HDU header) + match the header and data of all HDU's in the file. Updates to a + file that already has a checksum will preserve and update the + existing checksums unless this argument is given a value of + 'remove', in which case the CHECKSUM and DATASUM values are not + checked, and are removed when saving changes to the file. +disable_image_compression : bool, optional + If `True`, treats compressed image HDU's like normal + binary table HDU's. +do_not_scale_image_data : bool + If `True`, image data is not scaled using BSCALE/BZERO values + when read. +ignore_blank : bool + If `True`, the BLANK keyword is ignored if present. +scale_back : bool + If `True`, when saving changes to a file that contained scaled + image data, restore the data to the original type and reapply the + original BSCALE/BZERO values. This could lead to loss of accuracy + if scaling back to integer values after performing floating point + operations on the data. + +""" + +from ..core import Format + +_fits = None # lazily loaded + + +def load_lib(): + global _fits + try: + from astropy.io import fits as _fits + except ImportError: + raise ImportError( + "The FITS format relies on the astropy package." + "Please refer to http://www.astropy.org/ " + "for further instructions." + ) + return _fits + + +class FitsFormat(Format): + """See :mod:`imageio.plugins.fits`""" + + def _can_read(self, request): + # We return True if ext matches, because this is the only plugin + # that can. If astropy is not installed, a useful error follows. 
+ return request.extension in self.extensions + + def _can_write(self, request): + # No write support + return False + + # -- reader + + class Reader(Format.Reader): + def _open(self, cache=False, **kwargs): + if not _fits: + load_lib() + hdulist = _fits.open(self.request.get_file(), cache=cache, **kwargs) + + self._index = [] + allowed_hdu_types = (_fits.ImageHDU, _fits.PrimaryHDU, _fits.CompImageHDU) + for n, hdu in zip(range(len(hdulist)), hdulist): + if isinstance(hdu, allowed_hdu_types): + # Ignore (primary) header units with no data (use '.size' + # rather than '.data' to avoid actually loading the image): + if hdu.size > 0: + self._index.append(n) + self._hdulist = hdulist + + def _close(self): + self._hdulist.close() + + def _get_length(self): + return len(self._index) + + def _get_data(self, index): + # Get data + if index < 0 or index >= len(self._index): + raise IndexError("Index out of range while reading from fits") + im = self._hdulist[self._index[index]].data + # Return array and empty meta data + return im, {} + + def _get_meta_data(self, index): + # Get the meta data for the given index + raise RuntimeError("The fits format does not support meta data.") diff --git a/.venv/Lib/site-packages/imageio/plugins/freeimage.py b/.venv/Lib/site-packages/imageio/plugins/freeimage.py new file mode 100644 index 00000000..922899f8 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/freeimage.py @@ -0,0 +1,404 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +"""Read/Write images using FreeImage. + +Backend Library: `FreeImage `_ + +.. note:: + To use this plugin you have to install its backend:: + + imageio_download_bin freeimage + + or you can download the backend using the function:: + + imageio.plugins.freeimage.download() + +Each Freeimage format has the ``flags`` keyword argument. See the `Freeimage +documentation `_ for more information. 
+ +Parameters +---------- +flags : int + A freeimage-specific option. In most cases we provide explicit + parameters for influencing image reading. + +""" + +import numpy as np + +from ..core import Format, image_as_uint +from ..core.request import RETURN_BYTES +from ._freeimage import FNAME_PER_PLATFORM, IO_FLAGS, download, fi # noqa + +# todo: support files with only meta data + + +class FreeimageFormat(Format): + """See :mod:`imageio.plugins.freeimage`""" + + _modes = "i" + + def __init__(self, name, description, extensions=None, modes=None, *, fif=None): + super().__init__(name, description, extensions=extensions, modes=modes) + self._fif = fif + + @property + def fif(self): + return self._fif # Set when format is created + + def _can_read(self, request): + # Ask freeimage if it can read it, maybe ext missing + if fi.has_lib(): + if not hasattr(request, "_fif"): + try: + request._fif = fi.getFIF(request.filename, "r", request.firstbytes) + except Exception: # pragma: no cover + request._fif = -1 + if request._fif == self.fif: + return True + elif request._fif == 7 and self.fif == 14: + # PPM gets identified as PBM and PPM can read PBM + # see: https://github.com/imageio/imageio/issues/677 + return True + + def _can_write(self, request): + # Ask freeimage, because we are not aware of all formats + if fi.has_lib(): + if not hasattr(request, "_fif"): + try: + request._fif = fi.getFIF(request.filename, "w") + except ValueError: # pragma: no cover + if request.raw_uri == RETURN_BYTES: + request._fif = self.fif + else: + request._fif = -1 + if request._fif is self.fif: + return True + + # -- + + class Reader(Format.Reader): + def _get_length(self): + return 1 + + def _open(self, flags=0): + self._bm = fi.create_bitmap(self.request.filename, self.format.fif, flags) + self._bm.load_from_filename(self.request.get_local_filename()) + + def _close(self): + self._bm.close() + + def _get_data(self, index): + if index != 0: + raise IndexError("This format only supports 
singleton images.") + return self._bm.get_image_data(), self._bm.get_meta_data() + + def _get_meta_data(self, index): + if not (index is None or index == 0): + raise IndexError() + return self._bm.get_meta_data() + + # -- + + class Writer(Format.Writer): + def _open(self, flags=0): + self._flags = flags # Store flags for later use + self._bm = None + self._is_set = False # To prevent appending more than one image + self._meta = {} + + def _close(self): + # Set global meta data + self._bm.set_meta_data(self._meta) + # Write and close + self._bm.save_to_filename(self.request.get_local_filename()) + self._bm.close() + + def _append_data(self, im, meta): + # Check if set + if not self._is_set: + self._is_set = True + else: + raise RuntimeError( + "Singleton image; " "can only append image data once." + ) + # Pop unit dimension for grayscale images + if im.ndim == 3 and im.shape[-1] == 1: + im = im[:, :, 0] + # Lazy instantaion of the bitmap, we need image data + if self._bm is None: + self._bm = fi.create_bitmap( + self.request.filename, self.format.fif, self._flags + ) + self._bm.allocate(im) + # Set data + self._bm.set_image_data(im) + # There is no distinction between global and per-image meta data + # for singleton images + self._meta = meta + + def _set_meta_data(self, meta): + self._meta = meta + + +# Special plugins + +# todo: there is also FIF_LOAD_NOPIXELS, +# but perhaps that should be used with get_meta_data. + + +class FreeimageBmpFormat(FreeimageFormat): + """A BMP format based on the Freeimage library. + + This format supports grayscale, RGB and RGBA images. + + The freeimage plugin requires a `freeimage` binary. If this binary + not available on the system, it can be downloaded manually from + by either + + - the command line script ``imageio_download_bin freeimage`` + - the Python method ``imageio.plugins.freeimage.download()`` + + Parameters for saving + --------------------- + compression : bool + Whether to compress the bitmap using RLE when saving. 
Default False. + It seems this does not always work, but who cares, you should use + PNG anyway. + + """ + + class Writer(FreeimageFormat.Writer): + def _open(self, flags=0, compression=False): + # Build flags from kwargs + flags = int(flags) + if compression: + flags |= IO_FLAGS.BMP_SAVE_RLE + else: + flags |= IO_FLAGS.BMP_DEFAULT + # Act as usual, but with modified flags + return FreeimageFormat.Writer._open(self, flags) + + def _append_data(self, im, meta): + im = image_as_uint(im, bitdepth=8) + return FreeimageFormat.Writer._append_data(self, im, meta) + + +class FreeimagePngFormat(FreeimageFormat): + """A PNG format based on the Freeimage library. + + This format supports grayscale, RGB and RGBA images. + + The freeimage plugin requires a `freeimage` binary. If this binary + not available on the system, it can be downloaded manually from + by either + + - the command line script ``imageio_download_bin freeimage`` + - the Python method ``imageio.plugins.freeimage.download()`` + + Parameters for reading + ---------------------- + ignoregamma : bool + Avoid gamma correction. Default True. + + Parameters for saving + --------------------- + compression : {0, 1, 6, 9} + The compression factor. Higher factors result in more + compression at the cost of speed. Note that PNG compression is + always lossless. Default 9. + quantize : int + If specified, turn the given RGB or RGBA image in a paletted image + for more efficient storage. The value should be between 2 and 256. + If the value of 0 the image is not quantized. + interlaced : bool + Save using Adam7 interlacing. Default False. 
+ """ + + class Reader(FreeimageFormat.Reader): + def _open(self, flags=0, ignoregamma=True): + # Build flags from kwargs + flags = int(flags) + if ignoregamma: + flags |= IO_FLAGS.PNG_IGNOREGAMMA + # Enter as usual, with modified flags + return FreeimageFormat.Reader._open(self, flags) + + # -- + + class Writer(FreeimageFormat.Writer): + def _open(self, flags=0, compression=9, quantize=0, interlaced=False): + compression_map = { + 0: IO_FLAGS.PNG_Z_NO_COMPRESSION, + 1: IO_FLAGS.PNG_Z_BEST_SPEED, + 6: IO_FLAGS.PNG_Z_DEFAULT_COMPRESSION, + 9: IO_FLAGS.PNG_Z_BEST_COMPRESSION, + } + # Build flags from kwargs + flags = int(flags) + if interlaced: + flags |= IO_FLAGS.PNG_INTERLACED + try: + flags |= compression_map[compression] + except KeyError: + raise ValueError("Png compression must be 0, 1, 6, or 9.") + # Act as usual, but with modified flags + return FreeimageFormat.Writer._open(self, flags) + + def _append_data(self, im, meta): + if str(im.dtype) == "uint16": + im = image_as_uint(im, bitdepth=16) + else: + im = image_as_uint(im, bitdepth=8) + FreeimageFormat.Writer._append_data(self, im, meta) + # Quantize? + q = int(self.request.kwargs.get("quantize", False)) + if not q: + pass + elif not (im.ndim == 3 and im.shape[-1] == 3): + raise ValueError("Can only quantize RGB images") + elif q < 2 or q > 256: + raise ValueError("PNG quantize param must be 2..256") + else: + bm = self._bm.quantize(0, q) + self._bm.close() + self._bm = bm + + +class FreeimageJpegFormat(FreeimageFormat): + """A JPEG format based on the Freeimage library. + + This format supports grayscale and RGB images. + + The freeimage plugin requires a `freeimage` binary. 
If this binary + not available on the system, it can be downloaded manually from + by either + + - the command line script ``imageio_download_bin freeimage`` + - the Python method ``imageio.plugins.freeimage.download()`` + + Parameters for reading + ---------------------- + exifrotate : bool + Automatically rotate the image according to the exif flag. + Default True. If 2 is given, do the rotation in Python instead + of freeimage. + quickread : bool + Read the image more quickly, at the expense of quality. + Default False. + + Parameters for saving + --------------------- + quality : scalar + The compression factor of the saved image (1..100), higher + numbers result in higher quality but larger file size. Default 75. + progressive : bool + Save as a progressive JPEG file (e.g. for images on the web). + Default False. + optimize : bool + On saving, compute optimal Huffman coding tables (can reduce a + few percent of file size). Default False. + baseline : bool + Save basic JPEG, without metadata or any markers. Default False. + + """ + + class Reader(FreeimageFormat.Reader): + def _open(self, flags=0, exifrotate=True, quickread=False): + # Build flags from kwargs + flags = int(flags) + if exifrotate and exifrotate != 2: + flags |= IO_FLAGS.JPEG_EXIFROTATE + if not quickread: + flags |= IO_FLAGS.JPEG_ACCURATE + # Enter as usual, with modified flags + return FreeimageFormat.Reader._open(self, flags) + + def _get_data(self, index): + im, meta = FreeimageFormat.Reader._get_data(self, index) + im = self._rotate(im, meta) + return im, meta + + def _rotate(self, im, meta): + """Use Orientation information from EXIF meta data to + orient the image correctly. Freeimage is also supposed to + support that, and I am pretty sure it once did, but now it + does not, so let's just do it in Python. + Edit: and now it works again, just leave in place as a fallback. 
+ """ + if self.request.kwargs.get("exifrotate", None) == 2: + try: + ori = meta["EXIF_MAIN"]["Orientation"] + except KeyError: # pragma: no cover + pass # Orientation not available + else: # pragma: no cover - we cannot touch all cases + # www.impulseadventure.com/photo/exif-orientation.html + if ori in [1, 2]: + pass + if ori in [3, 4]: + im = np.rot90(im, 2) + if ori in [5, 6]: + im = np.rot90(im, 3) + if ori in [7, 8]: + im = np.rot90(im) + if ori in [2, 4, 5, 7]: # Flipped cases (rare) + im = np.fliplr(im) + return im + + # -- + + class Writer(FreeimageFormat.Writer): + def _open( + self, flags=0, quality=75, progressive=False, optimize=False, baseline=False + ): + # Test quality + quality = int(quality) + if quality < 1 or quality > 100: + raise ValueError("JPEG quality should be between 1 and 100.") + # Build flags from kwargs + flags = int(flags) + flags |= quality + if progressive: + flags |= IO_FLAGS.JPEG_PROGRESSIVE + if optimize: + flags |= IO_FLAGS.JPEG_OPTIMIZE + if baseline: + flags |= IO_FLAGS.JPEG_BASELINE + # Act as usual, but with modified flags + return FreeimageFormat.Writer._open(self, flags) + + def _append_data(self, im, meta): + if im.ndim == 3 and im.shape[-1] == 4: + raise IOError("JPEG does not support alpha channel.") + im = image_as_uint(im, bitdepth=8) + return FreeimageFormat.Writer._append_data(self, im, meta) + + +class FreeimagePnmFormat(FreeimageFormat): + """A PNM format based on the Freeimage library. + + This format supports single bit (PBM), grayscale (PGM) and RGB (PPM) + images, even with ASCII or binary coding. + + The freeimage plugin requires a `freeimage` binary. If this binary + not available on the system, it can be downloaded manually from + by either + + - the command line script ``imageio_download_bin freeimage`` + - the Python method ``imageio.plugins.freeimage.download()`` + + Parameters for saving + --------------------- + use_ascii : bool + Save with ASCII coding. Default True. 
+ """ + + class Writer(FreeimageFormat.Writer): + def _open(self, flags=0, use_ascii=True): + # Build flags from kwargs + flags = int(flags) + if use_ascii: + flags |= IO_FLAGS.PNM_SAVE_ASCII + # Act as usual, but with modified flags + return FreeimageFormat.Writer._open(self, flags) diff --git a/.venv/Lib/site-packages/imageio/plugins/freeimagemulti.py b/.venv/Lib/site-packages/imageio/plugins/freeimagemulti.py new file mode 100644 index 00000000..bad53d40 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/freeimagemulti.py @@ -0,0 +1,316 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +"""Plugin for multi-image freeimafe formats, like animated GIF and ico. +""" + +import logging +import numpy as np + +from ..core import Format, image_as_uint +from ._freeimage import fi, IO_FLAGS +from .freeimage import FreeimageFormat + +logger = logging.getLogger(__name__) + + +class FreeimageMulti(FreeimageFormat): + """Base class for freeimage formats that support multiple images.""" + + _modes = "iI" + _fif = -1 + + class Reader(Format.Reader): + def _open(self, flags=0): + flags = int(flags) + # Create bitmap + self._bm = fi.create_multipage_bitmap( + self.request.filename, self.format.fif, flags + ) + self._bm.load_from_filename(self.request.get_local_filename()) + + def _close(self): + self._bm.close() + + def _get_length(self): + return len(self._bm) + + def _get_data(self, index): + sub = self._bm.get_page(index) + try: + return sub.get_image_data(), sub.get_meta_data() + finally: + sub.close() + + def _get_meta_data(self, index): + index = index or 0 + if index < 0 or index >= len(self._bm): + raise IndexError() + sub = self._bm.get_page(index) + try: + return sub.get_meta_data() + finally: + sub.close() + + # -- + + class Writer(FreeimageFormat.Writer): + def _open(self, flags=0): + # Set flags + self._flags = flags = int(flags) + # Instantiate multi-page bitmap + self._bm = fi.create_multipage_bitmap( + 
self.request.filename, self.format.fif, flags + ) + self._bm.save_to_filename(self.request.get_local_filename()) + + def _close(self): + # Close bitmap + self._bm.close() + + def _append_data(self, im, meta): + # Prepare data + if im.ndim == 3 and im.shape[-1] == 1: + im = im[:, :, 0] + im = image_as_uint(im, bitdepth=8) + # Create sub bitmap + sub1 = fi.create_bitmap(self._bm._filename, self.format.fif) + # Let subclass add data to bitmap, optionally return new + sub2 = self._append_bitmap(im, meta, sub1) + # Add + self._bm.append_bitmap(sub2) + sub2.close() + if sub1 is not sub2: + sub1.close() + + def _append_bitmap(self, im, meta, bitmap): + # Set data + bitmap.allocate(im) + bitmap.set_image_data(im) + bitmap.set_meta_data(meta) + # Return that same bitmap + return bitmap + + def _set_meta_data(self, meta): + pass # ignore global meta data + + +class MngFormat(FreeimageMulti): + """An Mng format based on the Freeimage library. + + Read only. Seems broken. + """ + + _fif = 6 + + def _can_write(self, request): # pragma: no cover + return False + + +class IcoFormat(FreeimageMulti): + """An ICO format based on the Freeimage library. + + This format supports grayscale, RGB and RGBA images. + + The freeimage plugin requires a `freeimage` binary. If this binary + is not available on the system, it can be downloaded by either + + - the command line script ``imageio_download_bin freeimage`` + - the Python method ``imageio.plugins.freeimage.download()`` + + Parameters for reading + ---------------------- + makealpha : bool + Convert to 32-bit and create an alpha channel from the AND- + mask when loading. Default False. Note that this returns wrong + results if the image was already RGBA. 
+ + """ + + _fif = 1 + + class Reader(FreeimageMulti.Reader): + def _open(self, flags=0, makealpha=False): + # Build flags from kwargs + flags = int(flags) + if makealpha: + flags |= IO_FLAGS.ICO_MAKEALPHA + return FreeimageMulti.Reader._open(self, flags) + + +class GifFormat(FreeimageMulti): + """A format for reading and writing static and animated GIF, based + on the Freeimage library. + + Images read with this format are always RGBA. Currently, + the alpha channel is ignored when saving RGB images with this + format. + + The freeimage plugin requires a `freeimage` binary. If this binary + is not available on the system, it can be downloaded by either + + - the command line script ``imageio_download_bin freeimage`` + - the Python method ``imageio.plugins.freeimage.download()`` + + Parameters for reading + ---------------------- + playback : bool + 'Play' the GIF to generate each frame (as 32bpp) instead of + returning raw frame data when loading. Default True. + + Parameters for saving + --------------------- + loop : int + The number of iterations. Default 0 (meaning loop indefinitely) + duration : {float, list} + The duration (in seconds) of each frame. Either specify one value + that is used for all frames, or one value for each frame. + Note that in the GIF format the duration/delay is expressed in + hundredths of a second, which limits the precision of the duration. + fps : float + The number of frames per second. If duration is not given, the + duration for each frame is set to 1/fps. Default 10. + palettesize : int + The number of colors to quantize the image to. Is rounded to + the nearest power of two. Default 256. + quantizer : {'wu', 'nq'} + The quantization algorithm: + * wu - Wu, Xiaolin, Efficient Statistical Computations for + Optimal Color Quantization + * nq (neuqant) - Dekker A. 
H., Kohonen neural networks for + optimal color quantization + subrectangles : bool + If True, will try and optimize the GIF by storing only the + rectangular parts of each frame that change with respect to the + previous. Unfortunately, this option seems currently broken + because FreeImage does not handle DisposalMethod correctly. + Default False. + """ + + _fif = 25 + + class Reader(FreeimageMulti.Reader): + def _open(self, flags=0, playback=True): + # Build flags from kwargs + flags = int(flags) + if playback: + flags |= IO_FLAGS.GIF_PLAYBACK + FreeimageMulti.Reader._open(self, flags) + + def _get_data(self, index): + im, meta = FreeimageMulti.Reader._get_data(self, index) + # im = im[:, :, :3] # Drop alpha channel + return im, meta + + # -- writer + + class Writer(FreeimageMulti.Writer): + # todo: subrectangles + # todo: global palette + + def _open( + self, + flags=0, + loop=0, + duration=None, + fps=10, + palettesize=256, + quantizer="Wu", + subrectangles=False, + ): + # Check palettesize + if palettesize < 2 or palettesize > 256: + raise ValueError("GIF quantize param must be 2..256") + if palettesize not in [2, 4, 8, 16, 32, 64, 128, 256]: + palettesize = 2 ** int(np.log2(128) + 0.999) + logger.warning( + "Warning: palettesize (%r) modified to a factor of " + "two between 2-256." 
% palettesize + ) + self._palettesize = palettesize + # Check quantizer + self._quantizer = {"wu": 0, "nq": 1}.get(quantizer.lower(), None) + if self._quantizer is None: + raise ValueError('Invalid quantizer, must be "wu" or "nq".') + # Check frametime + if duration is None: + self._frametime = [int(1000 / float(fps) + 0.5)] + elif isinstance(duration, list): + self._frametime = [int(1000 * d) for d in duration] + elif isinstance(duration, (float, int)): + self._frametime = [int(1000 * duration)] + else: + raise ValueError("Invalid value for duration: %r" % duration) + # Check subrectangles + self._subrectangles = bool(subrectangles) + self._prev_im = None + # Init + FreeimageMulti.Writer._open(self, flags) + # Set global meta data + self._meta = {} + self._meta["ANIMATION"] = { + # 'GlobalPalette': np.array([0]).astype(np.uint8), + "Loop": np.array([loop]).astype(np.uint32), + # 'LogicalWidth': np.array([x]).astype(np.uint16), + # 'LogicalHeight': np.array([x]).astype(np.uint16), + } + + def _append_bitmap(self, im, meta, bitmap): + # Prepare meta data + meta = meta.copy() + meta_a = meta["ANIMATION"] = {} + # If this is the first frame, assign it our "global" meta data + if len(self._bm) == 0: + meta.update(self._meta) + meta_a = meta["ANIMATION"] + # Set frame time + index = len(self._bm) + if index < len(self._frametime): + ft = self._frametime[index] + else: + ft = self._frametime[-1] + meta_a["FrameTime"] = np.array([ft]).astype(np.uint32) + # Check array + if im.ndim == 3 and im.shape[-1] == 4: + im = im[:, :, :3] + # Process subrectangles + im_uncropped = im + if self._subrectangles and self._prev_im is not None: + im, xy = self._get_sub_rectangles(self._prev_im, im) + meta_a["DisposalMethod"] = np.array([1]).astype(np.uint8) + meta_a["FrameLeft"] = np.array([xy[0]]).astype(np.uint16) + meta_a["FrameTop"] = np.array([xy[1]]).astype(np.uint16) + self._prev_im = im_uncropped + # Set image data + sub2 = sub1 = bitmap + sub1.allocate(im) + 
sub1.set_image_data(im) + # Quantize it if its RGB + if im.ndim == 3 and im.shape[-1] == 3: + sub2 = sub1.quantize(self._quantizer, self._palettesize) + # Set meta data and return + sub2.set_meta_data(meta) + return sub2 + + def _get_sub_rectangles(self, prev, im): + """ + Calculate the minimal rectangles that need updating each frame. + Returns a two-element tuple containing the cropped images and a + list of x-y positions. + """ + # Get difference, sum over colors + diff = np.abs(im - prev) + if diff.ndim == 3: + diff = diff.sum(2) + # Get begin and end for both dimensions + X = np.argwhere(diff.sum(0)) + Y = np.argwhere(diff.sum(1)) + # Get rect coordinates + if X.size and Y.size: + x0, x1 = int(X[0]), int(X[-1]) + 1 + y0, y1 = int(Y[0]), int(Y[-1]) + 1 + else: # No change ... make it minimal + x0, x1 = 0, 2 + y0, y1 = 0, 2 + # Cut out and return + return im[y0:y1, x0:x1], (x0, y0) diff --git a/.venv/Lib/site-packages/imageio/plugins/gdal.py b/.venv/Lib/site-packages/imageio/plugins/gdal.py new file mode 100644 index 00000000..04cabb7e --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/gdal.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +""" Read GDAL files. + +Backend: `GDAL `_ + +.. note:: + To use this plugin you have to install its backend:: + + pip install imageio[gdal] + +Parameters +---------- +none +""" + +from ..core import Format, has_module + +_gdal = None # lazily loaded in load_lib() + + +def load_lib(): + global _gdal + try: + import osgeo.gdal as _gdal + except ImportError: + raise ImportError( + "The GDAL format relies on the GDAL package." + "Please refer to http://www.gdal.org/" + "for further instructions." 
+ ) + return _gdal + + +GDAL_FORMATS = (".tiff", " .tif", ".img", ".ecw", ".jpg", ".jpeg") + + +class GdalFormat(Format): + """See :mod:`imageio.plugins.gdal`""" + + def _can_read(self, request): + if request.extension in (".ecw",): + return True + if has_module("osgeo.gdal"): + return request.extension in self.extensions + + def _can_write(self, request): + return False + + # -- + + class Reader(Format.Reader): + def _open(self): + if not _gdal: + load_lib() + self._ds = _gdal.Open(self.request.get_local_filename()) + + def _close(self): + del self._ds + + def _get_length(self): + return 1 + + def _get_data(self, index): + if index != 0: + raise IndexError("Gdal file contains only one dataset") + return self._ds.ReadAsArray(), self._get_meta_data(index) + + def _get_meta_data(self, index): + return self._ds.GetMetadata() diff --git a/.venv/Lib/site-packages/imageio/plugins/grab.py b/.venv/Lib/site-packages/imageio/plugins/grab.py new file mode 100644 index 00000000..8477863e --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/grab.py @@ -0,0 +1,105 @@ +""" +PIL-based formats to take screenshots and grab from the clipboard. 
+""" + +import threading + +import numpy as np + +from ..core import Format + + +class BaseGrabFormat(Format): + """Base format for grab formats.""" + + _pillow_imported = False + _ImageGrab = None + + def __init__(self, *args, **kwargs): + super(BaseGrabFormat, self).__init__(*args, **kwargs) + self._lock = threading.RLock() + + def _can_write(self, request): + return False + + def _init_pillow(self): + with self._lock: + if not self._pillow_imported: + self._pillow_imported = True # more like tried to import + import PIL + + if not hasattr(PIL, "__version__"): # pragma: no cover + raise ImportError("Imageio Pillow requires " "Pillow, not PIL!") + try: + from PIL import ImageGrab + except ImportError: + return None + self._ImageGrab = ImageGrab + return self._ImageGrab + + class Reader(Format.Reader): + def _open(self): + pass + + def _close(self): + pass + + def _get_data(self, index): + return self.format._get_data(index) + + +class ScreenGrabFormat(BaseGrabFormat): + """The ScreenGrabFormat provided a means to grab screenshots using + the uri of "". + + This functionality is provided via Pillow. Note that "" is + only supported on Windows and OS X. + + Parameters for reading + ---------------------- + No parameters. + """ + + def _can_read(self, request): + if request.filename != "": + return False + return bool(self._init_pillow()) + + def _get_data(self, index): + ImageGrab = self._init_pillow() + assert ImageGrab + + pil_im = ImageGrab.grab() + assert pil_im is not None + im = np.asarray(pil_im) + return im, {} + + +class ClipboardGrabFormat(BaseGrabFormat): + """The ClipboardGrabFormat provided a means to grab image data from + the clipboard, using the uri "" + + This functionality is provided via Pillow. Note that "" is + only supported on Windows. + + Parameters for reading + ---------------------- + No parameters. 
+ """ + + def _can_read(self, request): + if request.filename != "": + return False + return bool(self._init_pillow()) + + def _get_data(self, index): + ImageGrab = self._init_pillow() + assert ImageGrab + + pil_im = ImageGrab.grabclipboard() + if pil_im is None: + raise RuntimeError( + "There seems to be no image data on the " "clipboard now." + ) + im = np.asarray(pil_im) + return im, {} diff --git a/.venv/Lib/site-packages/imageio/plugins/lytro.py b/.venv/Lib/site-packages/imageio/plugins/lytro.py new file mode 100644 index 00000000..add38ad9 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/lytro.py @@ -0,0 +1,714 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2018, imageio contributors +# imageio is distributed under the terms of the (new) BSD License. +# + +""" Read LFR files (Lytro Illum). + +Backend: internal + +Plugin to read Lytro Illum .lfr and .raw files as produced +by the Lytro Illum light field camera. It is actually a collection +of plugins, each supporting slightly different keyword arguments + +Parameters +---------- +meta_only : bool + Whether to only read the metadata. +include_thumbnail : bool + (only for lytro-lfr and lytro-lfp) + Whether to include an image thumbnail in the metadata. + +""" +# +# +# This code is based on work by +# David Uhlig and his lfr_reader +# (https://www.iiit.kit.edu/uhlig.php) +# Donald Dansereau and his Matlab LF Toolbox +# (http://dgd.vision/Tools/LFToolbox/) +# and Behnam Esfahbod and his Python LFP-Reader +# (https://github.com/behnam/python-lfp-reader/) + + +import os +import json +import struct +import logging + + +import numpy as np + +from ..core import Format +from ..v2 import imread + + +logger = logging.getLogger(__name__) + + +# Sensor size of Lytro Illum resp. 
Lytro F01 light field camera sensor +LYTRO_ILLUM_IMAGE_SIZE = (5368, 7728) +LYTRO_F01_IMAGE_SIZE = (3280, 3280) + +# Parameter of lfr file format +HEADER_LENGTH = 12 +SIZE_LENGTH = 4 # = 16 - header_length +SHA1_LENGTH = 45 # = len("sha1-") + (160 / 4) +PADDING_LENGTH = 35 # = (4*16) - header_length - size_length - sha1_length +DATA_CHUNKS_ILLUM = 11 +DATA_CHUNKS_F01 = 3 + + +class LytroFormat(Format): + """Base class for Lytro format. + The subclasses LytroLfrFormat, LytroLfpFormat, LytroIllumRawFormat and + LytroF01RawFormat implement the Lytro-LFR, Lytro-LFP and Lytro-RAW format + for the Illum and original F01 camera respectively. + Writing is not supported. + """ + + # Only single images are supported. + _modes = "i" + + def _can_write(self, request): + # Writing of Lytro files is not supported + return False + + # -- writer + + class Writer(Format.Writer): + def _open(self, flags=0): + self._fp = self.request.get_file() + + def _close(self): + # Close the reader. + # Note that the request object will close self._fp + pass + + def _append_data(self, im, meta): + # Process the given data and meta data. + raise RuntimeError("The lytro format cannot write image data.") + + def _set_meta_data(self, meta): + # Process the given meta data (global for all images) + # It is not mandatory to support this. + raise RuntimeError("The lytro format cannot write meta data.") + + +class LytroIllumRawFormat(LytroFormat): + """This is the Lytro Illum RAW format. + The raw format is a 10bit image format as used by the Lytro Illum + light field camera. The format will read the specified raw file and will + try to load a .txt or .json file with the associated meta data. + This format does not support writing. + + + Parameters for reading + ---------------------- + meta_only : bool + Whether to only read the metadata. 
+ """ + + def _can_read(self, request): + # Check if mode and extensions are supported by the format + if request.extension in (".raw",): + return True + + @staticmethod + def rearrange_bits(array): + # Do bit rearrangement for the 10-bit lytro raw format + # Normalize output to 1.0 as float64 + t0 = array[0::5] + t1 = array[1::5] + t2 = array[2::5] + t3 = array[3::5] + lsb = array[4::5] + + t0 = np.left_shift(t0, 2) + np.bitwise_and(lsb, 3) + t1 = np.left_shift(t1, 2) + np.right_shift(np.bitwise_and(lsb, 12), 2) + t2 = np.left_shift(t2, 2) + np.right_shift(np.bitwise_and(lsb, 48), 4) + t3 = np.left_shift(t3, 2) + np.right_shift(np.bitwise_and(lsb, 192), 6) + + image = np.zeros(LYTRO_ILLUM_IMAGE_SIZE, dtype=np.uint16) + image[:, 0::4] = t0.reshape( + (LYTRO_ILLUM_IMAGE_SIZE[0], LYTRO_ILLUM_IMAGE_SIZE[1] // 4) + ) + image[:, 1::4] = t1.reshape( + (LYTRO_ILLUM_IMAGE_SIZE[0], LYTRO_ILLUM_IMAGE_SIZE[1] // 4) + ) + image[:, 2::4] = t2.reshape( + (LYTRO_ILLUM_IMAGE_SIZE[0], LYTRO_ILLUM_IMAGE_SIZE[1] // 4) + ) + image[:, 3::4] = t3.reshape( + (LYTRO_ILLUM_IMAGE_SIZE[0], LYTRO_ILLUM_IMAGE_SIZE[1] // 4) + ) + + # Normalize data to 1.0 as 64-bit float. + # Division is by 1023 as the Lytro Illum saves 10-bit raw data. + return np.divide(image, 1023.0).astype(np.float64) + + # -- reader + + class Reader(Format.Reader): + def _open(self, meta_only=False): + self._file = self.request.get_file() + self._data = None + self._meta_only = meta_only + + def _close(self): + # Close the reader. + # Note that the request object will close self._file + del self._data + + def _get_length(self): + # Return the number of images. 
+ return 1 + + def _get_data(self, index): + # Return the data and meta data for the given index + + if index not in [0, "None"]: + raise IndexError("Lytro file contains only one dataset") + + if not self._meta_only: + # Read all bytes + if self._data is None: + self._data = self._file.read() + + # Read bytes from string and convert to uint16 + raw = np.frombuffer(self._data, dtype=np.uint8).astype(np.uint16) + + # Rearrange bits + img = LytroIllumRawFormat.rearrange_bits(raw) + + else: + # Return empty image + img = np.array([]) + + # Return image and meta data + return img, self._get_meta_data(index=0) + + def _get_meta_data(self, index): + # Get the meta data for the given index. If index is None, it + # should return the global meta data. + + if index not in [0, None]: + raise IndexError("Lytro meta data file contains only one dataset") + + # Try to read meta data from meta data file corresponding + # to the raw data file, extension in [.txt, .TXT, .json, .JSON] + filename_base = os.path.splitext(self.request.get_local_filename())[0] + + meta_data = None + + for ext in [".txt", ".TXT", ".json", ".JSON"]: + if os.path.isfile(filename_base + ext): + meta_data = json.load(open(filename_base + ext)) + + if meta_data is not None: + return meta_data + + else: + logger.warning("No metadata file found for provided raw file.") + return {} + + +class LytroLfrFormat(LytroFormat): + """This is the Lytro Illum LFR format. + The lfr is a image and meta data container format as used by the + Lytro Illum light field camera. + The format will read the specified lfr file. + This format does not support writing. + + Parameters for reading + ---------------------- + meta_only : bool + Whether to only read the metadata. + include_thumbnail : bool + Whether to include an image thumbnail in the metadata. 
+ """ + + def _can_read(self, request): + # Check if mode and extensions are supported by the format + if request.extension in (".lfr",): + return True + + # -- reader + + class Reader(Format.Reader): + def _open(self, meta_only=False, include_thumbnail=True): + self._file = self.request.get_file() + self._data = None + self._chunks = {} + self.metadata = {} + self._content = None + self._meta_only = meta_only + self._include_thumbnail = include_thumbnail + + self._find_header() + self._find_chunks() + self._find_meta() + + try: + # Get sha1 dict and check if it is in dictionary of data chunks + chunk_dict = self._content["frames"][0]["frame"] + if ( + chunk_dict["metadataRef"] in self._chunks + and chunk_dict["imageRef"] in self._chunks + and chunk_dict["privateMetadataRef"] in self._chunks + ): + if not self._meta_only: + # Read raw image data byte buffer + data_pos, size = self._chunks[chunk_dict["imageRef"]] + self._file.seek(data_pos, 0) + self.raw_image_data = self._file.read(size) + + # Read meta data + data_pos, size = self._chunks[chunk_dict["metadataRef"]] + self._file.seek(data_pos, 0) + metadata = self._file.read(size) + # Add metadata to meta data dict + self.metadata["metadata"] = json.loads(metadata.decode("ASCII")) + + # Read private metadata + data_pos, size = self._chunks[chunk_dict["privateMetadataRef"]] + self._file.seek(data_pos, 0) + serial_numbers = self._file.read(size) + self.serial_numbers = json.loads(serial_numbers.decode("ASCII")) + # Add private metadata to meta data dict + self.metadata["privateMetadata"] = self.serial_numbers + + # Read image preview thumbnail + if self._include_thumbnail: + chunk_dict = self._content["thumbnails"][0] + if chunk_dict["imageRef"] in self._chunks: + # Read thumbnail image from thumbnail chunk + data_pos, size = self._chunks[chunk_dict["imageRef"]] + self._file.seek(data_pos, 0) + # Read binary data, read image as jpeg + thumbnail_data = self._file.read(size) + thumbnail_img = imread(thumbnail_data, 
format="jpeg") + + thumbnail_height = chunk_dict["height"] + thumbnail_width = chunk_dict["width"] + + # Add thumbnail to metadata + self.metadata["thumbnail"] = { + "image": thumbnail_img, + "height": thumbnail_height, + "width": thumbnail_width, + } + + except KeyError: + raise RuntimeError("The specified file is not a valid LFR file.") + + def _close(self): + # Close the reader. + # Note that the request object will close self._file + del self._data + + def _get_length(self): + # Return the number of images. Can be np.inf + return 1 + + def _find_header(self): + """ + Checks if file has correct header and skip it. + """ + file_header = b"\x89LFP\x0D\x0A\x1A\x0A\x00\x00\x00\x01" + # Read and check header of file + header = self._file.read(HEADER_LENGTH) + if header != file_header: + raise RuntimeError("The LFR file header is invalid.") + + # Read first bytes to skip header + self._file.read(SIZE_LENGTH) + + def _find_chunks(self): + """ + Gets start position and size of data chunks in file. + """ + chunk_header = b"\x89LFC\x0D\x0A\x1A\x0A\x00\x00\x00\x00" + + for i in range(0, DATA_CHUNKS_ILLUM): + data_pos, size, sha1 = self._get_chunk(chunk_header) + self._chunks[sha1] = (data_pos, size) + + def _find_meta(self): + """ + Gets a data chunk that contains information over content + of other data chunks. + """ + meta_header = b"\x89LFM\x0D\x0A\x1A\x0A\x00\x00\x00\x00" + data_pos, size, sha1 = self._get_chunk(meta_header) + + # Get content + self._file.seek(data_pos, 0) + data = self._file.read(size) + self._content = json.loads(data.decode("ASCII")) + + def _get_chunk(self, header): + """ + Checks if chunk has correct header and skips it. + Finds start position and length of next chunk and reads + sha1-string that identifies the following data chunk. + + Parameters + ---------- + header : bytes + Byte string that identifies start of chunk. + + Returns + ------- + data_pos : int + Start position of data chunk in file. + size : int + Size of data chunk. 
+ sha1 : str + Sha1 value of chunk. + """ + # Read and check header of chunk + header_chunk = self._file.read(HEADER_LENGTH) + if header_chunk != header: + raise RuntimeError("The LFR chunk header is invalid.") + + data_pos = None + sha1 = None + + # Read size + size = struct.unpack(">i", self._file.read(SIZE_LENGTH))[0] + if size > 0: + # Read sha1 + sha1 = str(self._file.read(SHA1_LENGTH).decode("ASCII")) + # Skip fixed null chars + self._file.read(PADDING_LENGTH) + # Find start of data and skip data + data_pos = self._file.tell() + self._file.seek(size, 1) + # Skip extra null chars + ch = self._file.read(1) + while ch == b"\0": + ch = self._file.read(1) + self._file.seek(-1, 1) + + return data_pos, size, sha1 + + def _get_data(self, index): + # Return the data and meta data for the given index + if index not in [0, None]: + raise IndexError("Lytro lfr file contains only one dataset") + + if not self._meta_only: + # Read bytes from string and convert to uint16 + raw = np.frombuffer(self.raw_image_data, dtype=np.uint8).astype( + np.uint16 + ) + im = LytroIllumRawFormat.rearrange_bits(raw) + else: + im = np.array([]) + + # Return array and dummy meta data + return im, self.metadata + + def _get_meta_data(self, index): + # Get the meta data for the given index. If index is None, + # it returns the global meta data. + if index not in [0, None]: + raise IndexError("Lytro meta data file contains only one dataset") + + return self.metadata + + +class LytroF01RawFormat(LytroFormat): + """This is the Lytro RAW format for the original F01 Lytro camera. + The raw format is a 12bit image format as used by the Lytro F01 + light field camera. The format will read the specified raw file and will + try to load a .txt or .json file with the associated meta data. + This format does not support writing. + + + Parameters for reading + ---------------------- + meta_only : bool + Whether to only read the metadata. 
+ + """ + + def _can_read(self, request): + # Check if mode and extensions are supported by the format + if request.extension in (".raw",): + return True + + @staticmethod + def rearrange_bits(array): + # Do bit rearrangement for the 12-bit lytro raw format + # Normalize output to 1.0 as float64 + t0 = array[0::3] + t1 = array[1::3] + t2 = array[2::3] + + a0 = np.left_shift(t0, 4) + np.right_shift(np.bitwise_and(t1, 240), 4) + a1 = np.left_shift(np.bitwise_and(t1, 15), 8) + t2 + + image = np.zeros(LYTRO_F01_IMAGE_SIZE, dtype=np.uint16) + image[:, 0::2] = a0.reshape( + (LYTRO_F01_IMAGE_SIZE[0], LYTRO_F01_IMAGE_SIZE[1] // 2) + ) + image[:, 1::2] = a1.reshape( + (LYTRO_F01_IMAGE_SIZE[0], LYTRO_F01_IMAGE_SIZE[1] // 2) + ) + + # Normalize data to 1.0 as 64-bit float. + # Division is by 4095 as the Lytro F01 saves 12-bit raw data. + return np.divide(image, 4095.0).astype(np.float64) + + # -- reader + + class Reader(Format.Reader): + def _open(self, meta_only=False): + self._file = self.request.get_file() + self._data = None + self._meta_only = meta_only + + def _close(self): + # Close the reader. + # Note that the request object will close self._file + del self._data + + def _get_length(self): + # Return the number of images. + return 1 + + def _get_data(self, index): + # Return the data and meta data for the given index + + if index not in [0, "None"]: + raise IndexError("Lytro file contains only one dataset") + + if not self._meta_only: + # Read all bytes + if self._data is None: + self._data = self._file.read() + + # Read bytes from string and convert to uint16 + raw = np.frombuffer(self._data, dtype=np.uint8).astype(np.uint16) + + # Rearrange bits + img = LytroF01RawFormat.rearrange_bits(raw) + + else: + img = np.array([]) + + # Return image and meta data + return img, self._get_meta_data(index=0) + + def _get_meta_data(self, index): + # Get the meta data for the given index. If index is None, it + # should return the global meta data. 
+ + if index not in [0, None]: + raise IndexError("Lytro meta data file contains only one dataset") + + # Try to read meta data from meta data file corresponding + # to the raw data file, extension in [.txt, .TXT, .json, .JSON] + filename_base = os.path.splitext(self.request.get_local_filename())[0] + + meta_data = None + + for ext in [".txt", ".TXT", ".json", ".JSON"]: + if os.path.isfile(filename_base + ext): + meta_data = json.load(open(filename_base + ext)) + + if meta_data is not None: + return meta_data + + else: + logger.warning("No metadata file found for provided raw file.") + return {} + + +class LytroLfpFormat(LytroFormat): + """This is the Lytro Illum LFP format. + The lfp is a image and meta data container format as used by the + Lytro F01 light field camera. + The format will read the specified lfp file. + This format does not support writing. + + Parameters for reading + ---------------------- + meta_only : bool + Whether to only read the metadata. + include_thumbnail : bool + Whether to include an image thumbnail in the metadata. 
+ """ + + def _can_read(self, request): + # Check if mode and extensions are supported by the format + if request.extension in (".lfp",): + return True + + # -- reader + + class Reader(Format.Reader): + def _open(self, meta_only=False): + self._file = self.request.get_file() + self._data = None + self._chunks = {} + self.metadata = {} + self._content = None + self._meta_only = meta_only + + self._find_header() + self._find_meta() + self._find_chunks() + + try: + # Get sha1 dict and check if it is in dictionary of data chunks + chunk_dict = self._content["picture"]["frameArray"][0]["frame"] + if ( + chunk_dict["metadataRef"] in self._chunks + and chunk_dict["imageRef"] in self._chunks + and chunk_dict["privateMetadataRef"] in self._chunks + ): + if not self._meta_only: + # Read raw image data byte buffer + data_pos, size = self._chunks[chunk_dict["imageRef"]] + self._file.seek(data_pos, 0) + self.raw_image_data = self._file.read(size) + + # Read meta data + data_pos, size = self._chunks[chunk_dict["metadataRef"]] + self._file.seek(data_pos, 0) + metadata = self._file.read(size) + # Add metadata to meta data dict + self.metadata["metadata"] = json.loads(metadata.decode("ASCII")) + + # Read private metadata + data_pos, size = self._chunks[chunk_dict["privateMetadataRef"]] + self._file.seek(data_pos, 0) + serial_numbers = self._file.read(size) + self.serial_numbers = json.loads(serial_numbers.decode("ASCII")) + # Add private metadata to meta data dict + self.metadata["privateMetadata"] = self.serial_numbers + + except KeyError: + raise RuntimeError("The specified file is not a valid LFP file.") + + def _close(self): + # Close the reader. + # Note that the request object will close self._file + del self._data + + def _get_length(self): + # Return the number of images. Can be np.inf + return 1 + + def _find_header(self): + """ + Checks if file has correct header and skip it. 
+ """ + file_header = b"\x89LFP\x0D\x0A\x1A\x0A\x00\x00\x00\x01" + + # Read and check header of file + header = self._file.read(HEADER_LENGTH) + if header != file_header: + raise RuntimeError("The LFP file header is invalid.") + + # Read first bytes to skip header + self._file.read(SIZE_LENGTH) + + def _find_chunks(self): + """ + Gets start position and size of data chunks in file. + """ + chunk_header = b"\x89LFC\x0D\x0A\x1A\x0A\x00\x00\x00\x00" + + for i in range(0, DATA_CHUNKS_F01): + data_pos, size, sha1 = self._get_chunk(chunk_header) + self._chunks[sha1] = (data_pos, size) + + def _find_meta(self): + """ + Gets a data chunk that contains information over content + of other data chunks. + """ + meta_header = b"\x89LFM\x0D\x0A\x1A\x0A\x00\x00\x00\x00" + + data_pos, size, sha1 = self._get_chunk(meta_header) + + # Get content + self._file.seek(data_pos, 0) + data = self._file.read(size) + self._content = json.loads(data.decode("ASCII")) + data = self._file.read(5) # Skip 5 + + def _get_chunk(self, header): + """ + Checks if chunk has correct header and skips it. + Finds start position and length of next chunk and reads + sha1-string that identifies the following data chunk. + + Parameters + ---------- + header : bytes + Byte string that identifies start of chunk. + + Returns + ------- + data_pos : int + Start position of data chunk in file. + size : int + Size of data chunk. + sha1 : str + Sha1 value of chunk. 
+ """ + # Read and check header of chunk + header_chunk = self._file.read(HEADER_LENGTH) + if header_chunk != header: + raise RuntimeError("The LFP chunk header is invalid.") + + data_pos = None + sha1 = None + + # Read size + size = struct.unpack(">i", self._file.read(SIZE_LENGTH))[0] + if size > 0: + # Read sha1 + sha1 = str(self._file.read(SHA1_LENGTH).decode("ASCII")) + # Skip fixed null chars + self._file.read(PADDING_LENGTH) + # Find start of data and skip data + data_pos = self._file.tell() + self._file.seek(size, 1) + # Skip extra null chars + ch = self._file.read(1) + while ch == b"\0": + ch = self._file.read(1) + self._file.seek(-1, 1) + + return data_pos, size, sha1 + + def _get_data(self, index): + # Return the data and meta data for the given index + if index not in [0, None]: + raise IndexError("Lytro lfp file contains only one dataset") + + if not self._meta_only: + # Read bytes from string and convert to uint16 + raw = np.frombuffer(self.raw_image_data, dtype=np.uint8).astype( + np.uint16 + ) + im = LytroF01RawFormat.rearrange_bits(raw) + else: + im = np.array([]) + + # Return array and dummy meta data + return im, self.metadata + + def _get_meta_data(self, index): + # Get the meta data for the given index. If index is None, + # it returns the global meta data. + if index not in [0, None]: + raise IndexError("Lytro meta data file contains only one dataset") + + return self.metadata diff --git a/.venv/Lib/site-packages/imageio/plugins/npz.py b/.venv/Lib/site-packages/imageio/plugins/npz.py new file mode 100644 index 00000000..87b37e44 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/npz.py @@ -0,0 +1,85 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +"""Read/Write NPZ files. + +Backend: `Numpy `_ + +NPZ is a file format by numpy that provides storage of array data using gzip +compression. This imageio plugin supports data of any shape, and also supports +multiple images per file. 
However, the npz format does not provide streaming; +all data is read/written at once. Further, there is no support for meta data. + +See the BSDF format for a similar (but more fully featured) format. + +Parameters +---------- +None + +Notes +----- +This format is not available on Pypy. + +""" + +import numpy as np + +from ..core import Format + + +class NpzFormat(Format): + """See :mod:`imageio.plugins.npz`""" + + def _can_read(self, request): + # We support any kind of image data + return request.extension in self.extensions + + def _can_write(self, request): + # We support any kind of image data + return request.extension in self.extensions + + # -- reader + + class Reader(Format.Reader): + def _open(self): + # Load npz file, which provides another file like object + self._npz = np.load(self.request.get_file()) + assert isinstance(self._npz, np.lib.npyio.NpzFile) + # Get list of names, ordered by name, but smarter + self._names = sorted(self._npz.files, key=lambda x: x.split("_")[-1]) + + def _close(self): + self._npz.close() + + def _get_length(self): + return len(self._names) + + def _get_data(self, index): + # Get data + if index < 0 or index >= len(self._names): + raise IndexError("Index out of range while reading from nzp") + im = self._npz[self._names[index]] + # Return array and empty meta data + return im, {} + + def _get_meta_data(self, index): + # Get the meta data for the given index + raise RuntimeError("The npz format does not support meta data.") + + # -- writer + + class Writer(Format.Writer): + def _open(self): + # Npz is not such a great format. We cannot stream to the file. + # So we remember all images and write them to file at the end. 
+ self._images = [] + + def _close(self): + # Write everything + np.savez_compressed(self.request.get_file(), *self._images) + + def _append_data(self, im, meta): + self._images.append(im) # discart meta data + + def set_meta_data(self, meta): + raise RuntimeError("The npz format does not support meta data.") diff --git a/.venv/Lib/site-packages/imageio/plugins/opencv.py b/.venv/Lib/site-packages/imageio/plugins/opencv.py new file mode 100644 index 00000000..944a7577 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/opencv.py @@ -0,0 +1,313 @@ +"""Read/Write images using OpenCV. + +Backend Library: `OpenCV `_ + +This plugin wraps OpenCV (also known as ``cv2``), a popular image processing +library. Currently, it exposes OpenCVs image reading capability (no video or GIF +support yet); however, this may be added in future releases. + +Methods +------- +.. note:: + Check the respective function for a list of supported kwargs and their + documentation. + +.. autosummary:: + :toctree: + + OpenCVPlugin.read + OpenCVPlugin.iter + OpenCVPlugin.write + OpenCVPlugin.properties + OpenCVPlugin.metadata + +Pixel Formats (Colorspaces) +--------------------------- + +OpenCV is known to process images in BGR; however, most of the python ecosystem +(in particular matplotlib and other pydata libraries) use the RGB. As such, +images are converted to RGB, RGBA, or grayscale (where applicable) by default. 
+ +""" + +import warnings +from pathlib import Path +from typing import Any, Dict, List, Optional, Union + +import cv2 +import numpy as np + +from ..core import Request +from ..core.request import URI_BYTES, InitializationError, IOMode +from ..core.v3_plugin_api import ImageProperties, PluginV3 +from ..typing import ArrayLike + + +class OpenCVPlugin(PluginV3): + def __init__(self, request: Request) -> None: + super().__init__(request) + + self.file_handle = request.get_local_filename() + if request._uri_type is URI_BYTES: + self.filename = "" + else: + self.filename = request.raw_uri + + mode = request.mode.io_mode + if mode == IOMode.read and not cv2.haveImageReader(self.file_handle): + raise InitializationError(f"OpenCV can't read `{self.filename}`.") + elif mode == IOMode.write and not cv2.haveImageWriter(self.file_handle): + raise InitializationError(f"OpenCV can't write to `{self.filename}`.") + + def read( + self, + *, + index: int = None, + colorspace: Union[int, str] = None, + flags: int = cv2.IMREAD_COLOR, + ) -> np.ndarray: + """Read an image from the ImageResource. + + Parameters + ---------- + index : int, Ellipsis + If int, read the index-th image from the ImageResource. If ``...``, + read all images from the ImageResource and stack them along a new, + prepended, batch dimension. If None (default), use ``index=0`` if + the image contains exactly one image and ``index=...`` otherwise. + colorspace : str, int + The colorspace to convert into after loading and before returning + the image. If None (default) keep grayscale images as is, convert + images with an alpha channel to ``RGBA`` and all other images to + ``RGB``. If int, interpret ``colorspace`` as one of OpenCVs + `conversion flags + `_ + and use it for conversion. If str, convert the image into the given + colorspace. Possible string values are: ``"RGB"``, ``"BGR"``, + ``"RGBA"``, ``"BGRA"``, ``"GRAY"``, ``"HSV"``, or ``"LAB"``. + flags : int + The OpenCV flag(s) to pass to the reader. 
Refer to the `OpenCV docs + `_ + for details. + + Returns + ------- + ndimage : np.ndarray + The decoded image as a numpy array. + + """ + + if index is None: + n_images = cv2.imcount(self.file_handle, flags) + index = 0 if n_images == 1 else ... + + if index is ...: + retval, img = cv2.imreadmulti(self.file_handle, flags=flags) + is_batch = True + else: + retval, img = cv2.imreadmulti(self.file_handle, index, 1, flags=flags) + is_batch = False + + if retval is False: + raise ValueError(f"Could not read index `{index}` from `{self.filename}`.") + + if img[0].ndim == 2: + in_colorspace = "GRAY" + out_colorspace = colorspace or "GRAY" + elif img[0].shape[-1] == 4: + in_colorspace = "BGRA" + out_colorspace = colorspace or "RGBA" + else: + in_colorspace = "BGR" + out_colorspace = colorspace or "RGB" + + if isinstance(colorspace, int): + cvt_space = colorspace + elif in_colorspace == out_colorspace.upper(): + cvt_space = None + else: + out_colorspace = out_colorspace.upper() + cvt_space = getattr(cv2, f"COLOR_{in_colorspace}2{out_colorspace}") + + if cvt_space is not None: + img = np.stack([cv2.cvtColor(x, cvt_space) for x in img]) + else: + img = np.stack(img) + + return img if is_batch else img[0] + + def iter( + self, + colorspace: Union[int, str] = None, + flags: int = cv2.IMREAD_COLOR, + ) -> np.ndarray: + """Yield images from the ImageResource. + + Parameters + ---------- + colorspace : str, int + The colorspace to convert into after loading and before returning + the image. If None (default) keep grayscale images as is, convert + images with an alpha channel to ``RGBA`` and all other images to + ``RGB``. If int, interpret ``colorspace`` as one of OpenCVs + `conversion flags + `_ + and use it for conversion. If str, convert the image into the given + colorspace. Possible string values are: ``"RGB"``, ``"BGR"``, + ``"RGBA"``, ``"BGRA"``, ``"GRAY"``, ``"HSV"``, or ``"LAB"``. + flags : int + The OpenCV flag(s) to pass to the reader. 
Refer to the `OpenCV docs + `_ + for details. + + Yields + ------ + ndimage : np.ndarray + The decoded image as a numpy array. + + """ + for idx in range(cv2.imcount(self.file_handle)): + yield self.read(index=idx, flags=flags, colorspace=colorspace) + + def write( + self, + ndimage: Union[ArrayLike, List[ArrayLike]], + is_batch: bool = False, + params: List[int] = None, + ) -> Optional[bytes]: + """Save an ndimage in the ImageResource. + + Parameters + ---------- + ndimage : ArrayLike, List[ArrayLike] + The image data that will be written to the file. It is either a + single image, a batch of images, or a list of images. + is_batch : bool + If True, the provided ndimage is a batch of images. If False (default), the + provided ndimage is a single image. If the provided ndimage is a list of images, + this parameter has no effect. + params : List[int] + A list of parameters that will be passed to OpenCVs imwrite or + imwritemulti functions. Possible values are documented in the + `OpenCV documentation + `_. + + Returns + ------- + encoded_image : bytes, None + If the ImageResource is ``""`` the call to write returns the + encoded image as a bytes string. Otherwise it returns None. + + """ + + if isinstance(ndimage, list): + ndimage = np.stack(ndimage, axis=0) + elif not is_batch: + ndimage = ndimage[None, ...] + + if ndimage[0].ndim == 2: + n_channels = 1 + else: + n_channels = ndimage[0].shape[-1] + + if n_channels == 1: + ndimage_cv2 = [x for x in ndimage] + elif n_channels == 4: + ndimage_cv2 = [cv2.cvtColor(x, cv2.COLOR_RGBA2BGRA) for x in ndimage] + else: + ndimage_cv2 = [cv2.cvtColor(x, cv2.COLOR_RGB2BGR) for x in ndimage] + + retval = cv2.imwritemulti(self.file_handle, ndimage_cv2, params) + + if retval is False: + # not sure what scenario would trigger this, but + # it can occur theoretically. 
+ raise IOError("OpenCV failed to write.") # pragma: no cover + + if self.request._uri_type == URI_BYTES: + return Path(self.file_handle).read_bytes() + + def properties( + self, + index: int = None, + colorspace: Union[int, str] = None, + flags: int = cv2.IMREAD_COLOR, + ) -> ImageProperties: + """Standardized image metadata. + + Parameters + ---------- + index : int, Ellipsis + If int, get the properties of the index-th image in the + ImageResource. If ``...``, get the properties of the image stack + that contains all images. If None (default), use ``index=0`` if the + image contains exactly one image and ``index=...`` otherwise. + colorspace : str, int + The colorspace to convert into after loading and before returning + the image. If None (default) keep grayscale images as is, convert + images with an alpha channel to ``RGBA`` and all other images to + ``RGB``. If int, interpret ``colorspace`` as one of OpenCVs + `conversion flags + `_ + and use it for conversion. If str, convert the image into the given + colorspace. Possible string values are: ``"RGB"``, ``"BGR"``, + ``"RGBA"``, ``"BGRA"``, ``"GRAY"``, ``"HSV"``, or ``"LAB"``. + flags : int + The OpenCV flag(s) to pass to the reader. Refer to the `OpenCV docs + `_ + for details. + + Returns + ------- + props : ImageProperties + A dataclass filled with standardized image metadata. + + Notes + ----- + Reading properties with OpenCV involves decoding pixel data, because + OpenCV doesn't provide a direct way to access metadata. 
+ + """ + + if index is None: + n_images = cv2.imcount(self.file_handle, flags) + is_batch = n_images > 1 + elif index is Ellipsis: + n_images = cv2.imcount(self.file_handle, flags) + is_batch = True + else: + is_batch = False + + # unfortunately, OpenCV doesn't allow reading shape without reading pixel data + if is_batch: + img = self.read(index=0, flags=flags, colorspace=colorspace) + return ImageProperties( + shape=(n_images, *img.shape), + dtype=img.dtype, + n_images=n_images, + is_batch=True, + ) + + img = self.read(index=index, flags=flags, colorspace=colorspace) + return ImageProperties(shape=img.shape, dtype=img.dtype, is_batch=False) + + def metadata( + self, index: int = None, exclude_applied: bool = True + ) -> Dict[str, Any]: + """Format-specific metadata. + + .. warning:: + OpenCV does not support reading metadata. When called, this function + will raise a ``NotImplementedError``. + + Parameters + ---------- + index : int + This parameter has no effect. + exclude_applied : bool + This parameter has no effect. + + """ + + warnings.warn("OpenCV does not support reading metadata.", UserWarning) + return dict() diff --git a/.venv/Lib/site-packages/imageio/plugins/pillow.py b/.venv/Lib/site-packages/imageio/plugins/pillow.py new file mode 100644 index 00000000..8826f35c --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/pillow.py @@ -0,0 +1,613 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +""" Read/Write images using Pillow/PIL. + +Backend Library: `Pillow `_ + +Plugin that wraps the the Pillow library. Pillow is a friendly fork of PIL +(Python Image Library) and supports reading and writing of common formats (jpg, +png, gif, tiff, ...). For, the complete list of features and supported formats +please refer to pillows official docs (see the Backend Library link). + +Parameters +---------- +request : Request + A request object representing the resource to be operated on. + +Methods +------- + +.. 
autosummary:: + :toctree: _plugins/pillow + + PillowPlugin.read + PillowPlugin.write + PillowPlugin.iter + PillowPlugin.get_meta + +""" + +import sys +import warnings +from io import BytesIO +from typing import Any, Callable, Dict, Iterator, List, Optional, Tuple, Union, cast + +import numpy as np +from PIL import ExifTags, GifImagePlugin, Image, ImageSequence, UnidentifiedImageError +from PIL import __version__ as pil_version # type: ignore + +from ..core.request import URI_BYTES, InitializationError, IOMode, Request +from ..core.v3_plugin_api import ImageProperties, PluginV3 +from ..typing import ArrayLike + + +def pillow_version() -> Tuple[int]: + return tuple(int(x) for x in pil_version.split(".")) + + +def _exif_orientation_transform(orientation: int, mode: str) -> Callable: + # get transformation that transforms an image from a + # given EXIF orientation into the standard orientation + + # -1 if the mode has color channel, 0 otherwise + axis = -2 if Image.getmodebands(mode) > 1 else -1 + + EXIF_ORIENTATION = { + 1: lambda x: x, + 2: lambda x: np.flip(x, axis=axis), + 3: lambda x: np.rot90(x, k=2), + 4: lambda x: np.flip(x, axis=axis - 1), + 5: lambda x: np.flip(np.rot90(x, k=3), axis=axis), + 6: lambda x: np.rot90(x, k=3), + 7: lambda x: np.flip(np.rot90(x, k=1), axis=axis), + 8: lambda x: np.rot90(x, k=1), + } + + return EXIF_ORIENTATION[orientation] + + +class PillowPlugin(PluginV3): + def __init__(self, request: Request) -> None: + """Instantiate a new Pillow Plugin Object + + Parameters + ---------- + request : {Request} + A request object representing the resource to be operated on. 
+ + """ + + super().__init__(request) + + # Register HEIF opener for Pillow + try: + from pillow_heif import register_heif_opener + except ImportError: + pass + else: + register_heif_opener() + + # Register AVIF opener for Pillow + try: + from pillow_heif import register_avif_opener + except ImportError: + pass + else: + register_avif_opener() + + self._image: Image = None + self.images_to_write = [] + + if request.mode.io_mode == IOMode.read: + try: + with Image.open(request.get_file()): + # Check if it is generally possible to read the image. + # This will not read any data and merely try to find a + # compatible pillow plugin (ref: the pillow docs). + pass + except UnidentifiedImageError: + if request._uri_type == URI_BYTES: + raise InitializationError( + "Pillow can not read the provided bytes." + ) from None + else: + raise InitializationError( + f"Pillow can not read {request.raw_uri}." + ) from None + + self._image = Image.open(self._request.get_file()) + else: + self.save_args = {} + + extension = self.request.extension or self.request.format_hint + if extension is None: + warnings.warn( + "Can't determine file format to write as. You _must_" + " set `format` during write or the call will fail. Use " + "`extension` to supress this warning. ", + UserWarning, + ) + return + + tirage = [Image.preinit, Image.init] + for format_loader in tirage: + format_loader() + if extension in Image.registered_extensions().keys(): + return + + raise InitializationError( + f"Pillow can not write `{extension}` files." + ) from None + + def close(self) -> None: + self._flush_writer() + + if self._image: + self._image.close() + + self._request.finish() + + def read( + self, + *, + index: int = None, + mode: str = None, + rotate: bool = False, + apply_gamma: bool = False, + writeable_output: bool = True, + pilmode: str = None, + exifrotate: bool = None, + as_gray: bool = None, + ) -> np.ndarray: + """ + Parses the given URI and creates a ndarray from it. 
+ + Parameters + ---------- + index : int + If the ImageResource contains multiple ndimages, and index is an + integer, select the index-th ndimage from among them and return it. + If index is an ellipsis (...), read all ndimages in the file and + stack them along a new batch dimension and return them. If index is + None, this plugin reads the first image of the file (index=0) unless + the image is a GIF or APNG, in which case all images are read + (index=...). + mode : str + Convert the image to the given mode before returning it. If None, + the mode will be left unchanged. Possible modes can be found at: + https://pillow.readthedocs.io/en/stable/handbook/concepts.html#modes + rotate : bool + If True and the image contains an EXIF orientation tag, + apply the orientation before returning the ndimage. + apply_gamma : bool + If True and the image contains metadata about gamma, apply gamma + correction to the image. + writable_output : bool + If True, ensure that the image is writable before returning it to + the user. This incurs a full copy of the pixel data if the data + served by pillow is read-only. Consequentially, setting this flag to + False improves performance for some images. + pilmode : str + Deprecated, use `mode` instead. + exifrotate : bool + Deprecated, use `rotate` instead. + as_gray : bool + Deprecated. Exists to raise a constructive error message. + + Returns + ------- + ndimage : ndarray + A numpy array containing the loaded image data + + Notes + ----- + If you read a paletted image (e.g. GIF) then the plugin will apply the + palette by default. Should you wish to read the palette indices of each + pixel use ``mode="P"``. The coresponding color pallete can be found in + the image's metadata using the ``palette`` key when metadata is + extracted using the ``exclude_applied=False`` kwarg. The latter is + needed, as palettes are applied by default and hence excluded by default + to keep metadata and pixel data consistent. 
+ + """ + + if pilmode is not None: + warnings.warn( + "`pilmode` is deprecated. Use `mode` instead.", DeprecationWarning + ) + mode = pilmode + + if exifrotate is not None: + warnings.warn( + "`exifrotate` is deprecated. Use `rotate` instead.", DeprecationWarning + ) + rotate = exifrotate + + if as_gray is not None: + raise TypeError( + "The keyword `as_gray` is no longer supported." + "Use `mode='F'` for a backward-compatible result, or " + " `mode='L'` for an integer-valued result." + ) + + if self._image.format == "GIF": + # Converting GIF P frames to RGB + # https://github.com/python-pillow/Pillow/pull/6150 + GifImagePlugin.LOADING_STRATEGY = ( + GifImagePlugin.LoadingStrategy.RGB_AFTER_DIFFERENT_PALETTE_ONLY + ) + + if index is None: + if self._image.format == "GIF": + index = Ellipsis + elif self._image.custom_mimetype == "image/apng": + index = Ellipsis + else: + index = 0 + + if isinstance(index, int): + # will raise IO error if index >= number of frames in image + self._image.seek(index) + image = self._apply_transforms( + self._image, mode, rotate, apply_gamma, writeable_output + ) + else: + iterator = self.iter( + mode=mode, + rotate=rotate, + apply_gamma=apply_gamma, + writeable_output=writeable_output, + ) + image = np.stack([im for im in iterator], axis=0) + + return image + + def iter( + self, + *, + mode: str = None, + rotate: bool = False, + apply_gamma: bool = False, + writeable_output: bool = True, + ) -> Iterator[np.ndarray]: + """ + Iterate over all ndimages/frames in the URI + + Parameters + ---------- + mode : {str, None} + Convert the image to the given mode before returning it. If None, + the mode will be left unchanged. Possible modes can be found at: + https://pillow.readthedocs.io/en/stable/handbook/concepts.html#modes + rotate : {bool} + If set to ``True`` and the image contains an EXIF orientation tag, + apply the orientation before returning the ndimage. 
+ apply_gamma : {bool} + If ``True`` and the image contains metadata about gamma, apply gamma + correction to the image. + writable_output : bool + If True, ensure that the image is writable before returning it to + the user. This incurs a full copy of the pixel data if the data + served by pillow is read-only. Consequentially, setting this flag to + False improves performance for some images. + """ + + for im in ImageSequence.Iterator(self._image): + yield self._apply_transforms( + im, mode, rotate, apply_gamma, writeable_output + ) + + def _apply_transforms( + self, image, mode, rotate, apply_gamma, writeable_output + ) -> np.ndarray: + if mode is not None: + image = image.convert(mode) + elif image.mode == "P": + # adjust for pillow9 changes + # see: https://github.com/python-pillow/Pillow/issues/5929 + image = image.convert(image.palette.mode) + elif image.format == "PNG" and image.mode == "I": + major, minor, patch = pillow_version() + + if sys.byteorder == "little": + desired_mode = "I;16" + else: # pragma: no cover + # can't test big-endian in GH-Actions + desired_mode = "I;16B" + + if major < 10: # pragma: no cover + warnings.warn( + "Loading 16-bit (uint16) PNG as int32 due to limitations " + "in pillow's PNG decoder. 
This will be fixed in a future " + "version of pillow which will make this warning dissapear.", + UserWarning, + ) + elif minor < 1: # pragma: no cover + # pillow<10.1.0 can directly decode into 16-bit grayscale + image.mode = desired_mode + else: + # pillow >= 10.1.0 + image = image.convert(desired_mode) + + image = np.asarray(image) + + meta = self.metadata(index=self._image.tell(), exclude_applied=False) + if rotate and "Orientation" in meta: + transformation = _exif_orientation_transform( + meta["Orientation"], self._image.mode + ) + image = transformation(image) + + if apply_gamma and "gamma" in meta: + gamma = float(meta["gamma"]) + scale = float(65536 if image.dtype == np.uint16 else 255) + gain = 1.0 + image = ((image / scale) ** gamma) * scale * gain + 0.4999 + image = np.round(image).astype(np.uint8) + + if writeable_output and not image.flags["WRITEABLE"]: + image = np.array(image) + + return image + + def write( + self, + ndimage: Union[ArrayLike, List[ArrayLike]], + *, + mode: str = None, + format: str = None, + is_batch: bool = None, + **kwargs, + ) -> Optional[bytes]: + """ + Write an ndimage to the URI specified in path. + + If the URI points to a file on the current host and the file does not + yet exist it will be created. If the file exists already, it will be + appended if possible; otherwise, it will be replaced. + + If necessary, the image is broken down along the leading dimension to + fit into individual frames of the chosen format. If the format doesn't + support multiple frames, and IOError is raised. + + Parameters + ---------- + image : ndarray or list + The ndimage to write. If a list is given each element is expected to + be an ndimage. + mode : str + Specify the image's color format. If None (default), the mode is + inferred from the array's shape and dtype. Possible modes can be + found at: + https://pillow.readthedocs.io/en/stable/handbook/concepts.html#modes + format : str + Optional format override. 
If omitted, the format to use is + determined from the filename extension. If a file object was used + instead of a filename, this parameter must always be used. + is_batch : bool + Explicitly tell the writer that ``image`` is a batch of images + (True) or not (False). If None, the writer will guess this from the + provided ``mode`` or ``image.shape``. While the latter often works, + it may cause problems for small images due to aliasing of spatial + and color-channel axes. + kwargs : ... + Extra arguments to pass to pillow. If a writer doesn't recognise an + option, it is silently ignored. The available options are described + in pillow's `image format documentation + `_ + for each writer. + + Notes + ----- + When writing batches of very narrow (2-4 pixels wide) gray images set + the ``mode`` explicitly to avoid the batch being identified as a colored + image. + + """ + if "fps" in kwargs: + warnings.warn( + "The keyword `fps` is no longer supported. Use `duration`" + "(in ms) instead, e.g. `fps=50` == `duration=20` (1000 * 1/50).", + DeprecationWarning, + ) + kwargs["duration"] = 1000 * 1 / kwargs.get("fps") + + if isinstance(ndimage, list): + ndimage = np.stack(ndimage, axis=0) + is_batch = True + else: + ndimage = np.asarray(ndimage) + + # check if ndimage is a batch of frames/pages (e.g. for writing GIF) + # if mode is given, use it; otherwise fall back to image.ndim only + if is_batch is not None: + pass + elif mode is not None: + is_batch = ( + ndimage.ndim > 3 if Image.getmodebands(mode) > 1 else ndimage.ndim > 2 + ) + elif ndimage.ndim == 2: + is_batch = False + elif ndimage.ndim == 3 and ndimage.shape[-1] == 1: + raise ValueError("Can't write images with one color channel.") + elif ndimage.ndim == 3 and ndimage.shape[-1] in [2, 3, 4]: + # Note: this makes a channel-last assumption + is_batch = False + else: + is_batch = True + + if not is_batch: + ndimage = ndimage[None, ...] 
+ + for frame in ndimage: + pil_frame = Image.fromarray(frame, mode=mode) + if "bits" in kwargs: + pil_frame = pil_frame.quantize(colors=2 ** kwargs["bits"]) + self.images_to_write.append(pil_frame) + + if ( + format is not None + and "format" in self.save_args + and self.save_args["format"] != format + ): + old_format = self.save_args["format"] + warnings.warn( + "Changing the output format during incremental" + " writes is strongly discouraged." + f" Was `{old_format}`, is now `{format}`.", + UserWarning, + ) + + extension = self.request.extension or self.request.format_hint + self.save_args["format"] = format or Image.registered_extensions()[extension] + self.save_args.update(kwargs) + + # when writing to `bytes` we flush instantly + result = None + if self._request._uri_type == URI_BYTES: + self._flush_writer() + file = cast(BytesIO, self._request.get_file()) + result = file.getvalue() + + return result + + def _flush_writer(self): + if len(self.images_to_write) == 0: + return + + primary_image = self.images_to_write.pop(0) + + if len(self.images_to_write) > 0: + self.save_args["save_all"] = True + self.save_args["append_images"] = self.images_to_write + + primary_image.save(self._request.get_file(), **self.save_args) + self.images_to_write.clear() + self.save_args.clear() + + def get_meta(self, *, index=0) -> Dict[str, Any]: + return self.metadata(index=index, exclude_applied=False) + + def metadata( + self, index: int = None, exclude_applied: bool = True + ) -> Dict[str, Any]: + """Read ndimage metadata. + + Parameters + ---------- + index : {integer, None} + If the ImageResource contains multiple ndimages, and index is an + integer, select the index-th ndimage from among them and return its + metadata. If index is an ellipsis (...), read and return global + metadata. If index is None, this plugin reads metadata from the + first image of the file (index=0) unless the image is a GIF or APNG, + in which case global metadata is read (index=...). 
+ exclude_applied : bool + If True, exclude metadata fields that are applied to the image while + reading. For example, if the binary data contains a rotation flag, + the image is rotated by default and the rotation flag is excluded + from the metadata to avoid confusion. + + Returns + ------- + metadata : dict + A dictionary of format-specific metadata. + + """ + + if index is None: + if self._image.format == "GIF": + index = Ellipsis + elif self._image.custom_mimetype == "image/apng": + index = Ellipsis + else: + index = 0 + + if isinstance(index, int) and self._image.tell() != index: + self._image.seek(index) + + metadata = self._image.info.copy() + metadata["mode"] = self._image.mode + metadata["shape"] = self._image.size + + if self._image.mode == "P" and not exclude_applied: + metadata["palette"] = np.asarray(tuple(self._image.palette.colors.keys())) + + if self._image.getexif(): + exif_data = { + ExifTags.TAGS.get(key, "unknown"): value + for key, value in dict(self._image.getexif()).items() + } + exif_data.pop("unknown", None) + metadata.update(exif_data) + + if exclude_applied: + metadata.pop("Orientation", None) + + return metadata + + def properties(self, index: int = None) -> ImageProperties: + """Standardized ndimage metadata + Parameters + ---------- + index : int + If the ImageResource contains multiple ndimages, and index is an + integer, select the index-th ndimage from among them and return its + properties. If index is an ellipsis (...), read and return the + properties of all ndimages in the file stacked along a new batch + dimension. If index is None, this plugin reads and returns the + properties of the first image (index=0) unless the image is a GIF or + APNG, in which case it reads and returns the properties all images + (index=...). + + Returns + ------- + properties : ImageProperties + A dataclass filled with standardized image metadata. + + Notes + ----- + This does not decode pixel data and is fast for large images. 
+ + """ + + if index is None: + if self._image.format == "GIF": + index = Ellipsis + elif self._image.custom_mimetype == "image/apng": + index = Ellipsis + else: + index = 0 + + if index is Ellipsis: + self._image.seek(0) + else: + self._image.seek(index) + + if self._image.mode == "P": + # mode of palette images is determined by their palette + mode = self._image.palette.mode + else: + mode = self._image.mode + + width: int = self._image.width + height: int = self._image.height + shape: Tuple[int, ...] = (height, width) + + n_frames: Optional[int] = None + if index is ...: + n_frames = getattr(self._image, "n_frames", 1) + shape = (n_frames, *shape) + + dummy = np.asarray(Image.new(mode, (1, 1))) + pil_shape: Tuple[int, ...] = dummy.shape + if len(pil_shape) > 2: + shape = (*shape, *pil_shape[2:]) + + return ImageProperties( + shape=shape, + dtype=dummy.dtype, + n_images=n_frames, + is_batch=index is Ellipsis, + ) diff --git a/.venv/Lib/site-packages/imageio/plugins/pillow_info.py b/.venv/Lib/site-packages/imageio/plugins/pillow_info.py new file mode 100644 index 00000000..59b971ce --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/pillow_info.py @@ -0,0 +1,1053 @@ +# -*- coding: utf-8 -*- + +# styletest: ignore E122 E123 E501 + +""" +Module that contain info about the Pillow formats. The first part of +this module generates this info and writes it to its own bottom half +if run as a script. +""" + +import warnings + +warnings.warn( + "The `PillowFormat` plugin is deprecated and will be removed in ImageIO v3." 
+ " Use the new `PillowPlugin` instead.", + DeprecationWarning, +) + + +def generate_info(): # pragma: no cover + from urllib.request import urlopen + import PIL + from PIL import Image + + Image.init() + + ids = [] + formats = [] + docs = {} + + # Collect formats and their summary from plugin modules + for mod_name in dir(PIL): + if "ImagePlugin" in mod_name: + mod = getattr(PIL, mod_name) + for ob_name in dir(mod): + ob = getattr(mod, ob_name) + if isinstance(ob, type) and issubclass(ob, Image.Image): + if ob.format in ids: + print("Found duplicate for", ob.format) + else: + ids.append(ob.format) + formats.append((ob.format, ob.format_description)) + + # Add extension info + for i in range(len(formats)): + id, summary = formats[i] + ext = " ".join([e for e in Image.EXTENSION if Image.EXTENSION[e] == id]) + formats[i] = id, summary, ext + + # Get documentation of formats + url = "https://raw.githubusercontent.com/python-pillow/Pillow/master/docs/handbook/image-file-formats.rst" # noqa + lines = urlopen(url).read().decode().splitlines() + lines.append("End") + lines.append("---") # for the end + + # Parse documentation + cur_name = "" + cur_part = [] + for i in range(len(lines)): + line = lines[i] + if line.startswith(("^^^", "---", "===")): + if cur_name and cur_name in ids: + text = "\n".join(cur_part[:-1]) + text = text.replace("versionadded::", "versionadded:: Pillow ") + text = text.replace("Image.open`", "Image.write`") + docs[cur_name] = text + cur_part = [] + cur_name = lines[i - 1].strip().replace(" ", "").upper() + else: + cur_part.append(" " + line) + + # Fill in the blancs + for id in ids: + if id in docs: + docs[id] = "*From the Pillow docs:*\n\n" + docs[id] + else: + docs[id] = "No docs for %s." % id + print("no docs for", id) + + # Sort before writing + formats.sort(key=lambda x: x[0]) + ids.sort() + + # Read file ... 
+ code = open(__file__, "rb").read().decode() + code, divider, _ = code.partition("## BELOW IS " + "AUTOGENERATED") + code += divider + "\n\n" + + # Write formats + code += "pillow_formats = [\n" + for i in range(len(formats)): + print(formats[i]) + code += " (%r, %r, %r),\n" % formats[i] + code += " ]\n\n\n" + + # Write docs + code += "pillow_docs = {\n" + for id in ids: + code += '%r:\nu"""%s""",\n' % (id, docs[id]) + code += "}\n" + + # Write back + with open(__file__, "wb") as f: + f.write(code.encode()) + + +if __name__ == "__main__": + generate_info() + + +# BELOW IS AUTOGENERATED + +pillow_formats = [ + ("BMP", "Windows Bitmap", ".bmp"), + ("BUFR", "BUFR", ".bufr"), + ("CUR", "Windows Cursor", ".cur"), + ("DCX", "Intel DCX", ".dcx"), + ("DDS", "DirectDraw Surface", ".dds"), + ("DIB", "Windows Bitmap", ""), + ("EPS", "Encapsulated Postscript", ".ps .eps"), + ("FITS", "FITS", ".fit .fits"), + ("FLI", "Autodesk FLI/FLC Animation", ".fli .flc"), + ("FPX", "FlashPix", ".fpx"), + ("FTEX", "Texture File Format (IW2:EOC)", ".ftc .ftu"), + ("GBR", "GIMP brush file", ".gbr"), + ("GIF", "Compuserve GIF", ".gif"), + ("GRIB", "GRIB", ".grib"), + ("HDF5", "HDF5", ".h5 .hdf"), + ("ICNS", "Mac OS icns resource", ".icns"), + ("ICO", "Windows Icon", ".ico"), + ("IM", "IFUNC Image Memory", ".im"), + ("IMT", "IM Tools", ""), + ("IPTC", "IPTC/NAA", ".iim"), + ("JPEG", "JPEG (ISO 10918)", ".jfif .jpe .jpg .jpeg"), + ("JPEG2000", "JPEG 2000 (ISO 15444)", ".jp2 .j2k .jpc .jpf .jpx .j2c"), + ("MCIDAS", "McIdas area file", ""), + ("MIC", "Microsoft Image Composer", ".mic"), + ("MPEG", "MPEG", ".mpg .mpeg"), + ("MPO", "MPO (CIPA DC-007)", ".mpo"), + ("MSP", "Windows Paint", ".msp"), + ("PCD", "Kodak PhotoCD", ".pcd"), + ("PCX", "Paintbrush", ".pcx"), + ("PIXAR", "PIXAR raster image", ".pxr"), + ("PNG", "Portable network graphics", ".png"), + ("PPM", "Pbmplus image", ".pbm .pgm .ppm"), + ("PSD", "Adobe Photoshop", ".psd"), + ("SGI", "SGI Image File Format", ".bw .rgb .rgba .sgi"), + 
("SPIDER", "Spider 2D image", ""), + ("SUN", "Sun Raster File", ".ras"), + ("TGA", "Targa", ".tga"), + ("TIFF", "Adobe TIFF", ".tif .tiff"), + ("WMF", "Windows Metafile", ".wmf .emf"), + ("XBM", "X11 Bitmap", ".xbm"), + ("XPM", "X11 Pixel Map", ".xpm"), + ("XVThumb", "XV thumbnail image", ""), +] + + +pillow_docs = { + "BMP": """*From the Pillow docs:* + + + PIL reads and writes Windows and OS/2 BMP files containing ``1``, ``L``, ``P``, + or ``RGB`` data. 16-colour images are read as ``P`` images. Run-length encoding + is not supported. + + The :py:meth:`~PIL.Image.Image.write` method sets the following + :py:attr:`~PIL.Image.Image.info` properties: + + **compression** + Set to ``bmp_rle`` if the file is run-length encoded. + """, + "BUFR": """*From the Pillow docs:* + + + .. versionadded:: Pillow 1.1.3 + + PIL provides a stub driver for BUFR files. + + To add read or write support to your application, use + :py:func:`PIL.BufrStubImagePlugin.register_handler`. + """, + "CUR": """*From the Pillow docs:* + + + CUR is used to store cursors on Windows. The CUR decoder reads the largest + available cursor. Animated cursors are not supported. + """, + "DCX": """*From the Pillow docs:* + + + DCX is a container file format for PCX files, defined by Intel. The DCX format + is commonly used in fax applications. The DCX decoder can read files containing + ``1``, ``L``, ``P``, or ``RGB`` data. + + When the file is opened, only the first image is read. You can use + :py:meth:`~file.seek` or :py:mod:`~PIL.ImageSequence` to read other images. + + """, + "DDS": """*From the Pillow docs:* + + + DDS is a popular container texture format used in video games and natively + supported by DirectX. + Currently, DXT1, DXT3, and DXT5 pixel formats are supported and only in ``RGBA`` + mode. + + .. 
versionadded:: Pillow 3.4.0 DXT3 + """, + "DIB": """No docs for DIB.""", + "EPS": """*From the Pillow docs:* + + + PIL identifies EPS files containing image data, and can read files that contain + embedded raster images (ImageData descriptors). If Ghostscript is available, + other EPS files can be read as well. The EPS driver can also write EPS + images. The EPS driver can read EPS images in ``L``, ``LAB``, ``RGB`` and + ``CMYK`` mode, but Ghostscript may convert the images to ``RGB`` mode rather + than leaving them in the original color space. The EPS driver can write images + in ``L``, ``RGB`` and ``CMYK`` modes. + + If Ghostscript is available, you can call the :py:meth:`~PIL.Image.Image.load` + method with the following parameter to affect how Ghostscript renders the EPS + + **scale** + Affects the scale of the resultant rasterized image. If the EPS suggests + that the image be rendered at 100px x 100px, setting this parameter to + 2 will make the Ghostscript render a 200px x 200px image instead. The + relative position of the bounding box is maintained:: + + im = Image.open(...) + im.size #(100,100) + im.load(scale=2) + im.size #(200,200) + """, + "FITS": """*From the Pillow docs:* + + + .. versionadded:: Pillow 1.1.5 + + PIL provides a stub driver for FITS files. + + To add read or write support to your application, use + :py:func:`PIL.FitsStubImagePlugin.register_handler`. + """, + "FLI": """No docs for FLI.""", + "FPX": """*From the Pillow docs:* + + + PIL reads Kodak FlashPix files. In the current version, only the highest + resolution image is read from the file, and the viewing transform is not taken + into account. + + .. note:: + + To enable full FlashPix support, you need to build and install the IJG JPEG + library before building the Python Imaging Library. See the distribution + README for details. + """, + "FTEX": """*From the Pillow docs:* + + + .. 
versionadded:: Pillow 3.2.0 + + The FTEX decoder reads textures used for 3D objects in + Independence War 2: Edge Of Chaos. The plugin reads a single texture + per file, in the compressed and uncompressed formats. + """, + "GBR": """*From the Pillow docs:* + + + The GBR decoder reads GIMP brush files, version 1 and 2. + + The :py:meth:`~PIL.Image.Image.write` method sets the following + :py:attr:`~PIL.Image.Image.info` properties: + + **comment** + The brush name. + + **spacing** + The spacing between the brushes, in pixels. Version 2 only. + + GD + ^^ + + PIL reads uncompressed GD files. Note that this file format cannot be + automatically identified, so you must use :py:func:`PIL.GdImageFile.open` to + read such a file. + + The :py:meth:`~PIL.Image.Image.write` method sets the following + :py:attr:`~PIL.Image.Image.info` properties: + + **transparency** + Transparency color index. This key is omitted if the image is not + transparent. + """, + "GIF": """*From the Pillow docs:* + + + PIL reads GIF87a and GIF89a versions of the GIF file format. The library writes + run-length encoded files in GIF87a by default, unless GIF89a features + are used or GIF89a is already in use. + + Note that GIF files are always read as grayscale (``L``) + or palette mode (``P``) images. + + The :py:meth:`~PIL.Image.Image.write` method sets the following + :py:attr:`~PIL.Image.Image.info` properties: + + **background** + Default background color (a palette color index). + + **transparency** + Transparency color index. This key is omitted if the image is not + transparent. + + **version** + Version (either ``GIF87a`` or ``GIF89a``). + + **duration** + May not be present. The time to display the current frame + of the GIF, in milliseconds. + + **loop** + May not be present. The number of times the GIF should loop. + + Reading sequences + ~~~~~~~~~~~~~~~~~ + + The GIF loader supports the :py:meth:`~file.seek` and :py:meth:`~file.tell` + methods. 
You can seek to the next frame (``im.seek(im.tell() + 1)``), or rewind + the file by seeking to the first frame. Random access is not supported. + + ``im.seek()`` raises an ``EOFError`` if you try to seek after the last frame. + + Saving + ~~~~~~ + + When calling :py:meth:`~PIL.Image.Image.save`, the following options + are available:: + + im.save(out, save_all=True, append_images=[im1, im2, ...]) + + **save_all** + If present and true, all frames of the image will be saved. If + not, then only the first frame of a multiframe image will be saved. + + **append_images** + A list of images to append as additional frames. Each of the + images in the list can be single or multiframe images. + This is currently only supported for GIF, PDF, TIFF, and WebP. + + **duration** + The display duration of each frame of the multiframe gif, in + milliseconds. Pass a single integer for a constant duration, or a + list or tuple to set the duration for each frame separately. + + **loop** + Integer number of times the GIF should loop. + + **optimize** + If present and true, attempt to compress the palette by + eliminating unused colors. This is only useful if the palette can + be compressed to the next smaller power of 2 elements. + + **palette** + Use the specified palette for the saved image. The palette should + be a bytes or bytearray object containing the palette entries in + RGBRGB... form. It should be no more than 768 bytes. Alternately, + the palette can be passed in as an + :py:class:`PIL.ImagePalette.ImagePalette` object. + + **disposal** + Indicates the way in which the graphic is to be treated after being displayed. + + * 0 - No disposal specified. + * 1 - Do not dispose. + * 2 - Restore to background color. + * 3 - Restore to previous content. + + Pass a single integer for a constant disposal, or a list or tuple + to set the disposal for each frame separately. 
+ + Reading local images + ~~~~~~~~~~~~~~~~~~~~ + + The GIF loader creates an image memory the same size as the GIF file’s *logical + screen size*, and pastes the actual pixel data (the *local image*) into this + image. If you only want the actual pixel rectangle, you can manipulate the + :py:attr:`~PIL.Image.Image.size` and :py:attr:`~PIL.Image.Image.tile` + attributes before loading the file:: + + im = Image.open(...) + + if im.tile[0][0] == "gif": + # only read the first "local image" from this GIF file + tag, (x0, y0, x1, y1), offset, extra = im.tile[0] + im.size = (x1 - x0, y1 - y0) + im.tile = [(tag, (0, 0) + im.size, offset, extra)] + """, + "GRIB": """*From the Pillow docs:* + + + .. versionadded:: Pillow 1.1.5 + + PIL provides a stub driver for GRIB files. + + The driver requires the file to start with a GRIB header. If you have files + with embedded GRIB data, or files with multiple GRIB fields, your application + has to seek to the header before passing the file handle to PIL. + + To add read or write support to your application, use + :py:func:`PIL.GribStubImagePlugin.register_handler`. + """, + "HDF5": """*From the Pillow docs:* + + + .. versionadded:: Pillow 1.1.5 + + PIL provides a stub driver for HDF5 files. + + To add read or write support to your application, use + :py:func:`PIL.Hdf5StubImagePlugin.register_handler`. + """, + "ICNS": """*From the Pillow docs:* + + + PIL reads and (macOS only) writes macOS ``.icns`` files. By default, the + largest available icon is read, though you can override this by setting the + :py:attr:`~PIL.Image.Image.size` property before calling + :py:meth:`~PIL.Image.Image.load`. The :py:meth:`~PIL.Image.Image.write` method + sets the following :py:attr:`~PIL.Image.Image.info` property: + + **sizes** + A list of supported sizes found in this icon file; these are a + 3-tuple, ``(width, height, scale)``, where ``scale`` is 2 for a retina + icon and 1 for a standard icon. 
You *are* permitted to use this 3-tuple + format for the :py:attr:`~PIL.Image.Image.size` property if you set it + before calling :py:meth:`~PIL.Image.Image.load`; after loading, the size + will be reset to a 2-tuple containing pixel dimensions (so, e.g. if you + ask for ``(512, 512, 2)``, the final value of + :py:attr:`~PIL.Image.Image.size` will be ``(1024, 1024)``). + """, + "ICO": """*From the Pillow docs:* + + + ICO is used to store icons on Windows. The largest available icon is read. + + The :py:meth:`~PIL.Image.Image.save` method supports the following options: + + **sizes** + A list of sizes including in this ico file; these are a 2-tuple, + ``(width, height)``; Default to ``[(16, 16), (24, 24), (32, 32), (48, 48), + (64, 64), (128, 128), (256, 256)]``. Any sizes bigger than the original + size or 256 will be ignored. + + IM + ^^ + + IM is a format used by LabEye and other applications based on the IFUNC image + processing library. The library reads and writes most uncompressed interchange + versions of this format. + + IM is the only format that can store all internal PIL formats. + """, + "IM": """No docs for IM.""", + "IMT": """*From the Pillow docs:* + + + PIL reads Image Tools images containing ``L`` data. + """, + "IPTC": """No docs for IPTC.""", + "JPEG": """*From the Pillow docs:* + + + PIL reads JPEG, JFIF, and Adobe JPEG files containing ``L``, ``RGB``, or + ``CMYK`` data. It writes standard and progressive JFIF files. + + Using the :py:meth:`~PIL.Image.Image.draft` method, you can speed things up by + converting ``RGB`` images to ``L``, and resize images to 1/2, 1/4 or 1/8 of + their original size while loading them. + + The :py:meth:`~PIL.Image.Image.write` method may set the following + :py:attr:`~PIL.Image.Image.info` properties if available: + + **jfif** + JFIF application marker found. If the file is not a JFIF file, this key is + not present. + + **jfif_version** + A tuple representing the jfif version, (major version, minor version). 
+ + **jfif_density** + A tuple representing the pixel density of the image, in units specified + by jfif_unit. + + **jfif_unit** + Units for the jfif_density: + + * 0 - No Units + * 1 - Pixels per Inch + * 2 - Pixels per Centimeter + + **dpi** + A tuple representing the reported pixel density in pixels per inch, if + the file is a jfif file and the units are in inches. + + **adobe** + Adobe application marker found. If the file is not an Adobe JPEG file, this + key is not present. + + **adobe_transform** + Vendor Specific Tag. + + **progression** + Indicates that this is a progressive JPEG file. + + **icc_profile** + The ICC color profile for the image. + + **exif** + Raw EXIF data from the image. + + + The :py:meth:`~PIL.Image.Image.save` method supports the following options: + + **quality** + The image quality, on a scale from 1 (worst) to 95 (best). The default is + 75. Values above 95 should be avoided; 100 disables portions of the JPEG + compression algorithm, and results in large files with hardly any gain in + image quality. + + **optimize** + If present and true, indicates that the encoder should make an extra pass + over the image in order to select optimal encoder settings. + + **progressive** + If present and true, indicates that this image should be stored as a + progressive JPEG file. + + **dpi** + A tuple of integers representing the pixel density, ``(x,y)``. + + **icc_profile** + If present and true, the image is stored with the provided ICC profile. + If this parameter is not provided, the image will be saved with no profile + attached. To preserve the existing profile:: + + im.save(filename, 'jpeg', icc_profile=im.info.get('icc_profile')) + + **exif** + If present, the image will be stored with the provided raw EXIF data. + + **subsampling** + If present, sets the subsampling for the encoder. + + * ``keep``: Only valid for JPEG files, will retain the original image setting. 
+ * ``4:4:4``, ``4:2:2``, ``4:2:0``: Specific sampling values + * ``-1``: equivalent to ``keep`` + * ``0``: equivalent to ``4:4:4`` + * ``1``: equivalent to ``4:2:2`` + * ``2``: equivalent to ``4:2:0`` + + **qtables** + If present, sets the qtables for the encoder. This is listed as an + advanced option for wizards in the JPEG documentation. Use with + caution. ``qtables`` can be one of several types of values: + + * a string, naming a preset, e.g. ``keep``, ``web_low``, or ``web_high`` + * a list, tuple, or dictionary (with integer keys = + range(len(keys))) of lists of 64 integers. There must be + between 2 and 4 tables. + + .. versionadded:: Pillow 2.5.0 + + + .. note:: + + To enable JPEG support, you need to build and install the IJG JPEG library + before building the Python Imaging Library. See the distribution README for + details. + """, + "JPEG2000": """*From the Pillow docs:* + + + .. versionadded:: Pillow 2.4.0 + + PIL reads and writes JPEG 2000 files containing ``L``, ``LA``, ``RGB`` or + ``RGBA`` data. It can also read files containing ``YCbCr`` data, which it + converts on read into ``RGB`` or ``RGBA`` depending on whether or not there is + an alpha channel. PIL supports JPEG 2000 raw codestreams (``.j2k`` files), as + well as boxed JPEG 2000 files (``.j2p`` or ``.jpx`` files). PIL does *not* + support files whose components have different sampling frequencies. + + When loading, if you set the ``mode`` on the image prior to the + :py:meth:`~PIL.Image.Image.load` method being invoked, you can ask PIL to + convert the image to either ``RGB`` or ``RGBA`` rather than choosing for + itself. It is also possible to set ``reduce`` to the number of resolutions to + discard (each one reduces the size of the resulting image by a factor of 2), + and ``layers`` to specify the number of quality layers to load. + + The :py:meth:`~PIL.Image.Image.save` method supports the following options: + + **offset** + The image offset, as a tuple of integers, e.g. 
(16, 16) + + **tile_offset** + The tile offset, again as a 2-tuple of integers. + + **tile_size** + The tile size as a 2-tuple. If not specified, or if set to None, the + image will be saved without tiling. + + **quality_mode** + Either `"rates"` or `"dB"` depending on the units you want to use to + specify image quality. + + **quality_layers** + A sequence of numbers, each of which represents either an approximate size + reduction (if quality mode is `"rates"`) or a signal to noise ratio value + in decibels. If not specified, defaults to a single layer of full quality. + + **num_resolutions** + The number of different image resolutions to be stored (which corresponds + to the number of Discrete Wavelet Transform decompositions plus one). + + **codeblock_size** + The code-block size as a 2-tuple. Minimum size is 4 x 4, maximum is 1024 x + 1024, with the additional restriction that no code-block may have more + than 4096 coefficients (i.e. the product of the two numbers must be no + greater than 4096). + + **precinct_size** + The precinct size as a 2-tuple. Must be a power of two along both axes, + and must be greater than the code-block size. + + **irreversible** + If ``True``, use the lossy Irreversible Color Transformation + followed by DWT 9-7. Defaults to ``False``, which means to use the + Reversible Color Transformation with DWT 5-3. + + **progression** + Controls the progression order; must be one of ``"LRCP"``, ``"RLCP"``, + ``"RPCL"``, ``"PCRL"``, ``"CPRL"``. The letters stand for Component, + Position, Resolution and Layer respectively and control the order of + encoding, the idea being that e.g. an image encoded using LRCP mode can + have its quality layers decoded as they arrive at the decoder, while one + encoded using RLCP mode will have increasing resolutions decoded as they + arrive, and so on. + + **cinema_mode** + Set the encoder to produce output compliant with the digital cinema + specifications. 
The options here are ``"no"`` (the default), + ``"cinema2k-24"`` for 24fps 2K, ``"cinema2k-48"`` for 48fps 2K, and + ``"cinema4k-24"`` for 24fps 4K. Note that for compliant 2K files, + *at least one* of your image dimensions must match 2048 x 1080, while + for compliant 4K files, *at least one* of the dimensions must match + 4096 x 2160. + + .. note:: + + To enable JPEG 2000 support, you need to build and install the OpenJPEG + library, version 2.0.0 or higher, before building the Python Imaging + Library. + + Windows users can install the OpenJPEG binaries available on the + OpenJPEG website, but must add them to their PATH in order to use PIL (if + you fail to do this, you will get errors about not being able to load the + ``_imaging`` DLL). + """, + "MCIDAS": """*From the Pillow docs:* + + + PIL identifies and reads 8-bit McIdas area files. + """, + "MIC": """*From the Pillow docs:* + + + PIL identifies and reads Microsoft Image Composer (MIC) files. When opened, the + first sprite in the file is loaded. You can use :py:meth:`~file.seek` and + :py:meth:`~file.tell` to read other sprites from the file. + + Note that there may be an embedded gamma of 2.2 in MIC files. + """, + "MPEG": """*From the Pillow docs:* + + + PIL identifies MPEG files. + """, + "MPO": """*From the Pillow docs:* + + + Pillow identifies and reads Multi Picture Object (MPO) files, loading the primary + image when first opened. The :py:meth:`~file.seek` and :py:meth:`~file.tell` + methods may be used to read other pictures from the file. The pictures are + zero-indexed and random access is supported. + """, + "MSP": """*From the Pillow docs:* + + + PIL identifies and reads MSP files from Windows 1 and 2. The library writes + uncompressed (Windows 1) versions of this format. + """, + "PCD": """*From the Pillow docs:* + + + PIL reads PhotoCD files containing ``RGB`` data. This only reads the 768x512 + resolution image from the file. Higher resolutions are encoded in a proprietary + encoding. 
+ """, + "PCX": """*From the Pillow docs:* + + + PIL reads and writes PCX files containing ``1``, ``L``, ``P``, or ``RGB`` data. + """, + "PIXAR": """*From the Pillow docs:* + + + PIL provides limited support for PIXAR raster files. The library can identify + and read “dumped” RGB files. + + The format code is ``PIXAR``. + """, + "PNG": """*From the Pillow docs:* + + + PIL identifies, reads, and writes PNG files containing ``1``, ``L``, ``P``, + ``RGB``, or ``RGBA`` data. Interlaced files are supported as of v1.1.7. + + The :py:meth:`~PIL.Image.Image.write` method sets the following + :py:attr:`~PIL.Image.Image.info` properties, when appropriate: + + **chromaticity** + The chromaticity points, as an 8 tuple of floats. (``White Point + X``, ``White Point Y``, ``Red X``, ``Red Y``, ``Green X``, ``Green + Y``, ``Blue X``, ``Blue Y``) + + **gamma** + Gamma, given as a floating point number. + + **srgb** + The sRGB rendering intent as an integer. + + * 0 Perceptual + * 1 Relative Colorimetric + * 2 Saturation + * 3 Absolute Colorimetric + + **transparency** + For ``P`` images: Either the palette index for full transparent pixels, + or a byte string with alpha values for each palette entry. + + For ``L`` and ``RGB`` images, the color that represents full transparent + pixels in this image. + + This key is omitted if the image is not a transparent palette image. + + ``Open`` also sets ``Image.text`` to a list of the values of the + ``tEXt``, ``zTXt``, and ``iTXt`` chunks of the PNG image. Individual + compressed chunks are limited to a decompressed size of + ``PngImagePlugin.MAX_TEXT_CHUNK``, by default 1MB, to prevent + decompression bombs. Additionally, the total size of all of the text + chunks is limited to ``PngImagePlugin.MAX_TEXT_MEMORY``, defaulting to + 64MB. + + The :py:meth:`~PIL.Image.Image.save` method supports the following options: + + **optimize** + If present and true, instructs the PNG writer to make the output file as + small as possible. 
This includes extra processing in order to find optimal + encoder settings. + + **transparency** + For ``P``, ``L``, and ``RGB`` images, this option controls what + color image to mark as transparent. + + For ``P`` images, this can be a either the palette index, + or a byte string with alpha values for each palette entry. + + **dpi** + A tuple of two numbers corresponding to the desired dpi in each direction. + + **pnginfo** + A :py:class:`PIL.PngImagePlugin.PngInfo` instance containing text tags. + + **compress_level** + ZLIB compression level, a number between 0 and 9: 1 gives best speed, + 9 gives best compression, 0 gives no compression at all. Default is 6. + When ``optimize`` option is True ``compress_level`` has no effect + (it is set to 9 regardless of a value passed). + + **icc_profile** + The ICC Profile to include in the saved file. + + **bits (experimental)** + For ``P`` images, this option controls how many bits to store. If omitted, + the PNG writer uses 8 bits (256 colors). + + **dictionary (experimental)** + Set the ZLIB encoder dictionary. + + .. note:: + + To enable PNG support, you need to build and install the ZLIB compression + library before building the Python Imaging Library. See the installation + documentation for details. + """, + "PPM": """*From the Pillow docs:* + + + PIL reads and writes PBM, PGM and PPM files containing ``1``, ``L`` or ``RGB`` + data. + """, + "PSD": """*From the Pillow docs:* + + + PIL identifies and reads PSD files written by Adobe Photoshop 2.5 and 3.0. + + """, + "SGI": """*From the Pillow docs:* + + + Pillow reads and writes uncompressed ``L``, ``RGB``, and ``RGBA`` files. + + """, + "SPIDER": """*From the Pillow docs:* + + + PIL reads and writes SPIDER image files of 32-bit floating point data + ("F;32F"). + + PIL also reads SPIDER stack files containing sequences of SPIDER images. The + :py:meth:`~file.seek` and :py:meth:`~file.tell` methods are supported, and + random access is allowed. 
+ + The :py:meth:`~PIL.Image.Image.write` method sets the following attributes: + + **format** + Set to ``SPIDER`` + + **istack** + Set to 1 if the file is an image stack, else 0. + + **nimages** + Set to the number of images in the stack. + + A convenience method, :py:meth:`~PIL.Image.Image.convert2byte`, is provided for + converting floating point data to byte data (mode ``L``):: + + im = Image.open('image001.spi').convert2byte() + + Writing files in SPIDER format + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + The extension of SPIDER files may be any 3 alphanumeric characters. Therefore + the output format must be specified explicitly:: + + im.save('newimage.spi', format='SPIDER') + + For more information about the SPIDER image processing package, see the + `SPIDER homepage`_ at `Wadsworth Center`_. + + .. _SPIDER homepage: https://spider.wadsworth.org/spider_doc/spider/docs/spider.html + .. _Wadsworth Center: https://www.wadsworth.org/ + """, + "SUN": """No docs for SUN.""", + "TGA": """*From the Pillow docs:* + + + PIL reads 24- and 32-bit uncompressed and run-length encoded TGA files. + """, + "TIFF": """*From the Pillow docs:* + + + Pillow reads and writes TIFF files. It can read both striped and tiled + images, pixel and plane interleaved multi-band images. If you have + libtiff and its headers installed, PIL can read and write many kinds + of compressed TIFF files. If not, PIL will only read and write + uncompressed files. + + .. note:: + + Beginning in version 5.0.0, Pillow requires libtiff to read or + write compressed files. Prior to that release, Pillow had buggy + support for reading Packbits, LZW and JPEG compressed TIFFs + without using libtiff. + + The :py:meth:`~PIL.Image.Image.write` method sets the following + :py:attr:`~PIL.Image.Image.info` properties: + + **compression** + Compression mode. + + .. versionadded:: Pillow 2.0.0 + + **dpi** + Image resolution as an ``(xdpi, ydpi)`` tuple, where applicable. 
You can use + the :py:attr:`~PIL.Image.Image.tag` attribute to get more detailed + information about the image resolution. + + .. versionadded:: Pillow 1.1.5 + + **resolution** + Image resolution as an ``(xres, yres)`` tuple, where applicable. This is a + measurement in whichever unit is specified by the file. + + .. versionadded:: Pillow 1.1.5 + + + The :py:attr:`~PIL.Image.Image.tag_v2` attribute contains a dictionary + of TIFF metadata. The keys are numerical indexes from + :py:attr:`~PIL.TiffTags.TAGS_V2`. Values are strings or numbers for single + items, multiple values are returned in a tuple of values. Rational + numbers are returned as a :py:class:`~PIL.TiffImagePlugin.IFDRational` + object. + + .. versionadded:: Pillow 3.0.0 + + For compatibility with legacy code, the + :py:attr:`~PIL.Image.Image.tag` attribute contains a dictionary of + decoded TIFF fields as returned prior to version 3.0.0. Values are + returned as either strings or tuples of numeric values. Rational + numbers are returned as a tuple of ``(numerator, denominator)``. + + .. deprecated:: 3.0.0 + + + Saving Tiff Images + ~~~~~~~~~~~~~~~~~~ + + The :py:meth:`~PIL.Image.Image.save` method can take the following keyword arguments: + + **save_all** + If true, Pillow will save all frames of the image to a multiframe tiff document. + + .. versionadded:: Pillow 3.4.0 + + **tiffinfo** + A :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` object or dict + object containing tiff tags and values. The TIFF field type is + autodetected for Numeric and string values, any other types + require using an :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + object and setting the type in + :py:attr:`~PIL.TiffImagePlugin.ImageFileDirectory_v2.tagtype` with + the appropriate numerical value from + ``TiffTags.TYPES``. + + .. versionadded:: Pillow 2.3.0 + + Metadata values that are of the rational type should be passed in + using a :py:class:`~PIL.TiffImagePlugin.IFDRational` object. + + .. 
versionadded:: Pillow 3.1.0 + + For compatibility with legacy code, a + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` object may + be passed in this field. However, this is deprecated. + + .. versionadded:: Pillow 3.0.0 + + .. note:: + + Only some tags are currently supported when writing using + libtiff. The supported list is found in + :py:attr:`~PIL:TiffTags.LIBTIFF_CORE`. + + **compression** + A string containing the desired compression method for the + file. (valid only with libtiff installed) Valid compression + methods are: ``None``, ``"tiff_ccitt"``, ``"group3"``, + ``"group4"``, ``"tiff_jpeg"``, ``"tiff_adobe_deflate"``, + ``"tiff_thunderscan"``, ``"tiff_deflate"``, ``"tiff_sgilog"``, + ``"tiff_sgilog24"``, ``"tiff_raw_16"`` + + These arguments to set the tiff header fields are an alternative to + using the general tags available through tiffinfo. + + **description** + + **software** + + **date_time** + + **artist** + + **copyright** + Strings + + **resolution_unit** + A string of "inch", "centimeter" or "cm" + + **resolution** + + **x_resolution** + + **y_resolution** + + **dpi** + Either a Float, 2 tuple of (numerator, denominator) or a + :py:class:`~PIL.TiffImagePlugin.IFDRational`. Resolution implies + an equal x and y resolution, dpi also implies a unit of inches. + + """, + "WMF": """*From the Pillow docs:* + + + PIL can identify playable WMF files. + + In PIL 1.1.4 and earlier, the WMF driver provides some limited rendering + support, but not enough to be useful for any real application. + + In PIL 1.1.5 and later, the WMF driver is a stub driver. To add WMF read or + write support to your application, use + :py:func:`PIL.WmfImagePlugin.register_handler` to register a WMF handler. + + :: + + from PIL import Image + from PIL import WmfImagePlugin + + class WmfHandler: + def open(self, im): + ... + def load(self, im): + ... + return image + def save(self, im, fp, filename): + ... 
+ + wmf_handler = WmfHandler() + + WmfImagePlugin.register_handler(wmf_handler) + + im = Image.open("sample.wmf")""", + "XBM": """*From the Pillow docs:* + + + PIL reads and writes X bitmap files (mode ``1``). + """, + "XPM": """*From the Pillow docs:* + + + PIL reads X pixmap files (mode ``P``) with 256 colors or less. + + The :py:meth:`~PIL.Image.Image.write` method sets the following + :py:attr:`~PIL.Image.Image.info` properties: + + **transparency** + Transparency color index. This key is omitted if the image is not + transparent. + """, + "XVThumb": """No docs for XVThumb.""", +} diff --git a/.venv/Lib/site-packages/imageio/plugins/pillow_legacy.py b/.venv/Lib/site-packages/imageio/plugins/pillow_legacy.py new file mode 100644 index 00000000..92d78beb --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/pillow_legacy.py @@ -0,0 +1,825 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +""" Read/Write images using pillow/PIL (legacy). + +Backend Library: `Pillow `_ + +Pillow is a friendly fork of PIL (Python Image Library) and supports +reading and writing of common formats (jpg, png, gif, tiff, ...). While +these docs provide an overview of some of its features, pillow is +constantly improving. Hence, the complete list of features can be found +in pillows official docs (see the Backend Library link). 
+ +Parameters for Reading +---------------------- +pilmode : str + (Available for all formats except GIF-PIL) + From the Pillow documentation: + + * 'L' (8-bit pixels, grayscale) + * 'P' (8-bit pixels, mapped to any other mode using a color palette) + * 'RGB' (3x8-bit pixels, true color) + * 'RGBA' (4x8-bit pixels, true color with transparency mask) + * 'CMYK' (4x8-bit pixels, color separation) + * 'YCbCr' (3x8-bit pixels, color video format) + * 'I' (32-bit signed integer pixels) + * 'F' (32-bit floating point pixels) + + PIL also provides limited support for a few special modes, including + 'LA' ('L' with alpha), 'RGBX' (true color with padding) and 'RGBa' + (true color with premultiplied alpha). + + When translating a color image to grayscale (mode 'L', 'I' or 'F'), + the library uses the ITU-R 601-2 luma transform:: + + L = R * 299/1000 + G * 587/1000 + B * 114/1000 +as_gray : bool + (Available for all formats except GIF-PIL) + If True, the image is converted using mode 'F'. When `mode` is + not None and `as_gray` is True, the image is first converted + according to `mode`, and the result is then "flattened" using + mode 'F'. +ignoregamma : bool + (Only available in PNG-PIL) + Avoid gamma correction. Default True. +exifrotate : bool + (Only available in JPEG-PIL) + Automatically rotate the image according to exif flag. Default True. + + +Parameters for saving +--------------------- +optimize : bool + (Only available in PNG-PIL) + If present and true, instructs the PNG writer to make the output file + as small as possible. This includes extra processing in order to find + optimal encoder settings. +transparency: + (Only available in PNG-PIL) + This option controls what color image to mark as transparent. +dpi: tuple of two scalars + (Only available in PNG-PIL) + The desired dpi in each direction. +pnginfo: PIL.PngImagePlugin.PngInfo + (Only available in PNG-PIL) + Object containing text tags. 
+compress_level: int + (Only available in PNG-PIL) + ZLIB compression level, a number between 0 and 9: 1 gives best speed, + 9 gives best compression, 0 gives no compression at all. Default is 9. + When ``optimize`` option is True ``compress_level`` has no effect + (it is set to 9 regardless of a value passed). +compression: int + (Only available in PNG-PIL) + Compatibility with the freeimage PNG format. If given, it overrides + compress_level. +icc_profile: + (Only available in PNG-PIL) + The ICC Profile to include in the saved file. +bits (experimental): int + (Only available in PNG-PIL) + This option controls how many bits to store. If omitted, + the PNG writer uses 8 bits (256 colors). +quantize: + (Only available in PNG-PIL) + Compatibility with the freeimage PNG format. If given, it overrides + bits. In this case, given as a number between 1-256. +dictionary (experimental): dict + (Only available in PNG-PIL) + Set the ZLIB encoder dictionary. +prefer_uint8: bool + (Only available in PNG-PIL) + Let the PNG writer truncate uint16 image arrays to uint8 if their values fall + within the range [0, 255]. Defaults to true for legacy compatibility, however + it is recommended to set this to false to avoid unexpected behavior when + saving e.g. weakly saturated images. + +quality : scalar + (Only available in JPEG-PIL) + The compression factor of the saved image (1..100), higher + numbers result in higher quality but larger file size. Default 75. +progressive : bool + (Only available in JPEG-PIL) + Save as a progressive JPEG file (e.g. for images on the web). + Default False. +optimize : bool + (Only available in JPEG-PIL) + On saving, compute optimal Huffman coding tables (can reduce a few + percent of file size). Default False. +dpi : tuple of int + (Only available in JPEG-PIL) + The pixel density, ``(x,y)``. +icc_profile : object + (Only available in JPEG-PIL) + If present and true, the image is stored with the provided ICC profile. 
+ If this parameter is not provided, the image will be saved with no + profile attached. +exif : dict + (Only available in JPEG-PIL) + If present, the image will be stored with the provided raw EXIF data. +subsampling : str + (Only available in JPEG-PIL) + Sets the subsampling for the encoder. See Pillow docs for details. +qtables : object + (Only available in JPEG-PIL) + Set the qtables for the encoder. See Pillow docs for details. +quality_mode : str + (Only available in JPEG2000-PIL) + Either `"rates"` or `"dB"` depending on the units you want to use to + specify image quality. +quality : float + (Only available in JPEG2000-PIL) + Approximate size reduction (if quality mode is `rates`) or a signal to noise ratio + in decibels (if quality mode is `dB`). +loop : int + (Only available in GIF-PIL) + The number of iterations. Default 0 (meaning loop indefinitely). +duration : {float, list} + (Only available in GIF-PIL) + The duration (in seconds) of each frame. Either specify one value + that is used for all frames, or one value for each frame. + Note that in the GIF format the duration/delay is expressed in + hundredths of a second, which limits the precision of the duration. +fps : float + (Only available in GIF-PIL) + The number of frames per second. If duration is not given, the + duration for each frame is set to 1/fps. Default 10. +palettesize : int + (Only available in GIF-PIL) + The number of colors to quantize the image to. Is rounded to + the nearest power of two. Default 256. +subrectangles : bool + (Only available in GIF-PIL) + If True, will try and optimize the GIF by storing only the + rectangular parts of each frame that change with respect to the + previous. Default False. + +Notes +----- +To enable JPEG 2000 support, you need to build and install the OpenJPEG library, +version 2.0.0 or higher, before building the Python Imaging Library. 
Windows +users can install the OpenJPEG binaries available on the OpenJPEG website, but +must add them to their PATH in order to use PIL (if you fail to do this, you +will get errors about not being able to load the ``_imaging`` DLL). + +GIF images read with this plugin are always RGBA. The alpha channel is ignored +when saving RGB images. +""" + +import logging +import threading + +import numpy as np + +from ..core import Format, image_as_uint +from ..core.request import URI_FILE, URI_BYTES + + +logger = logging.getLogger(__name__) + + +# todo: Pillow ImageGrab module supports grabbing the screen on Win and OSX. + + +GENERIC_DOCS = """ + Parameters for reading + ---------------------- + + pilmode : str + From the Pillow documentation: + + * 'L' (8-bit pixels, grayscale) + * 'P' (8-bit pixels, mapped to any other mode using a color palette) + * 'RGB' (3x8-bit pixels, true color) + * 'RGBA' (4x8-bit pixels, true color with transparency mask) + * 'CMYK' (4x8-bit pixels, color separation) + * 'YCbCr' (3x8-bit pixels, color video format) + * 'I' (32-bit signed integer pixels) + * 'F' (32-bit floating point pixels) + + PIL also provides limited support for a few special modes, including + 'LA' ('L' with alpha), 'RGBX' (true color with padding) and 'RGBa' + (true color with premultiplied alpha). + + When translating a color image to grayscale (mode 'L', 'I' or 'F'), + the library uses the ITU-R 601-2 luma transform:: + + L = R * 299/1000 + G * 587/1000 + B * 114/1000 + as_gray : bool + If True, the image is converted using mode 'F'. When `mode` is + not None and `as_gray` is True, the image is first converted + according to `mode`, and the result is then "flattened" using + mode 'F'. +""" + + +class PillowFormat(Format): + """ + Base format class for Pillow formats. 
+ """ + + _pillow_imported = False + _Image = None + _modes = "i" + _description = "" + + def __init__(self, *args, plugin_id: str = None, **kwargs): + super(PillowFormat, self).__init__(*args, **kwargs) + # Used to synchronize _init_pillow(), see #244 + self._lock = threading.RLock() + + self._plugin_id = plugin_id + + @property + def plugin_id(self): + """The PIL plugin id.""" + return self._plugin_id # Set when format is created + + def _init_pillow(self): + with self._lock: + if not self._pillow_imported: + self._pillow_imported = True # more like tried to import + import PIL + + if not hasattr(PIL, "__version__"): # pragma: no cover + raise ImportError( + "Imageio Pillow plugin requires " "Pillow, not PIL!" + ) + from PIL import Image + + self._Image = Image + elif self._Image is None: # pragma: no cover + raise RuntimeError("Imageio Pillow plugin requires " "Pillow lib.") + Image = self._Image + + if self.plugin_id in ("PNG", "JPEG", "BMP", "GIF", "PPM"): + Image.preinit() + else: + Image.init() + return Image + + def _can_read(self, request): + Image = self._init_pillow() + if self.plugin_id in Image.OPEN: + factory, accept = Image.OPEN[self.plugin_id] + if accept: + if request.firstbytes and accept(request.firstbytes): + return True + + def _can_write(self, request): + Image = self._init_pillow() + if request.extension in self.extensions or request._uri_type in [ + URI_FILE, + URI_BYTES, + ]: + if self.plugin_id in Image.SAVE: + return True + + class Reader(Format.Reader): + def _open(self, pilmode=None, as_gray=False): + Image = self.format._init_pillow() + try: + factory, accept = Image.OPEN[self.format.plugin_id] + except KeyError: + raise RuntimeError("Format %s cannot read images." 
% self.format.name) + self._fp = self._get_file() + self._im = factory(self._fp, "") + if hasattr(Image, "_decompression_bomb_check"): + Image._decompression_bomb_check(self._im.size) + # Save the raw mode used by the palette for a BMP because it may not be the number of channels + # When the data is read, imageio hands the palette to PIL to handle and clears the rawmode argument + # However, there is a bug in PIL with handling animated GIFs with a different color palette on each frame. + # This issue is resolved by using the raw palette data but the rawmode information is now lost. So we + # store the raw mode for later use + if self._im.palette and self._im.palette.dirty: + self._im.palette.rawmode_saved = self._im.palette.rawmode + pil_try_read(self._im) + # Store args + self._kwargs = dict( + as_gray=as_gray, is_gray=_palette_is_grayscale(self._im) + ) + # setting mode=None is not the same as just not providing it + if pilmode is not None: + self._kwargs["mode"] = pilmode + # Set length + self._length = 1 + if hasattr(self._im, "n_frames"): + self._length = self._im.n_frames + + def _get_file(self): + self._we_own_fp = False + return self.request.get_file() + + def _close(self): + save_pillow_close(self._im) + if self._we_own_fp: + self._fp.close() + # else: request object handles closing the _fp + + def _get_length(self): + return self._length + + def _seek(self, index): + try: + self._im.seek(index) + except EOFError: + raise IndexError("Could not seek to index %i" % index) + + def _get_data(self, index): + if index >= self._length: + raise IndexError("Image index %i > %i" % (index, self._length)) + i = self._im.tell() + if i > index: + self._seek(index) # just try + else: + while i < index: # some formats need to be read in sequence + i += 1 + self._seek(i) + if self._im.palette and self._im.palette.dirty: + self._im.palette.rawmode_saved = self._im.palette.rawmode + self._im.getdata()[0] + im = pil_get_frame(self._im, **self._kwargs) + return im, 
self._im.info + + def _get_meta_data(self, index): + if not (index is None or index == 0): + raise IndexError() + return self._im.info + + class Writer(Format.Writer): + def _open(self): + Image = self.format._init_pillow() + try: + self._save_func = Image.SAVE[self.format.plugin_id] + except KeyError: + raise RuntimeError("Format %s cannot write images." % self.format.name) + self._fp = self.request.get_file() + self._meta = {} + self._written = False + + def _close(self): + pass # request object handled closing _fp + + def _append_data(self, im, meta): + if self._written: + raise RuntimeError( + "Format %s only supports single images." % self.format.name + ) + # Pop unit dimension for grayscale images + if im.ndim == 3 and im.shape[-1] == 1: + im = im[:, :, 0] + self._written = True + self._meta.update(meta) + img = ndarray_to_pil( + im, self.format.plugin_id, self._meta.pop("prefer_uint8", True) + ) + if "bits" in self._meta: + img = img.quantize() # Make it a P image, so bits arg is used + img.save(self._fp, format=self.format.plugin_id, **self._meta) + save_pillow_close(img) + + def set_meta_data(self, meta): + self._meta.update(meta) + + +class PNGFormat(PillowFormat): + """See :mod:`imageio.plugins.pillow_legacy`""" + + class Reader(PillowFormat.Reader): + def _open(self, pilmode=None, as_gray=False, ignoregamma=True): + return PillowFormat.Reader._open(self, pilmode=pilmode, as_gray=as_gray) + + def _get_data(self, index): + im, info = PillowFormat.Reader._get_data(self, index) + if not self.request.kwargs.get("ignoregamma", True): + # The gamma value in the file represents the gamma factor for the + # hardware on the system where the file was created, and is meant + # to be able to match the colors with the system on which the + # image is shown. 
See also issue #366 + try: + gamma = float(info["gamma"]) + except (KeyError, ValueError): + pass + else: + scale = float(65536 if im.dtype == np.uint16 else 255) + gain = 1.0 + im[:] = ((im / scale) ** gamma) * scale * gain + 0.4999 + return im, info + + # -- + + class Writer(PillowFormat.Writer): + def _open(self, compression=None, quantize=None, interlaced=False, **kwargs): + # Better default for compression + kwargs["compress_level"] = kwargs.get("compress_level", 9) + + if compression is not None: + if compression < 0 or compression > 9: + raise ValueError("Invalid PNG compression level: %r" % compression) + kwargs["compress_level"] = compression + if quantize is not None: + for bits in range(1, 9): + if 2**bits == quantize: + break + else: + raise ValueError( + "PNG quantize must be power of two, " "not %r" % quantize + ) + kwargs["bits"] = bits + if interlaced: + logger.warning("PIL PNG writer cannot produce interlaced images.") + + ok_keys = ( + "optimize", + "transparency", + "dpi", + "pnginfo", + "bits", + "compress_level", + "icc_profile", + "dictionary", + "prefer_uint8", + ) + for key in kwargs: + if key not in ok_keys: + raise TypeError("Invalid arg for PNG writer: %r" % key) + + PillowFormat.Writer._open(self) + self._meta.update(kwargs) + + def _append_data(self, im, meta): + if str(im.dtype) == "uint16" and (im.ndim == 2 or im.shape[-1] == 1): + im = image_as_uint(im, bitdepth=16) + else: + im = image_as_uint(im, bitdepth=8) + PillowFormat.Writer._append_data(self, im, meta) + + +class JPEGFormat(PillowFormat): + """See :mod:`imageio.plugins.pillow_legacy`""" + + class Reader(PillowFormat.Reader): + def _open(self, pilmode=None, as_gray=False, exifrotate=True): + return PillowFormat.Reader._open(self, pilmode=pilmode, as_gray=as_gray) + + def _get_file(self): + # Pillow uses seek for JPG, so we cannot directly stream from web + if self.request.filename.startswith( + ("http://", "https://") + ) or ".zip/" in self.request.filename.replace("\\", "/"): 
+ self._we_own_fp = True + return open(self.request.get_local_filename(), "rb") + else: + self._we_own_fp = False + return self.request.get_file() + + def _get_data(self, index): + im, info = PillowFormat.Reader._get_data(self, index) + + # Handle exif + if "exif" in info: + from PIL.ExifTags import TAGS + + info["EXIF_MAIN"] = {} + for tag, value in self._im._getexif().items(): + decoded = TAGS.get(tag, tag) + info["EXIF_MAIN"][decoded] = value + + im = self._rotate(im, info) + return im, info + + def _rotate(self, im, meta): + """Use Orientation information from EXIF meta data to + orient the image correctly. Similar code as in FreeImage plugin. + """ + if self.request.kwargs.get("exifrotate", True): + try: + ori = meta["EXIF_MAIN"]["Orientation"] + except KeyError: # pragma: no cover + pass # Orientation not available + else: # pragma: no cover - we cannot touch all cases + # www.impulseadventure.com/photo/exif-orientation.html + if ori in [1, 2]: + pass + if ori in [3, 4]: + im = np.rot90(im, 2) + if ori in [5, 6]: + im = np.rot90(im, 3) + if ori in [7, 8]: + im = np.rot90(im) + if ori in [2, 4, 5, 7]: # Flipped cases (rare) + im = np.fliplr(im) + return im + + # -- + + class Writer(PillowFormat.Writer): + def _open(self, quality=75, progressive=False, optimize=False, **kwargs): + # The JPEG quality can be between 0 (worst) and 100 (best) + quality = int(quality) + if quality < 0 or quality > 100: + raise ValueError("JPEG quality should be between 0 and 100.") + + kwargs["quality"] = quality + kwargs["progressive"] = bool(progressive) + kwargs["optimize"] = bool(progressive) + + PillowFormat.Writer._open(self) + self._meta.update(kwargs) + + def _append_data(self, im, meta): + if im.ndim == 3 and im.shape[-1] == 4: + raise IOError("JPEG does not support alpha channel.") + im = image_as_uint(im, bitdepth=8) + PillowFormat.Writer._append_data(self, im, meta) + return + + +class JPEG2000Format(PillowFormat): + """See :mod:`imageio.plugins.pillow_legacy`""" + + 
class Reader(PillowFormat.Reader): + def _open(self, pilmode=None, as_gray=False): + return PillowFormat.Reader._open(self, pilmode=pilmode, as_gray=as_gray) + + def _get_file(self): + # Pillow uses seek for JPG, so we cannot directly stream from web + if self.request.filename.startswith( + ("http://", "https://") + ) or ".zip/" in self.request.filename.replace("\\", "/"): + self._we_own_fp = True + return open(self.request.get_local_filename(), "rb") + else: + self._we_own_fp = False + return self.request.get_file() + + def _get_data(self, index): + im, info = PillowFormat.Reader._get_data(self, index) + + # Handle exif + if "exif" in info: + from PIL.ExifTags import TAGS + + info["EXIF_MAIN"] = {} + for tag, value in self._im._getexif().items(): + decoded = TAGS.get(tag, tag) + info["EXIF_MAIN"][decoded] = value + + im = self._rotate(im, info) + return im, info + + def _rotate(self, im, meta): + """Use Orientation information from EXIF meta data to + orient the image correctly. Similar code as in FreeImage plugin. 
+ """ + if self.request.kwargs.get("exifrotate", True): + try: + ori = meta["EXIF_MAIN"]["Orientation"] + except KeyError: # pragma: no cover + pass # Orientation not available + else: # pragma: no cover - we cannot touch all cases + # www.impulseadventure.com/photo/exif-orientation.html + if ori in [1, 2]: + pass + if ori in [3, 4]: + im = np.rot90(im, 2) + if ori in [5, 6]: + im = np.rot90(im, 3) + if ori in [7, 8]: + im = np.rot90(im) + if ori in [2, 4, 5, 7]: # Flipped cases (rare) + im = np.fliplr(im) + return im + + # -- + + class Writer(PillowFormat.Writer): + def _open(self, quality_mode="rates", quality=5, **kwargs): + # Check quality - in Pillow it should be no higher than 95 + if quality_mode not in {"rates", "dB"}: + raise ValueError("Quality mode should be either 'rates' or 'dB'") + + quality = float(quality) + + if quality_mode == "rates" and (quality < 1 or quality > 1000): + raise ValueError( + "The quality value {} seems to be an invalid rate!".format(quality) + ) + elif quality_mode == "dB" and (quality < 15 or quality > 100): + raise ValueError( + "The quality value {} seems to be an invalid PSNR!".format(quality) + ) + + kwargs["quality_mode"] = quality_mode + kwargs["quality_layers"] = [quality] + + PillowFormat.Writer._open(self) + self._meta.update(kwargs) + + def _append_data(self, im, meta): + if im.ndim == 3 and im.shape[-1] == 4: + raise IOError( + "The current implementation of JPEG 2000 does not support alpha channel." + ) + im = image_as_uint(im, bitdepth=8) + PillowFormat.Writer._append_data(self, im, meta) + return + + +def save_pillow_close(im): + # see issue #216 and #300 + if hasattr(im, "close"): + if hasattr(getattr(im, "fp", None), "close"): + im.close() + + +# Func from skimage + +# This cells contains code from scikit-image, in particular from +# http://github.com/scikit-image/scikit-image/blob/master/ +# skimage/io/_plugins/pil_plugin.py +# The scikit-image license applies. 
+ + +def pil_try_read(im): + try: + # this will raise an IOError if the file is not readable + im.getdata()[0] + except IOError as e: + site = "http://pillow.readthedocs.io/en/latest/installation.html" + site += "#external-libraries" + pillow_error_message = str(e) + error_message = ( + 'Could not load "%s" \n' + 'Reason: "%s"\n' + "Please see documentation at: %s" + % (im.filename, pillow_error_message, site) + ) + raise ValueError(error_message) + + +def _palette_is_grayscale(pil_image): + if pil_image.mode != "P": + return False + elif pil_image.info.get("transparency", None): # see issue #475 + return False + # get palette as an array with R, G, B columns + # Note: starting in pillow 9.1 palettes may have less than 256 entries + palette = np.asarray(pil_image.getpalette()).reshape((-1, 3)) + # Not all palette colors are used; unused colors have junk values. + start, stop = pil_image.getextrema() + valid_palette = palette[start : stop + 1] + # Image is grayscale if channel differences (R - G and G - B) + # are all zero. + return np.allclose(np.diff(valid_palette), 0) + + +def pil_get_frame(im, is_gray=None, as_gray=None, mode=None, dtype=None): + """ + is_gray: Whether the image *is* gray (by inspecting its palette). + as_gray: Whether the resulting image must be converted to gaey. + mode: The mode to convert to. + """ + + if is_gray is None: + is_gray = _palette_is_grayscale(im) + + frame = im + + # Convert ... + if mode is not None: + # Mode is explicitly given ... + if mode != im.mode: + frame = im.convert(mode) + elif as_gray: + pass # don't do any auto-conversions (but do the explicit one above) + elif im.mode == "P" and is_gray: + # Paletted images that are already gray by their palette + # are converted so that the resulting numpy array is 2D. + frame = im.convert("L") + elif im.mode == "P": + # Paletted images are converted to RGB/RGBA. We jump some loops to make + # this work well. 
+ if im.info.get("transparency", None) is not None: + # Let Pillow apply the transparency, see issue #210 and #246 + frame = im.convert("RGBA") + elif im.palette.mode in ("RGB", "RGBA"): + # We can do this ourselves. Pillow seems to sometimes screw + # this up if a multi-gif has a palette for each frame ... + # Create palette array + p = np.frombuffer(im.palette.getdata()[1], np.uint8) + # Restore the raw mode that was saved to be used to parse the palette + if hasattr(im.palette, "rawmode_saved"): + im.palette.rawmode = im.palette.rawmode_saved + mode = im.palette.rawmode if im.palette.rawmode else im.palette.mode + nchannels = len(mode) + # Shape it. + p.shape = -1, nchannels + if p.shape[1] == 3 or (p.shape[1] == 4 and mode[-1] == "X"): + p = np.column_stack((p[:, :3], 255 * np.ones(p.shape[0], p.dtype))) + # Swap the axes if the mode is in BGR and not RGB + if mode.startswith("BGR"): + p = p[:, [2, 1, 0]] if p.shape[1] == 3 else p[:, [2, 1, 0, 3]] + # Apply palette + frame_paletted = np.array(im, np.uint8) + try: + frame = p[frame_paletted] + except Exception: + # Ok, let PIL do it. The introduction of the branch that + # tests `im.info['transparency']` should make this happen + # much less often, but let's keep it, to be safe. + frame = im.convert("RGBA") + else: + # Let Pillow do it. Unlinke skimage, we always convert + # to RGBA; palettes can be RGBA. + if True: # im.format == 'PNG' and 'transparency' in im.info: + frame = im.convert("RGBA") + else: + frame = im.convert("RGB") + elif "A" in im.mode: + frame = im.convert("RGBA") + elif im.mode == "CMYK": + frame = im.convert("RGB") + elif im.format == "GIF" and im.mode == "RGB": + # pillow9 returns RGBA images for subsequent frames so that it can deal + # with multi-frame GIF that use frame-level palettes and don't dispose + # all areas. + + # For backwards compatibility, we promote everything to RGBA. 
+ frame = im.convert("RGBA") + + # Apply a post-convert if necessary + if as_gray: + frame = frame.convert("F") # Scipy compat + elif not isinstance(frame, np.ndarray) and frame.mode == "1": + # Workaround for crash in PIL. When im is 1-bit, the call array(im) + # can cause a segfault, or generate garbage. See + # https://github.com/scipy/scipy/issues/2138 and + # https://github.com/python-pillow/Pillow/issues/350. + # + # This converts im from a 1-bit image to an 8-bit image. + frame = frame.convert("L") + + # Convert to numpy array + if im.mode.startswith("I;16"): + # e.g. in16 PNG's + shape = im.size + dtype = ">u2" if im.mode.endswith("B") else "= 0: + arr = arr.astype(np.uint8) + mode = mode_base = "L" + + else: + arr = image_as_uint(arr, bitdepth=16) + + else: + arr = image_as_uint(arr, bitdepth=8) + mode = "L" + mode_base = "L" + + if mode == "I;16" and int(getattr(Image, "__version__", "0").split(".")[0]) < 6: + # Pillow < v6.0.0 has limited support for the "I;16" mode, + # requiring us to fall back to this expensive workaround. + # tobytes actually creates a copy of the image, which is costly. + array_buffer = arr.tobytes() + if arr.ndim == 2: + im = Image.new(mode_base, arr.T.shape) + im.frombytes(array_buffer, "raw", mode) + else: + image_shape = (arr.shape[1], arr.shape[0]) + im = Image.frombytes(mode, image_shape, array_buffer) + return im + else: + return Image.fromarray(arr, mode) + + +# imported for backwards compatibility +from .pillowmulti import GIFFormat, TIFFFormat # noqa: E402, F401 diff --git a/.venv/Lib/site-packages/imageio/plugins/pillowmulti.py b/.venv/Lib/site-packages/imageio/plugins/pillowmulti.py new file mode 100644 index 00000000..e41c7107 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/pillowmulti.py @@ -0,0 +1,338 @@ +""" +PIL formats for multiple images. 
+""" + +import logging + +import numpy as np + +from .pillow_legacy import PillowFormat, image_as_uint, ndarray_to_pil + +logger = logging.getLogger(__name__) + +NeuQuant = None # we can implement this when we need it + + +class TIFFFormat(PillowFormat): + _modes = "i" # arg, why bother; people should use the tiffile version + _description = "TIFF format (Pillow)" + + +class GIFFormat(PillowFormat): + """See :mod:`imageio.plugins.pillow_legacy`""" + + _modes = "iI" + _description = "Static and animated gif (Pillow)" + + # GIF reader needs no modifications compared to base pillow reader + + class Writer(PillowFormat.Writer): # pragma: no cover + def _open( + self, + loop=0, + duration=None, + fps=10, + palettesize=256, + quantizer=0, + subrectangles=False, + ): + from PIL import __version__ as pillow_version + + major, minor, patch = tuple(int(x) for x in pillow_version.split(".")) + if major == 10 and minor >= 1: + raise ImportError( + f"Pillow v{pillow_version} is not supported by ImageIO's legacy " + "pillow plugin when writing GIFs. Consider switching to the new " + "plugin or downgrading to `pillow<10.1.0`." + ) + + # Check palettesize + palettesize = int(palettesize) + if palettesize < 2 or palettesize > 256: + raise ValueError("GIF quantize param must be 2..256") + if palettesize not in [2, 4, 8, 16, 32, 64, 128, 256]: + palettesize = 2 ** int(np.log2(128) + 0.999) + logger.warning( + "Warning: palettesize (%r) modified to a factor of " + "two between 2-256." 
% palettesize + ) + # Duratrion / fps + if duration is None: + self._duration = 1.0 / float(fps) + elif isinstance(duration, (list, tuple)): + self._duration = [float(d) for d in duration] + else: + self._duration = float(duration) + # loop + loop = float(loop) + if loop <= 0 or loop == float("inf"): + loop = 0 + loop = int(loop) + # Subrectangles / dispose + subrectangles = bool(subrectangles) + self._dispose = 1 if subrectangles else 2 + # The "0" (median cut) quantizer is by far the best + + fp = self.request.get_file() + self._writer = GifWriter( + fp, subrectangles, loop, quantizer, int(palettesize) + ) + + def _close(self): + self._writer.close() + + def _append_data(self, im, meta): + im = image_as_uint(im, bitdepth=8) + if im.ndim == 3 and im.shape[-1] == 1: + im = im[:, :, 0] + duration = self._duration + if isinstance(duration, list): + duration = duration[min(len(duration) - 1, self._writer._count)] + dispose = self._dispose + self._writer.add_image(im, duration, dispose) + + return + + +def intToBin(i): + return i.to_bytes(2, byteorder="little") + + +class GifWriter: # pragma: no cover + """Class that for helping write the animated GIF file. This is based on + code from images2gif.py (part of visvis). The version here is modified + to allow streamed writing. 
+ """ + + def __init__( + self, + file, + opt_subrectangle=True, + opt_loop=0, + opt_quantizer=0, + opt_palette_size=256, + ): + self.fp = file + + self.opt_subrectangle = opt_subrectangle + self.opt_loop = opt_loop + self.opt_quantizer = opt_quantizer + self.opt_palette_size = opt_palette_size + + self._previous_image = None # as np array + self._global_palette = None # as bytes + self._count = 0 + + from PIL.GifImagePlugin import getdata + + self.getdata = getdata + + def add_image(self, im, duration, dispose): + # Prepare image + im_rect, rect = im, (0, 0) + if self.opt_subrectangle: + im_rect, rect = self.getSubRectangle(im) + im_pil = self.converToPIL(im_rect, self.opt_quantizer, self.opt_palette_size) + + # Get pallette - apparently, this is the 3d element of the header + # (but it has not always been). Best we've got. Its not the same + # as im_pil.palette.tobytes(). + from PIL.GifImagePlugin import getheader + + palette = getheader(im_pil)[0][3] + + # Write image + if self._count == 0: + self.write_header(im_pil, palette, self.opt_loop) + self._global_palette = palette + self.write_image(im_pil, palette, rect, duration, dispose) + # assert len(palette) == len(self._global_palette) + + # Bookkeeping + self._previous_image = im + self._count += 1 + + def write_header(self, im, globalPalette, loop): + # Gather info + header = self.getheaderAnim(im) + appext = self.getAppExt(loop) + # Write + self.fp.write(header) + self.fp.write(globalPalette) + self.fp.write(appext) + + def close(self): + self.fp.write(";".encode("utf-8")) # end gif + + def write_image(self, im, palette, rect, duration, dispose): + fp = self.fp + + # Gather local image header and data, using PIL's getdata. That + # function returns a list of bytes objects, but which parts are + # what has changed multiple times, so we put together the first + # parts until we have enough to form the image header. 
+ data = self.getdata(im) + imdes = b"" + while data and len(imdes) < 11: + imdes += data.pop(0) + assert len(imdes) == 11 + + # Make image descriptor suitable for using 256 local color palette + lid = self.getImageDescriptor(im, rect) + graphext = self.getGraphicsControlExt(duration, dispose) + + # Write local header + if (palette != self._global_palette) or (dispose != 2): + # Use local color palette + fp.write(graphext) + fp.write(lid) # write suitable image descriptor + fp.write(palette) # write local color table + fp.write(b"\x08") # LZW minimum size code + else: + # Use global color palette + fp.write(graphext) + fp.write(imdes) # write suitable image descriptor + + # Write image data + for d in data: + fp.write(d) + + def getheaderAnim(self, im): + """Get animation header. To replace PILs getheader()[0]""" + bb = b"GIF89a" + bb += intToBin(im.size[0]) + bb += intToBin(im.size[1]) + bb += b"\x87\x00\x00" + return bb + + def getImageDescriptor(self, im, xy=None): + """Used for the local color table properties per image. + Otherwise global color table applies to all frames irrespective of + whether additional colors comes in play that require a redefined + palette. Still a maximum of 256 color per frame, obviously. + + Written by Ant1 on 2010-08-22 + Modified by Alex Robinson in Janurari 2011 to implement subrectangles. + """ + + # Defaule use full image and place at upper left + if xy is None: + xy = (0, 0) + + # Image separator, + bb = b"\x2C" + + # Image position and size + bb += intToBin(xy[0]) # Left position + bb += intToBin(xy[1]) # Top position + bb += intToBin(im.size[0]) # image width + bb += intToBin(im.size[1]) # image height + + # packed field: local color table flag1, interlace0, sorted table0, + # reserved00, lct size111=7=2^(7 + 1)=256. + bb += b"\x87" + + # LZW minimum size code now comes later, begining of [imagedata] blocks + return bb + + def getAppExt(self, loop): + """Application extension. This part specifies the amount of loops. 
+ If loop is 0 or inf, it goes on infinitely. + """ + if loop == 1: + return b"" + if loop == 0: + loop = 2**16 - 1 + bb = b"" + if loop != 0: # omit the extension if we would like a nonlooping gif + bb = b"\x21\xFF\x0B" # application extension + bb += b"NETSCAPE2.0" + bb += b"\x03\x01" + bb += intToBin(loop) + bb += b"\x00" # end + return bb + + def getGraphicsControlExt(self, duration=0.1, dispose=2): + """Graphics Control Extension. A sort of header at the start of + each image. Specifies duration and transparancy. + + Dispose + ------- + * 0 - No disposal specified. + * 1 - Do not dispose. The graphic is to be left in place. + * 2 - Restore to background color. The area used by the graphic + must be restored to the background color. + * 3 - Restore to previous. The decoder is required to restore the + area overwritten by the graphic with what was there prior to + rendering the graphic. + * 4-7 -To be defined. + """ + + bb = b"\x21\xF9\x04" + bb += chr((dispose & 3) << 2).encode("utf-8") + # low bit 1 == transparency, + # 2nd bit 1 == user input , next 3 bits, the low two of which are used, + # are dispose. + bb += intToBin(int(duration * 100 + 0.5)) # in 100th of seconds + bb += b"\x00" # no transparant color + bb += b"\x00" # end + return bb + + def getSubRectangle(self, im): + """Calculate the minimal rectangle that need updating. Returns + a two-element tuple containing the cropped image and an x-y tuple. + + Calculating the subrectangles takes extra time, obviously. However, + if the image sizes were reduced, the actual writing of the GIF + goes faster. In some cases applying this method produces a GIF faster. 
+ """ + + # Cannot do subrectangle for first image + if self._count == 0: + return im, (0, 0) + + prev = self._previous_image + + # Get difference, sum over colors + diff = np.abs(im - prev) + if diff.ndim == 3: + diff = diff.sum(2) + # Get begin and end for both dimensions + X = np.argwhere(diff.sum(0)) + Y = np.argwhere(diff.sum(1)) + # Get rect coordinates + if X.size and Y.size: + x0, x1 = int(X[0]), int(X[-1] + 1) + y0, y1 = int(Y[0]), int(Y[-1] + 1) + else: # No change ... make it minimal + x0, x1 = 0, 2 + y0, y1 = 0, 2 + + return im[y0:y1, x0:x1], (x0, y0) + + def converToPIL(self, im, quantizer, palette_size=256): + """Convert image to Paletted PIL image. + + PIL used to not do a very good job at quantization, but I guess + this has improved a lot (at least in Pillow). I don't think we need + neuqant (and we can add it later if we really want). + """ + + im_pil = ndarray_to_pil(im, "gif") + + if quantizer in ("nq", "neuquant"): + # NeuQuant algorithm + nq_samplefac = 10 # 10 seems good in general + im_pil = im_pil.convert("RGBA") # NQ assumes RGBA + nqInstance = NeuQuant(im_pil, nq_samplefac) # Learn colors + im_pil = nqInstance.quantize(im_pil, colors=palette_size) + elif quantizer in (0, 1, 2): + # Adaptive PIL algorithm + if quantizer == 2: + im_pil = im_pil.convert("RGBA") + else: + im_pil = im_pil.convert("RGB") + im_pil = im_pil.quantize(colors=palette_size, method=quantizer) + else: + raise ValueError("Invalid value for quantizer: %r" % quantizer) + return im_pil diff --git a/.venv/Lib/site-packages/imageio/plugins/pyav.py b/.venv/Lib/site-packages/imageio/plugins/pyav.py new file mode 100644 index 00000000..bc9b37df --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/pyav.py @@ -0,0 +1,1191 @@ +"""Read/Write Videos (and images) using PyAV. + +.. note:: + To use this plugin you need to have `PyAV `_ + installed:: + + pip install av + +This plugin wraps pyAV, a pythonic binding for the FFMPEG library. 
It is similar +to our FFMPEG plugin, has improved performance, features a robust interface, and +aims to supersede the FFMPEG plugin in the future. + + +Methods +------- +.. note:: + Check the respective function for a list of supported kwargs and detailed + documentation. + +.. autosummary:: + :toctree: + + PyAVPlugin.read + PyAVPlugin.iter + PyAVPlugin.write + PyAVPlugin.properties + PyAVPlugin.metadata + +Additional methods available inside the :func:`imopen ` +context: + +.. autosummary:: + :toctree: + + PyAVPlugin.init_video_stream + PyAVPlugin.write_frame + PyAVPlugin.set_video_filter + PyAVPlugin.container_metadata + PyAVPlugin.video_stream_metadata + +Advanced API +------------ + +In addition to the default ImageIO v3 API this plugin exposes custom functions +that are specific to reading/writing video and its metadata. These are available +inside the :func:`imopen ` context and allow fine-grained +control over how the video is processed. The functions are documented above and +below you can find a usage example:: + + import imageio.v3 as iio + + with iio.imopen("test.mp4", "w", plugin="pyav") as file: + file.init_video_stream("libx264") + file.container_metadata["comment"] = "This video was created using ImageIO." + + for _ in range(5): + for frame in iio.imiter("imageio:newtonscradle.gif"): + file.write_frame(frame) + + meta = iio.immeta("test.mp4", plugin="pyav") + assert meta["comment"] == "This video was created using ImageIO." + + + +Pixel Formats (Colorspaces) +--------------------------- + +By default, this plugin converts the video into 8-bit RGB (called ``rgb24`` in +ffmpeg). This is a useful behavior for many use-cases, but sometimes you may +want to use the video's native colorspace or you may wish to convert the video +into an entirely different colorspace. This is controlled using the ``format`` +kwarg. 
You can use ``format=None`` to leave the image in its native colorspace +or specify any colorspace supported by FFMPEG as long as it is stridable, i.e., +as long as it can be represented by a single numpy array. Some useful choices +include: + +- rgb24 (default; 8-bit RGB) +- rgb48le (16-bit lower-endian RGB) +- bgr24 (8-bit BGR; openCVs default colorspace) +- gray (8-bit grayscale) +- yuv444p (8-bit channel-first YUV) + +Further, FFMPEG maintains a list of available formats, albeit not as part of the +narrative docs. It can be `found here +`_ (warning: C source +code). + +Filters +------- + +On top of providing basic read/write functionality, this plugin allows you to +use the full collection of `video filters available in FFMPEG +`_. This means that you +can apply excessive preprocessing to your video before retrieving it as a numpy +array or apply excessive post-processing before you encode your data. + +Filters come in two forms: sequences or graphs. Filter sequences are, as the +name suggests, sequences of filters that are applied one after the other. They +are specified using the ``filter_sequence`` kwarg. Filter graphs, on the other +hand, come in the form of a directed graph and are specified using the +``filter_graph`` kwarg. + +.. note:: + All filters are either sequences or graphs. If all you want is to apply a + single filter, you can do this by specifying a filter sequence with a single + entry. + +A ``filter_sequence`` is a list of filters, each defined through a 2-element +tuple of the form ``(filter_name, filter_parameters)``. The first element of the +tuple is the name of the filter. The second element are the filter parameters, +which can be given either as a string or a dict. The string matches the same +format that you would use when specifying the filter using the ffmpeg +command-line tool and the dict has entries of the form ``parameter:value``. 
For +example:: + + import imageio.v3 as iio + + # using a filter_parameters str + img1 = iio.imread( + "imageio:cockatoo.mp4", + plugin="pyav", + filter_sequence=[ + ("rotate", "45*PI/180") + ] + ) + + # using a filter_parameters dict + img2 = iio.imread( + "imageio:cockatoo.mp4", + plugin="pyav", + filter_sequence=[ + ("rotate", {"angle":"45*PI/180", "fillcolor":"AliceBlue"}) + ] + ) + +A ``filter_graph``, on the other hand, is specified using a ``(nodes, edges)`` +tuple. It is best explained using an example:: + + img = iio.imread( + "imageio:cockatoo.mp4", + plugin="pyav", + filter_graph=( + { + "split": ("split", ""), + "scale_overlay":("scale", "512:-1"), + "overlay":("overlay", "x=25:y=25:enable='between(t,1,8)'"), + }, + [ + ("video_in", "split", 0, 0), + ("split", "overlay", 0, 0), + ("split", "scale_overlay", 1, 0), + ("scale_overlay", "overlay", 0, 1), + ("overlay", "video_out", 0, 0), + ] + ) + ) + +The above transforms the video to have picture-in-picture of itself in the top +left corner. As you can see, nodes are specified using a dict which has names as +its keys and filter tuples as values; the same tuples as the ones used when +defining a filter sequence. Edges are a list of a 4-tuples of the form +``(node_out, node_in, output_idx, input_idx)`` and specify which two filters are +connected and which inputs/outputs should be used for this. + +Further, there are two special nodes in a filter graph: ``video_in`` and +``video_out``, which represent the graph's input and output respectively. These +names can not be chosen for other nodes (those nodes would simply be +overwritten), and for a graph to be valid there must be a path from the input to +the output and all nodes in the graph must be connected. + +While most graphs are quite simple, they can become very complex and we +recommend that you read through the `FFMPEG documentation +`_ and their +examples to better understand how to use them. 
+ +""" + +from fractions import Fraction +from math import ceil +from typing import Any, Dict, List, Optional, Tuple, Union, Generator + +import av +import av.filter +import numpy as np +from numpy.lib.stride_tricks import as_strided + +from ..core import Request +from ..core.request import URI_BYTES, InitializationError, IOMode +from ..core.v3_plugin_api import ImageProperties, PluginV3 + + +def _format_to_dtype(format: av.VideoFormat) -> np.dtype: + """Convert a pyAV video format into a numpy dtype""" + + if len(format.components) == 0: + # fake format + raise ValueError( + f"Can't determine dtype from format `{format.name}`. It has no channels." + ) + + endian = ">" if format.is_big_endian else "<" + dtype = "f" if "f32" in format.name else "u" + bits_per_channel = [x.bits for x in format.components] + n_bytes = str(int(ceil(bits_per_channel[0] / 8))) + + return np.dtype(endian + dtype + n_bytes) + + +def _get_frame_shape(frame: av.VideoFrame) -> Tuple[int, ...]: + """Compute the frame's array shape + + Parameters + ---------- + frame : av.VideoFrame + A frame for which the resulting shape should be computed. + + Returns + ------- + shape : Tuple[int, ...] + A tuple describing the shape of the image data in the frame. + + """ + + widths = [component.width for component in frame.format.components] + heights = [component.height for component in frame.format.components] + bits = np.array([component.bits for component in frame.format.components]) + line_sizes = [plane.line_size for plane in frame.planes] + + subsampled_width = widths[:-1] != widths[1:] + subsampled_height = heights[:-1] != heights[1:] + unaligned_components = np.any(bits % 8 != 0) or (line_sizes[:-1] != line_sizes[1:]) + if subsampled_width or subsampled_height or unaligned_components: + raise IOError( + f"{frame.format.name} can't be expressed as a strided array." + "Use `format=` to select a format to convert into." 
+ ) + + shape = [frame.height, frame.width] + + # ffmpeg doesn't have a notion of channel-first or channel-last formats + # instead it stores frames in one or more planes which contain individual + # components of a pixel depending on the pixel format. For channel-first + # formats each component lives on a separate plane (n_planes) and for + # channel-last formats all components are packed on a single plane + # (n_channels) + n_planes = max([component.plane for component in frame.format.components]) + 1 + if n_planes > 1: + shape = [n_planes] + shape + + channels_per_plane = [0] * n_planes + for component in frame.format.components: + channels_per_plane[component.plane] += 1 + n_channels = max(channels_per_plane) + + if n_channels > 1: + shape = shape + [n_channels] + + return tuple(shape) + + +class PyAVPlugin(PluginV3): + """Support for pyAV as backend. + + Parameters + ---------- + request : iio.Request + A request object that represents the users intent. It provides a + standard interface to access various the various ImageResources and + serves them to the plugin as a file object (or file). Check the docs for + details. + container : str + Only used during `iio_mode="w"`! If not None, overwrite the default container + format chosen by pyav. + kwargs : Any + Additional kwargs are forwarded to PyAV's constructor. + + """ + + def __init__(self, request: Request, *, container: str = None, **kwargs) -> None: + """Initialize a new Plugin Instance. + + See Plugin's docstring for detailed documentation. + + Notes + ----- + The implementation here stores the request as a local variable that is + exposed using a @property below. If you inherit from PluginV3, remember + to call ``super().__init__(request)``. 
+ + """ + + super().__init__(request) + + self._container = None + self._video_stream = None + self._video_filter = None + + if request.mode.io_mode == IOMode.read: + self._next_idx = 0 + try: + if request._uri_type == 5: # 5 is the value of URI_HTTP + # pyav should read from HTTP by itself. This enables reading + # HTTP-based streams like DASH. Note that solving streams + # like this is temporary until the new request object gets + # implemented. + self._container = av.open(request.raw_uri, **kwargs) + else: + self._container = av.open(request.get_file(), **kwargs) + self._video_stream = self._container.streams.video[0] + self._decoder = self._container.decode(video=0) + except av.AVError: + if isinstance(request.raw_uri, bytes): + msg = "PyAV does not support these ``" + else: + msg = f"PyAV does not support `{request.raw_uri}`" + raise InitializationError(msg) from None + else: + self.frames_written = 0 + file_handle = self.request.get_file() + filename = getattr(file_handle, "name", None) + extension = self.request.extension or self.request.format_hint + if extension is None: + raise InitializationError("Can't determine output container to use.") + + # hacky, but beats running our own format selection logic + # (since av_guess_format is not exposed) + try: + setattr(file_handle, "name", filename or "tmp" + extension) + except AttributeError: + pass # read-only, nothing we can do + + try: + self._container = av.open( + file_handle, mode="w", format=container, **kwargs + ) + except ValueError: + raise InitializationError( + f"PyAV can not write to `{self.request.raw_uri}`" + ) + + # --------------------- + # Standard V3 Interface + # --------------------- + + def read( + self, + *, + index: int = ..., + format: str = "rgb24", + filter_sequence: List[Tuple[str, Union[str, dict]]] = None, + filter_graph: Tuple[dict, List] = None, + constant_framerate: bool = None, + thread_count: int = 0, + thread_type: str = None, + ) -> np.ndarray: + """Read frames from the 
video. + + If ``index`` is an integer, this function reads the index-th frame from + the file. If ``index`` is ... (Ellipsis), this function reads all frames + from the video, stacks them along the first dimension, and returns a + batch of frames. + + Parameters + ---------- + index : int + The index of the frame to read, e.g. ``index=5`` reads the 5th + frame. If ``...``, read all the frames in the video and stack them + along a new, prepended, batch dimension. + format : str + Set the returned colorspace. If not None (default: rgb24), convert + the data into the given format before returning it. If ``None`` + return the data in the encoded format if it can be expressed as a + strided array; otherwise raise an Exception. + filter_sequence : List[str, str, dict] + If not None, apply the given sequence of FFmpeg filters to each + ndimage. Check the (module-level) plugin docs for details and + examples. + filter_graph : (dict, List) + If not None, apply the given graph of FFmpeg filters to each + ndimage. The graph is given as a tuple of two dicts. The first dict + contains a (named) set of nodes, and the second dict contains a set + of edges between nodes of the previous dict. Check the (module-level) + plugin docs for details and examples. + constant_framerate : bool + If True assume the video's framerate is constant. This allows for + faster seeking inside the file. If False, the video is reset before + each read and searched from the beginning. If None (default), this + value will be read from the container format. + thread_count : int + How many threads to use when decoding a frame. The default is 0, + which will set the number using ffmpeg's default, which is based on + the codec, number of available cores, threadding model, and other + considerations. + thread_type : str + The threading model to be used. 
One of + + - `"SLICE"`: threads assemble parts of the current frame + - `"FRAME"`: threads may assemble future frames + - None (default): Uses ``"FRAME"`` if ``index=...`` and ffmpeg's + default otherwise. + + + Returns + ------- + frame : np.ndarray + A numpy array containing loaded frame data. + + Notes + ----- + Accessing random frames repeatedly is costly (O(k), where k is the + average distance between two keyframes). You should do so only sparingly + if possible. In some cases, it can be faster to bulk-read the video (if + it fits into memory) and to then access the returned ndarray randomly. + + The current implementation may cause problems for b-frames, i.e., + bidirectionaly predicted pictures. I lack test videos to write unit + tests for this case. + + Reading from an index other than ``...``, i.e. reading a single frame, + currently doesn't support filters that introduce delays. + + """ + + if index is ...: + props = self.properties(format=format) + uses_filter = ( + self._video_filter is not None + or filter_graph is not None + or filter_sequence is not None + ) + + self._container.seek(0) + if not uses_filter and props.shape[0] != 0: + frames = np.empty(props.shape, dtype=props.dtype) + for idx, frame in enumerate( + self.iter( + format=format, + filter_sequence=filter_sequence, + filter_graph=filter_graph, + thread_count=thread_count, + thread_type=thread_type or "FRAME", + ) + ): + frames[idx] = frame + else: + frames = np.stack( + [ + x + for x in self.iter( + format=format, + filter_sequence=filter_sequence, + filter_graph=filter_graph, + thread_count=thread_count, + thread_type=thread_type or "FRAME", + ) + ] + ) + + # reset stream container, because threading model can't change after + # first access + self._video_stream.close() + self._video_stream = self._container.streams.video[0] + + return frames + + if thread_type is not None and thread_type != self._video_stream.thread_type: + self._video_stream.thread_type = thread_type + if ( + 
thread_count != 0 + and thread_count != self._video_stream.codec_context.thread_count + ): + # in FFMPEG thread_count == 0 means use the default count, which we + # change to mean don't change the thread count. + self._video_stream.codec_context.thread_count = thread_count + + if constant_framerate is None: + constant_framerate = not self._container.format.variable_fps + + # note: cheap for contigous incremental reads + self._seek(index, constant_framerate=constant_framerate) + desired_frame = next(self._decoder) + self._next_idx += 1 + + self.set_video_filter(filter_sequence, filter_graph) + if self._video_filter is not None: + desired_frame = self._video_filter.send(desired_frame) + + return self._unpack_frame(desired_frame, format=format) + + def iter( + self, + *, + format: str = "rgb24", + filter_sequence: List[Tuple[str, Union[str, dict]]] = None, + filter_graph: Tuple[dict, List] = None, + thread_count: int = 0, + thread_type: str = None, + ) -> np.ndarray: + """Yield frames from the video. + + Parameters + ---------- + frame : np.ndarray + A numpy array containing loaded frame data. + format : str + Convert the data into the given format before returning it. If None, + return the data in the encoded format if it can be expressed as a + strided array; otherwise raise an Exception. + filter_sequence : List[str, str, dict] + Set the returned colorspace. If not None (default: rgb24), convert + the data into the given format before returning it. If ``None`` + return the data in the encoded format if it can be expressed as a + strided array; otherwise raise an Exception. + filter_graph : (dict, List) + If not None, apply the given graph of FFmpeg filters to each + ndimage. The graph is given as a tuple of two dicts. The first dict + contains a (named) set of nodes, and the second dict contains a set + of edges between nodes of the previous dict. Check the (module-level) + plugin docs for details and examples. 
+ thread_count : int + How many threads to use when decoding a frame. The default is 0, + which will set the number using ffmpeg's default, which is based on + the codec, number of available cores, threadding model, and other + considerations. + thread_type : str + The threading model to be used. One of + + - `"SLICE"` (default): threads assemble parts of the current frame + - `"FRAME"`: threads may assemble future frames (faster for bulk reading) + + + Yields + ------ + frame : np.ndarray + A (decoded) video frame. + + + """ + + self._video_stream.thread_type = thread_type or "SLICE" + self._video_stream.codec_context.thread_count = thread_count + + self.set_video_filter(filter_sequence, filter_graph) + + for frame in self._decoder: + self._next_idx += 1 + + if self._video_filter is not None: + try: + frame = self._video_filter.send(frame) + except StopIteration: + break + + if frame is None: + continue + + yield self._unpack_frame(frame, format=format) + + if self._video_filter is not None: + for frame in self._video_filter: + yield self._unpack_frame(frame, format=format) + + def write( + self, + ndimage: Union[np.ndarray, List[np.ndarray]], + *, + codec: str = None, + is_batch: bool = True, + fps: int = 24, + in_pixel_format: str = "rgb24", + out_pixel_format: str = None, + filter_sequence: List[Tuple[str, Union[str, dict]]] = None, + filter_graph: Tuple[dict, List] = None, + ) -> Optional[bytes]: + """Save a ndimage as a video. + + Given a batch of frames (stacked along the first axis) or a list of + frames, encode them and add the result to the ImageResource. + + Parameters + ---------- + ndimage : ArrayLike, List[ArrayLike] + The ndimage to encode and write to the ImageResource. + codec : str + The codec to use when encoding frames. Only needed on first write + and ignored on subsequent writes. + is_batch : bool + If True (default), the ndimage is a batch of images, otherwise it is + a single image. This parameter has no effect on lists of ndimages. 
+ fps : str + The resulting videos frames per second. + in_pixel_format : str + The pixel format of the incoming ndarray. Defaults to "rgb24" and can + be any stridable pix_fmt supported by FFmpeg. + out_pixel_format : str + The pixel format to use while encoding frames. If None (default) + use the codec's default. + filter_sequence : List[str, str, dict] + If not None, apply the given sequence of FFmpeg filters to each + ndimage. Check the (module-level) plugin docs for details and + examples. + filter_graph : (dict, List) + If not None, apply the given graph of FFmpeg filters to each + ndimage. The graph is given as a tuple of two dicts. The first dict + contains a (named) set of nodes, and the second dict contains a set + of edges between nodes of the previous dict. Check the (module-level) + plugin docs for details and examples. + + Returns + ------- + encoded_image : bytes or None + If the chosen ImageResource is the special target ``""`` then + write will return a byte string containing the encoded image data. + Otherwise, it returns None. + + Notes + ----- + When writing ````, the video is finalized immediately after the + first write call and calling write multiple times to append frames is + not possible. + + """ + + if isinstance(ndimage, list): + # frames shapes must agree for video + if any(f.shape != ndimage[0].shape for f in ndimage): + raise ValueError("All frames should have the same shape") + elif not is_batch: + ndimage = np.asarray(ndimage)[None, ...] 
+ else: + ndimage = np.asarray(ndimage) + + if self._video_stream is None: + self.init_video_stream(codec, fps=fps, pixel_format=out_pixel_format) + + self.set_video_filter(filter_sequence, filter_graph) + + for img in ndimage: + self.write_frame(img, pixel_format=in_pixel_format) + + if self.request._uri_type == URI_BYTES: + # bytes are immutuable, so we have to flush immediately + # and can't support appending + self._flush_writer() + self._container.close() + + return self.request.get_file().getvalue() + + def properties(self, index: int = ..., *, format: str = "rgb24") -> ImageProperties: + """Standardized ndimage metadata. + + Parameters + ---------- + index : int + The index of the ndimage for which to return properties. If ``...`` + (Ellipsis, default), return the properties for the resulting batch + of frames. + format : str + If not None (default: rgb24), convert the data into the given format + before returning it. If None return the data in the encoded format + if that can be expressed as a strided array; otherwise raise an + Exception. + + Returns + ------- + properties : ImageProperties + A dataclass filled with standardized image metadata. + + Notes + ----- + This function is efficient and won't process any pixel data. + + The provided metadata does not include modifications by any filters + (through ``filter_sequence`` or ``filter_graph``). + + """ + + video_width = self._video_stream.codec_context.width + video_height = self._video_stream.codec_context.height + pix_format = format or self._video_stream.codec_context.pix_fmt + frame_template = av.VideoFrame(video_width, video_height, pix_format) + + shape = _get_frame_shape(frame_template) + if index is ...: + n_frames = self._video_stream.frames + shape = (n_frames,) + shape + + return ImageProperties( + shape=tuple(shape), + dtype=_format_to_dtype(frame_template.format), + n_images=shape[0] if index is ... 
else None, + is_batch=index is ..., + ) + + def metadata( + self, + index: int = ..., + exclude_applied: bool = True, + constant_framerate: bool = None, + ) -> Dict[str, Any]: + """Format-specific metadata. + + Returns a dictionary filled with metadata that is either stored in the + container, the video stream, or the frame's side-data. + + Parameters + ---------- + index : int + If ... (Ellipsis, default) return global metadata (the metadata + stored in the container and video stream). If not ..., return the + side data stored in the frame at the given index. + exclude_applied : bool + Currently, this parameter has no effect. It exists for compliance with + the ImageIO v3 API. + constant_framerate : bool + If True assume the video's framerate is constant. This allows for + faster seeking inside the file. If False, the video is reset before + each read and searched from the beginning. If None (default), this + value will be read from the container format. + + Returns + ------- + metadata : dict + A dictionary filled with format-specific metadata fields and their + values. 
+ + """ + + metadata = dict() + + if index is ...: + # useful flags defined on the container and/or video stream + metadata.update( + { + "video_format": self._video_stream.codec_context.pix_fmt, + "codec": self._video_stream.codec.name, + "long_codec": self._video_stream.codec.long_name, + "profile": self._video_stream.profile, + "fps": float(self._video_stream.guessed_rate), + } + ) + if self._video_stream.duration is not None: + duration = float( + self._video_stream.duration * self._video_stream.time_base + ) + metadata.update({"duration": duration}) + + metadata.update(self.container_metadata) + metadata.update(self.video_stream_metadata) + return metadata + + if constant_framerate is None: + constant_framerate = not self._container.format.variable_fps + + self._seek(index, constant_framerate=constant_framerate) + desired_frame = next(self._decoder) + self._next_idx += 1 + + # useful flags defined on the frame + metadata.update( + { + "key_frame": bool(desired_frame.key_frame), + "time": desired_frame.time, + "interlaced_frame": bool(desired_frame.interlaced_frame), + "frame_type": desired_frame.pict_type.name, + } + ) + + # side data + metadata.update( + {item.type.name: item.to_bytes() for item in desired_frame.side_data} + ) + + return metadata + + def close(self) -> None: + """Close the Video.""" + + is_write = self.request.mode.io_mode == IOMode.write + if is_write and self._video_stream is not None: + self._flush_writer() + + if self._container is not None: + self._container.close() + self.request.finish() + + def __enter__(self) -> "PyAVPlugin": + return super().__enter__() + + # ------------------------------ + # Add-on Interface inside imopen + # ------------------------------ + + def init_video_stream( + self, + codec: str, + *, + fps: float = 24, + pixel_format: str = None, + max_keyframe_interval: int = None, + force_keyframes: bool = None, + ) -> None: + """Initialize a new video stream. 
+ + This function adds a new video stream to the ImageResource using the + selected encoder (codec), framerate, and colorspace. + + Parameters + ---------- + codec : str + The codec to use, e.g. ``"libx264"`` or ``"vp9"``. + fps : float + The desired framerate of the video stream (frames per second). + pixel_format : str + The pixel format to use while encoding frames. If None (default) use + the codec's default. + max_keyframe_interval : int + The maximum distance between two intra frames (I-frames). Also known + as GOP size. If unspecified use the codec's default. Note that not + every I-frame is a keyframe; see the notes for details. + force_keyframes : bool + If True, limit inter frames dependency to frames within the current + keyframe interval (GOP), i.e., force every I-frame to be a keyframe. + If unspecified, use the codec's default. + + Notes + ----- + You can usually leave ``max_keyframe_interval`` and ``force_keyframes`` + at their default values, unless you try to generate seek-optimized video + or have a similar specialist use-case. In this case, ``force_keyframes`` + controls the ability to seek to _every_ I-frame, and + ``max_keyframe_interval`` controls how close to a random frame you can + seek. Low values allow more fine-grained seek at the expense of + file-size (and thus I/O performance). + + """ + + stream = self._container.add_stream(codec, fps) + stream.time_base = Fraction(1 / fps).limit_denominator(int(2**16 - 1)) + if pixel_format is not None: + stream.pix_fmt = pixel_format + if max_keyframe_interval is not None: + stream.gop_size = max_keyframe_interval + if force_keyframes is not None: + stream.closed_gop = force_keyframes + + self._video_stream = stream + + def write_frame(self, frame: np.ndarray, *, pixel_format: str = "rgb24") -> None: + """Add a frame to the video stream. + + This function appends a new frame to the video. It assumes that the + stream previously has been initialized. 
I.e., ``init_video_stream`` has + to be called before calling this function for the write to succeed. + + Parameters + ---------- + frame : np.ndarray + The image to be appended/written to the video stream. + pixel_format : str + The colorspace (pixel format) of the incoming frame. + + Notes + ----- + Frames may be held in a buffer, e.g., by the filter pipeline used during + writing or by FFMPEG to batch them prior to encoding. Make sure to + ``.close()`` the plugin or to use a context manager to ensure that all + frames are written to the ImageResource. + + """ + + # manual packing of ndarray into frame + # (this should live in pyAV, but it doesn't support all the formats we + # want and PRs there are slow) + pixel_format = av.VideoFormat(pixel_format) + img_dtype = _format_to_dtype(pixel_format) + width = frame.shape[2 if pixel_format.is_planar else 1] + height = frame.shape[1 if pixel_format.is_planar else 0] + av_frame = av.VideoFrame(width, height, pixel_format.name) + if pixel_format.is_planar: + for idx, plane in enumerate(av_frame.planes): + plane_array = np.frombuffer(plane, dtype=img_dtype) + plane_array = as_strided( + plane_array, + shape=(plane.height, plane.width), + strides=(plane.line_size, img_dtype.itemsize), + ) + plane_array[...] = frame[idx] + else: + if pixel_format.name.startswith("bayer_"): + # ffmpeg doesn't describe bayer formats correctly + # see https://github.com/imageio/imageio/issues/761#issuecomment-1059318851 + # and following for details. + n_channels = 1 + else: + n_channels = len(pixel_format.components) + + plane = av_frame.planes[0] + plane_shape = (plane.height, plane.width) + plane_strides = (plane.line_size, n_channels * img_dtype.itemsize) + if n_channels > 1: + plane_shape += (n_channels,) + plane_strides += (img_dtype.itemsize,) + + plane_array = as_strided( + np.frombuffer(plane, dtype=img_dtype), + shape=plane_shape, + strides=plane_strides, + ) + plane_array[...] 
= frame + + stream = self._video_stream + av_frame.time_base = stream.codec_context.time_base + av_frame.pts = self.frames_written + self.frames_written += 1 + + if self._video_filter is not None: + av_frame = self._video_filter.send(av_frame) + if av_frame is None: + return + + if stream.frames == 0: + stream.width = av_frame.width + stream.height = av_frame.height + + for packet in stream.encode(av_frame): + self._container.mux(packet) + + def set_video_filter( + self, + filter_sequence: List[Tuple[str, Union[str, dict]]] = None, + filter_graph: Tuple[dict, List] = None, + ) -> None: + """Set the filter(s) to use. + + This function creates a new FFMPEG filter graph to use when reading or + writing video. In the case of reading, frames are passed through the + filter graph before begin returned and, in case of writing, frames are + passed through the filter before being written to the video. + + Parameters + ---------- + filter_sequence : List[str, str, dict] + If not None, apply the given sequence of FFmpeg filters to each + ndimage. Check the (module-level) plugin docs for details and + examples. + filter_graph : (dict, List) + If not None, apply the given graph of FFmpeg filters to each + ndimage. The graph is given as a tuple of two dicts. The first dict + contains a (named) set of nodes, and the second dict contains a set + of edges between nodes of the previous dict. Check the + (module-level) plugin docs for details and examples. + + Notes + ----- + Changing a filter graph with lag during reading or writing will + currently cause frames in the filter queue to be lost. 
+ + """ + + if filter_sequence is None and filter_graph is None: + self._video_filter = None + return + + if filter_sequence is None: + filter_sequence = list() + + node_descriptors: Dict[str, Tuple[str, Union[str, Dict]]] + edges: List[Tuple[str, str, int, int]] + if filter_graph is None: + node_descriptors, edges = dict(), [("video_in", "video_out", 0, 0)] + else: + node_descriptors, edges = filter_graph + + graph = av.filter.Graph() + + previous_node = graph.add_buffer(template=self._video_stream) + for filter_name, argument in filter_sequence: + if isinstance(argument, str): + current_node = graph.add(filter_name, argument) + else: + current_node = graph.add(filter_name, **argument) + previous_node.link_to(current_node) + previous_node = current_node + + nodes = dict() + nodes["video_in"] = previous_node + nodes["video_out"] = graph.add("buffersink") + for name, (filter_name, arguments) in node_descriptors.items(): + if isinstance(arguments, str): + nodes[name] = graph.add(filter_name, arguments) + else: + nodes[name] = graph.add(filter_name, **arguments) + + for from_note, to_node, out_idx, in_idx in edges: + nodes[from_note].link_to(nodes[to_node], out_idx, in_idx) + + graph.configure() + + def video_filter(): + # this starts a co-routine + # send frames using graph.send() + frame = yield None + + # send and receive frames in "parallel" + while frame is not None: + graph.push(frame) + try: + frame = yield graph.pull() + except av.error.BlockingIOError: + # filter has lag and needs more frames + frame = yield None + except av.error.EOFError: + break + + try: + # send EOF in av>=9.0 + graph.push(None) + except ValueError: # pragma: no cover + # handle av<9.0 + pass + + # all frames have been sent, empty the filter + while True: + try: + yield graph.pull() + except av.error.EOFError: + break # EOF + except av.error.BlockingIOError: # pragma: no cover + # handle av<9.0 + break + + self._video_filter = video_filter() + self._video_filter.send(None) + + @property + 
def container_metadata(self): + """Container-specific metadata. + + A dictionary containing metadata stored at the container level. + + """ + return self._container.metadata + + @property + def video_stream_metadata(self): + """Stream-specific metadata. + + A dictionary containing metadata stored at the stream level. + + """ + return self._video_stream.metadata + + # ------------------------------- + # Internals and private functions + # ------------------------------- + + def _unpack_frame(self, frame: av.VideoFrame, *, format: str = None) -> np.ndarray: + """Convert a av.VideoFrame into a ndarray + + Parameters + ---------- + frame : av.VideoFrame + The frame to unpack. + format : str + If not None, convert the frame to the given format before unpacking. + + """ + + if format is not None: + frame = frame.reformat(format=format) + + dtype = _format_to_dtype(frame.format) + shape = _get_frame_shape(frame) + + planes = list() + for idx in range(len(frame.planes)): + n_channels = sum( + [ + x.bits // (dtype.itemsize * 8) + for x in frame.format.components + if x.plane == idx + ] + ) + av_plane = frame.planes[idx] + plane_shape = (av_plane.height, av_plane.width) + plane_strides = (av_plane.line_size, n_channels * dtype.itemsize) + if n_channels > 1: + plane_shape += (n_channels,) + plane_strides += (dtype.itemsize,) + + np_plane = as_strided( + np.frombuffer(av_plane, dtype=dtype), + shape=plane_shape, + strides=plane_strides, + ) + planes.append(np_plane) + + if len(planes) > 1: + # Note: the planes *should* exist inside a contigous memory block + # somewhere inside av.Frame however pyAV does not appear to expose this, + # so we are forced to copy the planes individually instead of wrapping + # them :( + out = np.concatenate(planes).reshape(shape) + else: + out = planes[0] + + return out + + def _seek(self, index, *, constant_framerate: bool = True) -> Generator: + """Seeks to the frame at the given index.""" + + if index == self._next_idx: + return # fast path :) + 
+ # we must decode at least once before we seek otherwise the + # returned frames become corrupt. + if self._next_idx == 0: + next(self._decoder) + self._next_idx += 1 + + if index == self._next_idx: + return # fast path :) + + # remove this branch until I find a way to efficiently find the next + # keyframe. keeping this as a reminder + # if self._next_idx < index and index < self._next_keyframe_idx: + # frames_to_yield = index - self._next_idx + if not constant_framerate and index > self._next_idx: + frames_to_yield = index - self._next_idx + elif not constant_framerate: + # seek backwards and can't link idx and pts + self._container.seek(0) + self._decoder = self._container.decode(video=0) + self._next_idx = 0 + + frames_to_yield = index + else: + # we know that the time between consecutive frames is constant + # hence we can link index and pts + + # how many pts lie between two frames + sec_delta = 1 / self._video_stream.guessed_rate + pts_delta = sec_delta / self._video_stream.time_base + + index_pts = int(index * pts_delta) + + # this only seeks to the closed (preceeding) keyframe + self._container.seek(index_pts, stream=self._video_stream) + self._decoder = self._container.decode(video=0) + + # this may be made faster if we could get the keyframe's time without + # decoding it + keyframe = next(self._decoder) + keyframe_time = keyframe.pts * keyframe.time_base + keyframe_pts = int(keyframe_time / self._video_stream.time_base) + keyframe_index = keyframe_pts // pts_delta + + self._container.seek(index_pts, stream=self._video_stream) + self._next_idx = keyframe_index + + frames_to_yield = index - keyframe_index + + for _ in range(frames_to_yield): + next(self._decoder) + self._next_idx += 1 + + def _flush_writer(self): + """Flush the filter and encoder + + This will reset the filter to `None` and send EoF to the encoder, + i.e., after calling, no more frames may be written. 
+ + """ + + stream = self._video_stream + + if self._video_filter is not None: + # flush encoder + for av_frame in self._video_filter: + if stream.frames == 0: + stream.width = av_frame.width + stream.height = av_frame.height + for packet in stream.encode(av_frame): + self._container.mux(packet) + self._video_filter = None + + # flush stream + for packet in stream.encode(): + self._container.mux(packet) + self._video_stream = None diff --git a/.venv/Lib/site-packages/imageio/plugins/simpleitk.py b/.venv/Lib/site-packages/imageio/plugins/simpleitk.py new file mode 100644 index 00000000..dfaa066c --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/simpleitk.py @@ -0,0 +1,156 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +""" Read/Write images using SimpleITK. + +Backend: `Insight Toolkit `_ + +.. note:: + To use this plugin you have to install its backend:: + + pip install imageio[itk] + +The ItkFormat uses the ITK or SimpleITK library to support a range of +ITK-related formats. It also supports a few common formats (e.g. PNG and JPEG). + +Parameters +---------- +None + +""" + +from ..core import Format, has_module + +_itk = None # Defer loading to load_lib() function. + + +def load_lib(): + global _itk, _read_function, _write_function + try: + import itk as _itk + + _read_function = _itk.imread + _write_function = _itk.imwrite + except ImportError: + try: + import SimpleITK as _itk + + _read_function = _itk.ReadImage + _write_function = _itk.WriteImage + except ImportError: + raise ImportError( + "itk could not be found. " + "Please try " + " python -m pip install itk " + "or " + " python -m pip install simpleitk " + "or refer to " + " https://itkpythonpackage.readthedocs.io/ " + "for further instructions." + ) + return _itk + + +# Split up in real ITK and all supported formats. 
+ITK_FORMATS = ( + ".gipl", + ".ipl", + ".mha", + ".mhd", + ".nhdr", + "nia", + "hdr", + ".nrrd", + ".nii", + ".nii.gz", + ".img", + ".img.gz", + ".vtk", + "hdf5", + "lsm", + "mnc", + "mnc2", + "mgh", + "mnc", + "pic", +) +ALL_FORMATS = ITK_FORMATS + ( + ".bmp", + ".jpeg", + ".jpg", + ".png", + ".tiff", + ".tif", + ".dicom", + ".dcm", + ".gdcm", +) + + +class ItkFormat(Format): + """See :mod:`imageio.plugins.simpleitk`""" + + def _can_read(self, request): + # If the request is a format that only this plugin can handle, + # we report that we can do it; a useful error will be raised + # when simpleitk is not installed. For the more common formats + # we only report that we can read if the library is installed. + if request.extension in ITK_FORMATS: + return True + if has_module("itk.ImageIOBase") or has_module("SimpleITK"): + return request.extension in ALL_FORMATS + + def _can_write(self, request): + if request.extension in ITK_FORMATS: + return True + if has_module("itk.ImageIOBase") or has_module("SimpleITK"): + return request.extension in ALL_FORMATS + + # -- reader + + class Reader(Format.Reader): + def _open(self, pixel_type=None, fallback_only=None, **kwargs): + if not _itk: + load_lib() + args = () + if pixel_type is not None: + args += (pixel_type,) + if fallback_only is not None: + args += (fallback_only,) + self._img = _read_function(self.request.get_local_filename(), *args) + + def _get_length(self): + return 1 + + def _close(self): + pass + + def _get_data(self, index): + # Get data + if index != 0: + error_msg = "Index out of range while reading from itk file" + raise IndexError(error_msg) + + # Return array and empty meta data + return _itk.GetArrayFromImage(self._img), {} + + def _get_meta_data(self, index): + error_msg = "The itk plugin does not support meta data, currently." 
+ raise RuntimeError(error_msg) + + # -- writer + class Writer(Format.Writer): + def _open(self): + if not _itk: + load_lib() + + def _close(self): + pass + + def _append_data(self, im, meta): + _itk_img = _itk.GetImageFromArray(im) + _write_function(_itk_img, self.request.get_local_filename()) + + def set_meta_data(self, meta): + error_msg = "The itk plugin does not support meta data, currently." + raise RuntimeError(error_msg) diff --git a/.venv/Lib/site-packages/imageio/plugins/spe.py b/.venv/Lib/site-packages/imageio/plugins/spe.py new file mode 100644 index 00000000..f56dd52e --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/spe.py @@ -0,0 +1,955 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +""" Read SPE files. + +This plugin supports reading files saved in the Princeton Instruments +SPE file format. + +Parameters +---------- +check_filesize : bool + The number of frames in the file is stored in the file header. However, + this number may be wrong for certain software. If this is `True` + (default), derive the number of frames also from the file size and + raise a warning if the two values do not match. +char_encoding : str + Deprecated. Exists for backwards compatibility; use ``char_encoding`` of + ``metadata`` instead. +sdt_meta : bool + Deprecated. Exists for backwards compatibility; use ``sdt_control`` of + ``metadata`` instead. + +Methods +------- +.. note:: + Check the respective function for a list of supported kwargs and detailed + documentation. + +.. 
autosummary:: + :toctree: + + SpePlugin.read + SpePlugin.iter + SpePlugin.properties + SpePlugin.metadata + +""" + +from datetime import datetime +import logging +import os +from typing import ( + Any, + Callable, + Dict, + Iterator, + List, + Mapping, + Optional, + Sequence, + Tuple, + Union, +) +import warnings + +import numpy as np + +from ..core.request import Request, IOMode, InitializationError +from ..core.v3_plugin_api import PluginV3, ImageProperties + + +logger = logging.getLogger(__name__) + + +class Spec: + """SPE file specification data + + Tuples of (offset, datatype, count), where offset is the offset in the SPE + file and datatype is the datatype as used in `numpy.fromfile`() + + `data_start` is the offset of actual image data. + + `dtypes` translates SPE datatypes (0...4) to numpy ones, e. g. dtypes[0] + is dtype(" Tuple[int, int]: + """Get the version of SDT-control metadata encoded in the comments + + Parameters + ---------- + comments + List of SPE file comments, typically ``metadata["comments"]``. + + Returns + ------- + Major and minor version. ``-1, -1`` if detection failed. + """ + if comments[4][70:76] != "COMVER": + return -1, -1 + try: + return int(comments[4][76:78]), int(comments[4][78:80]) + except ValueError: + return -1, -1 + + @staticmethod + def parse_comments( + comments: Sequence[str], version: Tuple[int, int] + ) -> Dict[str, Any]: + """Extract SDT-control metadata from comments + + Parameters + ---------- + comments + List of SPE file comments, typically ``metadata["comments"]``. + version + Major and minor version of SDT-control metadata format + + Returns + ------- + Dict of metadata + """ + sdt_md = {} + for minor in range(version[1] + 1): + # Metadata with same major version is backwards compatible. + # Fields are specified incrementally in `comment_fields`. + # E.g. if the file has version 5.01, `comment_fields[5, 0]` and + # `comment_fields[5, 1]` need to be decoded. 
+ try: + cmt = __class__.comment_fields[version[0], minor] + except KeyError: + continue + for name, spec in cmt.items(): + try: + v = spec.cvt(comments[spec.n][spec.slice]) + if spec.scale is not None: + v *= spec.scale + sdt_md[name] = v + except Exception as e: + warnings.warn( + f"Failed to decode SDT-control metadata field `{name}`: {e}" + ) + sdt_md[name] = None + if version not in __class__.comment_fields: + supported_ver = ", ".join( + map(lambda x: f"{x[0]}.{x[1]:02}", __class__.comment_fields) + ) + warnings.warn( + f"Unsupported SDT-control metadata version {version[0]}.{version[1]:02}. " + f"Only versions {supported_ver} are supported. " + "Some or all SDT-control metadata may be missing." + ) + comment = comments[0] + comments[2] + sdt_md["comment"] = comment.strip() + return sdt_md + + @staticmethod + def get_datetime(date: str, time: str) -> Union[datetime, None]: + """Turn date and time saved by SDT-control into proper datetime object + + Parameters + ---------- + date + SPE file date, typically ``metadata["date"]``. + time + SPE file date, typically ``metadata["time_local"]``. + + Returns + ------- + File's datetime if parsing was succsessful, else None. + """ + try: + month = __class__.months[date[2:5]] + return datetime( + int(date[5:9]), + month, + int(date[0:2]), + int(time[0:2]), + int(time[2:4]), + int(time[4:6]), + ) + except Exception as e: + logger.info(f"Failed to decode date from SDT-control metadata: {e}.") + + @staticmethod + def extract_metadata(meta: Mapping, char_encoding: str = "latin1"): + """Extract SDT-control metadata from SPE metadata + + SDT-control stores some metadata in comments and other fields. + Extract them and remove unused entries. + + Parameters + ---------- + meta + SPE file metadata. Modified in place. + char_encoding + Character encoding used to decode strings in the metadata. 
+ """ + comver = __class__.get_comment_version(meta["comments"]) + if any(c < 0 for c in comver): + # This file most likely was not created by SDT-control + logger.debug("SDT-control comments not found.") + return + + sdt_meta = __class__.parse_comments(meta["comments"], comver) + meta.pop("comments") + meta.update(sdt_meta) + + # Get date and time in a usable format + dt = __class__.get_datetime(meta["date"], meta["time_local"]) + if dt: + meta["datetime"] = dt + meta.pop("date") + meta.pop("time_local") + + sp4 = meta["spare_4"] + try: + meta["modulation_script"] = sp4.decode(char_encoding) + meta.pop("spare_4") + except UnicodeDecodeError: + warnings.warn( + "Failed to decode SDT-control laser " + "modulation script. Bad char_encoding?" + ) + + # Get rid of unused data + meta.pop("time_utc") + meta.pop("exposure_sec") + + +class SpePlugin(PluginV3): + def __init__( + self, + request: Request, + check_filesize: bool = True, + char_encoding: Optional[str] = None, + sdt_meta: Optional[bool] = None, + ) -> None: + """Instantiate a new SPE file plugin object + + Parameters + ---------- + request : Request + A request object representing the resource to be operated on. + check_filesize : bool + If True, compute the number of frames from the filesize, compare it + to the frame count in the file header, and raise a warning if the + counts don't match. (Certain software may create files with + char_encoding : str + Deprecated. Exists for backwards compatibility; use ``char_encoding`` of + ``metadata`` instead. + sdt_meta : bool + Deprecated. Exists for backwards compatibility; use ``sdt_control`` of + ``metadata`` instead. + + """ + + super().__init__(request) + if request.mode.io_mode == IOMode.write: + raise InitializationError("cannot write SPE files") + + if char_encoding is not None: + warnings.warn( + "Passing `char_encoding` to the constructor is deprecated. 
" + "Use `char_encoding` parameter of the `metadata()` method " + "instead.", + DeprecationWarning, + ) + self._char_encoding = char_encoding + if sdt_meta is not None: + warnings.warn( + "Passing `sdt_meta` to the constructor is deprecated. " + "Use `sdt_control` parameter of the `metadata()` method " + "instead.", + DeprecationWarning, + ) + self._sdt_meta = sdt_meta + + self._file = self.request.get_file() + + try: + # Spec.basic contains no string, no need to worry about character + # encoding. + info = self._parse_header(Spec.basic, "latin1") + self._file_header_ver = info["file_header_ver"] + self._dtype = Spec.dtypes[info["datatype"]] + self._shape = (info["ydim"], info["xdim"]) + self._len = info["NumFrames"] + + if check_filesize: + # Some software writes incorrect `NumFrames` metadata. + # To determine the number of frames, check the size of the data + # segment -- until the end of the file for SPE<3, until the + # xml footer for SPE>=3. + if info["file_header_ver"] >= 3: + data_end = info["xml_footer_offset"] + else: + self._file.seek(0, os.SEEK_END) + data_end = self._file.tell() + line = data_end - Spec.data_start + line //= self._shape[0] * self._shape[1] * self._dtype.itemsize + if line != self._len: + warnings.warn( + f"The file header of {self.request.filename} claims there are " + f"{self._len} frames, but there are actually {line} frames." + ) + self._len = min(line, self._len) + self._file.seek(Spec.data_start) + except Exception: + raise InitializationError("SPE plugin cannot read the provided file.") + + def read(self, *, index: int = ...) -> np.ndarray: + """Read a frame or all frames from the file + + Parameters + ---------- + index : int + Select the index-th frame from the file. If index is `...`, + select all frames and stack them along a new axis. + + Returns + ------- + A Numpy array of pixel values. 
+ + """ + + if index is Ellipsis: + read_offset = Spec.data_start + count = self._shape[0] * self._shape[1] * self._len + out_shape = (self._len, *self._shape) + elif index < 0: + raise IndexError(f"Index `{index}` is smaller than 0.") + elif index >= self._len: + raise IndexError( + f"Index `{index}` exceeds the number of frames stored in this file (`{self._len}`)." + ) + else: + read_offset = ( + Spec.data_start + + index * self._shape[0] * self._shape[1] * self._dtype.itemsize + ) + count = self._shape[0] * self._shape[1] + out_shape = self._shape + + self._file.seek(read_offset) + data = np.fromfile(self._file, dtype=self._dtype, count=count) + return data.reshape(out_shape) + + def iter(self) -> Iterator[np.ndarray]: + """Iterate over the frames in the file + + Yields + ------ + A Numpy array of pixel values. + """ + + return (self.read(index=i) for i in range(self._len)) + + def metadata( + self, + index: int = ..., + exclude_applied: bool = True, + char_encoding: str = "latin1", + sdt_control: bool = True, + ) -> Dict[str, Any]: + """SPE specific metadata. + + Parameters + ---------- + index : int + Ignored as SPE files only store global metadata. + exclude_applied : bool + Ignored. Exists for API compatibility. + char_encoding : str + The encoding to use when parsing strings. + sdt_control : bool + If `True`, decode special metadata written by the + SDT-control software if present. + + Returns + ------- + metadata : dict + Key-value pairs of metadata. + + Notes + ----- + SPE v3 stores metadata as XML, whereas SPE v2 uses a binary format. + + .. rubric:: Supported SPE v2 Metadata fields + + ROIs : list of dict + Regions of interest used for recording images. Each dict has the + "top_left" key containing x and y coordinates of the top left corner, + the "bottom_right" key with x and y coordinates of the bottom right + corner, and the "bin" key with number of binned pixels in x and y + directions. 
+ comments : list of str + The SPE format allows for 5 comment strings of 80 characters each. + controller_version : int + Hardware version + logic_output : int + Definition of output BNC + amp_hi_cap_low_noise : int + Amp switching mode + mode : int + Timing mode + exp_sec : float + Alternative exposure in seconds + date : str + Date string + detector_temp : float + Detector temperature + detector_type : int + CCD / diode array type + st_diode : int + Trigger diode + delay_time : float + Used with async mode + shutter_control : int + Normal, disabled open, or disabled closed + absorb_live : bool + on / off + absorb_mode : int + Reference strip or file + can_do_virtual_chip : bool + True or False whether chip can do virtual chip + threshold_min_live : bool + on / off + threshold_min_val : float + Threshold minimum value + threshold_max_live : bool + on / off + threshold_max_val : float + Threshold maximum value + time_local : str + Experiment local time + time_utc : str + Experiment UTC time + adc_offset : int + ADC offset + adc_rate : int + ADC rate + adc_type : int + ADC type + adc_resolution : int + ADC resolution + adc_bit_adjust : int + ADC bit adjust + gain : int + gain + sw_version : str + Version of software which created this file + spare_4 : bytes + Reserved space + readout_time : float + Experiment readout time + type : str + Controller type + clockspeed_us : float + Vertical clock speed in microseconds + readout_mode : ["full frame", "frame transfer", "kinetics", ""] + Readout mode. Empty string means that this was not set by the + Software. 
+ window_size : int + Window size for Kinetics mode + file_header_ver : float + File header version + chip_size : [int, int] + x and y dimensions of the camera chip + virt_chip_size : [int, int] + Virtual chip x and y dimensions + pre_pixels : [int, int] + Pre pixels in x and y dimensions + post_pixels : [int, int], + Post pixels in x and y dimensions + geometric : list of {"rotate", "reverse", "flip"} + Geometric operations + sdt_major_version : int + (only for files created by SDT-control) + Major version of SDT-control software + sdt_minor_version : int + (only for files created by SDT-control) + Minor version of SDT-control software + sdt_controller_name : str + (only for files created by SDT-control) + Controller name + exposure_time : float + (only for files created by SDT-control) + Exposure time in seconds + color_code : str + (only for files created by SDT-control) + Color channels used + detection_channels : int + (only for files created by SDT-control) + Number of channels + background_subtraction : bool + (only for files created by SDT-control) + Whether background subtraction war turned on + em_active : bool + (only for files created by SDT-control) + Whether EM was turned on + em_gain : int + (only for files created by SDT-control) + EM gain + modulation_active : bool + (only for files created by SDT-control) + Whether laser modulation (“attenuate”) was turned on + pixel_size : float + (only for files created by SDT-control) + Camera pixel size + sequence_type : str + (only for files created by SDT-control) + Type of sequnce (standard, TOCCSL, arbitrary, …) + grid : float + (only for files created by SDT-control) + Sequence time unit (“grid size”) in seconds + n_macro : int + (only for files created by SDT-control) + Number of macro loops + delay_macro : float + (only for files created by SDT-control) + Time between macro loops in seconds + n_mini : int + (only for files created by SDT-control) + Number of mini loops + delay_mini : float + (only for 
def _metadata_pre_v3(self, char_encoding: str, sdt_control: bool) -> Dict[str, Any]:
    """Extract metadata from SPE v2 files

    Parameters
    ----------
    char_encoding
        String character encoding
    sdt_control
        If `True`, try to decode special metadata written by the
        SDT-control software.

    Returns
    -------
    dict mapping metadata names to values.
    """
    m = self._parse_header(Spec.metadata, char_encoding)

    # Number of ROIs, clamped to at least 1. Guard against a missing
    # "NumROI" entry: `pop` defaults to None, and the bare `nr < 1`
    # comparison would raise TypeError on None.
    nr = m.pop("NumROI", None)
    nr = 1 if nr is None or nr < 1 else nr
    m["ROIs"] = roi_array_to_dict(m["ROIs"][:nr])

    # chip sizes: collapse the per-axis header entries into [x, y] pairs
    m["chip_size"] = [m.pop(k, None) for k in ("xDimDet", "yDimDet")]
    m["virt_chip_size"] = [m.pop(k, None) for k in ("VChipXdim", "VChipYdim")]
    m["pre_pixels"] = [m.pop(k, None) for k in ("XPrePixels", "YPrePixels")]
    m["post_pixels"] = [m.pop(k, None) for k in ("XPostPixels", "YPostPixels")]

    # convert comments from numpy.str_ to str
    m["comments"] = [str(c) for c in m["comments"]]

    # geometric operations are stored as bit flags
    g = []
    f = m.pop("geometric", 0)
    if f & 1:
        g.append("rotate")
    if f & 2:
        g.append("reverse")
    if f & 4:
        g.append("flip")
    m["geometric"] = g

    # Make some additional information more human-readable: map the
    # 1-based controller / readout-mode codes to their names, None if
    # out of range.
    t = m["type"]
    if 1 <= t <= len(Spec.controllers):
        m["type"] = Spec.controllers[t - 1]
    else:
        m["type"] = None
    r = m["readout_mode"]
    if 1 <= r <= len(Spec.readout_modes):
        m["readout_mode"] = Spec.readout_modes[r - 1]
    else:
        m["readout_mode"] = None

    # Convert integer on/off flags to bools
    for k in (
        "absorb_live",
        "can_do_virtual_chip",
        "threshold_min_live",
        "threshold_max_live",
    ):
        m[k] = bool(m[k])

    # Extract SDT-control metadata if desired
    if sdt_control:
        SDTControlSpec.extract_metadata(m, char_encoding)

    return m
If index is an Ellipsis (...), return the + properties of all frames in the file stacked along a new batch + dimension. + + Returns + ------- + properties : ImageProperties + A dataclass filled with standardized image metadata. + """ + + if index is Ellipsis: + return ImageProperties( + shape=(self._len, *self._shape), + dtype=self._dtype, + n_images=self._len, + is_batch=True, + ) + return ImageProperties(shape=self._shape, dtype=self._dtype, is_batch=False) + + def _parse_header( + self, spec: Mapping[str, Tuple], char_encoding: str + ) -> Dict[str, Any]: + """Get information from SPE file header + + Parameters + ---------- + spec + Maps header entry name to its location, data type description and + optionally number of entries. See :py:attr:`Spec.basic` and + :py:attr:`Spec.metadata`. + char_encoding + String character encoding + + Returns + ------- + Dict mapping header entry name to its value + """ + + ret = {} + # Decode each string from the numpy array read by np.fromfile + decode = np.vectorize(lambda x: x.decode(char_encoding)) + + for name, sp in spec.items(): + self._file.seek(sp[0]) + cnt = 1 if len(sp) < 3 else sp[2] + v = np.fromfile(self._file, dtype=sp[1], count=cnt) + if v.dtype.kind == "S" and name not in Spec.no_decode: + # Silently ignore string decoding failures + try: + v = decode(v) + except Exception: + warnings.warn( + f'Failed to decode "{name}" metadata ' + "string. Check `char_encoding` parameter." + ) + + try: + # For convenience, if the array contains only one single + # entry, return this entry itself. + v = v.item() + except ValueError: + v = np.squeeze(v) + ret[name] = v + return ret + + +def roi_array_to_dict(a: np.ndarray) -> List[Dict[str, List[int]]]: + """Convert the `ROIs` structured arrays to :py:class:`dict` + + Parameters + ---------- + a + Structured array containing ROI data + + Returns + ------- + One dict per ROI. 
Keys are "top_left", "bottom_right", and "bin", + values are tuples whose first element is the x axis value and the + second element is the y axis value. + """ + + dict_list = [] + a = a[["startx", "starty", "endx", "endy", "groupx", "groupy"]] + for sx, sy, ex, ey, gx, gy in a: + roi_dict = { + "top_left": [int(sx), int(sy)], + "bottom_right": [int(ex), int(ey)], + "bin": [int(gx), int(gy)], + } + dict_list.append(roi_dict) + return dict_list diff --git a/.venv/Lib/site-packages/imageio/plugins/swf.py b/.venv/Lib/site-packages/imageio/plugins/swf.py new file mode 100644 index 00000000..9d507dde --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/swf.py @@ -0,0 +1,336 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +""" Read/Write SWF files. + +Backend: internal + +Shockwave flash (SWF) is a media format designed for rich and +interactive animations. This plugin makes use of this format to +store a series of images in a lossless format with good compression +(zlib). The resulting images can be shown as an animation using +a flash player (such as the browser). + +SWF stores images in RGBA format. RGB or grayscale images are +automatically converted. SWF does not support meta data. + +Parameters for reading +---------------------- +loop : bool + If True, the video will rewind as soon as a frame is requested + beyond the last frame. Otherwise, IndexError is raised. Default False. + +Parameters for saving +--------------------- +fps : int + The speed to play the animation. Default 12. +loop : bool + If True, add a tag to the end of the file to play again from + the first frame. Most flash players will then play the movie + in a loop. Note that the imageio SWF Reader does not check this + tag. Default True. +html : bool + If the output is a file on the file system, write an html file + (in HTML5) that shows the animation. Default False. +compress : bool + Whether to compress the swf file. Default False. 
You probably don't + want to use this. This does not decrease the file size since + the images are already compressed. It will result in slower + read and write time. The only purpose of this feature is to + create compressed SWF files, so that we can test the + functionality to read them. + +""" + +import os +import zlib +import logging +from io import BytesIO + +import numpy as np + +from ..core import Format, read_n_bytes, image_as_uint + + +logger = logging.getLogger(__name__) + +_swf = None # lazily loaded in lib() + + +def load_lib(): + global _swf + from . import _swf + + return _swf + + +class SWFFormat(Format): + """See :mod:`imageio.plugins.swf`""" + + def _can_read(self, request): + tmp = request.firstbytes[0:3].decode("ascii", "ignore") + if tmp in ("FWS", "CWS"): + return True + + def _can_write(self, request): + if request.extension in self.extensions: + return True + + # -- reader + + class Reader(Format.Reader): + def _open(self, loop=False): + if not _swf: + load_lib() + + self._arg_loop = bool(loop) + + self._fp = self.request.get_file() + + # Check file ... + tmp = self.request.firstbytes[0:3].decode("ascii", "ignore") + if tmp == "FWS": + pass # OK + elif tmp == "CWS": + # Compressed, we need to decompress + bb = self._fp.read() + bb = bb[:8] + zlib.decompress(bb[8:]) + # Wrap up in a file object + self._fp = BytesIO(bb) + else: + raise IOError("This does not look like a valid SWF file") + + # Skip first bytes. This also tests support got seeking ... + try: + self._fp.seek(8) + self._streaming_mode = False + except Exception: + self._streaming_mode = True + self._fp_read(8) + + # Skip header + # Note that the number of frames is there, which we could + # potentially use, but the number of frames does not necessarily + # correspond to the number of images. 
+ nbits = _swf.bits2int(self._fp_read(1), 5) + nbits = 5 + nbits * 4 + Lrect = nbits / 8.0 + if Lrect % 1: + Lrect += 1 + Lrect = int(Lrect) + self._fp_read(Lrect + 3) + + # Now the rest is basically tags ... + self._imlocs = [] # tuple (loc, sze, T, L1) + if not self._streaming_mode: + # Collect locations of frame, while skipping through the data + # This does not read any of the tag *data*. + try: + while True: + isimage, sze, T, L1 = self._read_one_tag() + loc = self._fp.tell() + if isimage: + # Still need to check if the format is right + format = ord(self._fp_read(3)[2:]) + if format == 5: # RGB or RGBA lossless + self._imlocs.append((loc, sze, T, L1)) + self._fp.seek(loc + sze) # Skip over tag + except IndexError: + pass # done reading + + def _fp_read(self, n): + return read_n_bytes(self._fp, n) + + def _close(self): + pass + + def _get_length(self): + if self._streaming_mode: + return np.inf + else: + return len(self._imlocs) + + def _get_data(self, index): + # Check index + if index < 0: + raise IndexError("Index in swf file must be > 0") + if not self._streaming_mode: + if self._arg_loop and self._imlocs: + index = index % len(self._imlocs) + if index >= len(self._imlocs): + raise IndexError("Index out of bounds") + + if self._streaming_mode: + # Walk over tags until we find an image + while True: + isimage, sze, T, L1 = self._read_one_tag() + bb = self._fp_read(sze) # always read data + if isimage: + im = _swf.read_pixels(bb, 0, T, L1) # can be None + if im is not None: + return im, {} + + else: + # Go to corresponding location, read data, and convert to image + loc, sze, T, L1 = self._imlocs[index] + self._fp.seek(loc) + bb = self._fp_read(sze) + # Read_pixels should return ndarry, since we checked format + im = _swf.read_pixels(bb, 0, T, L1) + return im, {} + + def _read_one_tag(self): + """ + Return (True, loc, size, T, L1) if an image that we can read. + Return (False, loc, size, T, L1) if any other tag. 
+ """ + + # Get head + head = self._fp_read(6) + if not head: # pragma: no cover + raise IndexError("Reached end of swf movie") + + # Determine type and length + T, L1, L2 = _swf.get_type_and_len(head) + if not L2: # pragma: no cover + raise RuntimeError("Invalid tag length, could not proceed") + + # Read data + isimage = False + sze = L2 - 6 + # bb = self._fp_read(L2 - 6) + + # Parse tag + if T == 0: + raise IndexError("Reached end of swf movie") + elif T in [20, 36]: + isimage = True + # im = _swf.read_pixels(bb, 0, T, L1) # can be None + elif T in [6, 21, 35, 90]: # pragma: no cover + logger.warning("Ignoring JPEG image: cannot read JPEG.") + else: + pass # Not an image tag + + # Done. Return image. Can be None + # return im + return isimage, sze, T, L1 + + def _get_meta_data(self, index): + return {} # This format does not support meta data + + # -- writer + + class Writer(Format.Writer): + def _open(self, fps=12, loop=True, html=False, compress=False): + if not _swf: + load_lib() + + self._arg_fps = int(fps) + self._arg_loop = bool(loop) + self._arg_html = bool(html) + self._arg_compress = bool(compress) + + self._fp = self.request.get_file() + self._framecounter = 0 + self._framesize = (100, 100) + + # For compress, we use an in-memory file object + if self._arg_compress: + self._fp_real = self._fp + self._fp = BytesIO() + + def _close(self): + self._complete() + # Get size of (uncompressed) file + sze = self._fp.tell() + # set nframes, this is in the potentially compressed region + self._fp.seek(self._location_to_save_nframes) + self._fp.write(_swf.int2uint16(self._framecounter)) + # Compress body? + if self._arg_compress: + bb = self._fp.getvalue() + self._fp = self._fp_real + self._fp.write(bb[:8]) + self._fp.write(zlib.compress(bb[8:])) + sze = self._fp.tell() # renew sze value + # set size + self._fp.seek(4) + self._fp.write(_swf.int2uint32(sze)) + self._fp = None # Disable + + # Write html? 
+ if self._arg_html and os.path.isfile(self.request.filename): + dirname, fname = os.path.split(self.request.filename) + filename = os.path.join(dirname, fname[:-4] + ".html") + w, h = self._framesize + html = HTML % (fname, w, h, fname) + with open(filename, "wb") as f: + f.write(html.encode("utf-8")) + + def _write_header(self, framesize, fps): + self._framesize = framesize + # Called as soon as we know framesize; when we get first frame + bb = b"" + bb += "FC"[self._arg_compress].encode("ascii") + bb += "WS".encode("ascii") # signature bytes + bb += _swf.int2uint8(8) # version + bb += "0000".encode("ascii") # FileLength (leave open for now) + bb += ( + _swf.Tag().make_rect_record(0, framesize[0], 0, framesize[1]).tobytes() + ) + bb += _swf.int2uint8(0) + _swf.int2uint8(fps) # FrameRate + self._location_to_save_nframes = len(bb) + bb += "00".encode("ascii") # nframes (leave open for now) + self._fp.write(bb) + + # Write some initial tags + taglist = _swf.FileAttributesTag(), _swf.SetBackgroundTag(0, 0, 0) + for tag in taglist: + self._fp.write(tag.get_tag()) + + def _complete(self): + # What if no images were saved? 
+ if not self._framecounter: + self._write_header((10, 10), self._arg_fps) + # Write stop tag if we do not loop + if not self._arg_loop: + self._fp.write(_swf.DoActionTag("stop").get_tag()) + # finish with end tag + self._fp.write("\x00\x00".encode("ascii")) + + def _append_data(self, im, meta): + # Correct shape and type + if im.ndim == 3 and im.shape[-1] == 1: + im = im[:, :, 0] + im = image_as_uint(im, bitdepth=8) + # Get frame size + wh = im.shape[1], im.shape[0] + # Write header on first frame + isfirstframe = False + if self._framecounter == 0: + isfirstframe = True + self._write_header(wh, self._arg_fps) + # Create tags + bm = _swf.BitmapTag(im) + sh = _swf.ShapeTag(bm.id, (0, 0), wh) + po = _swf.PlaceObjectTag(1, sh.id, move=(not isfirstframe)) + sf = _swf.ShowFrameTag() + # Write tags + for tag in [bm, sh, po, sf]: + self._fp.write(tag.get_tag()) + self._framecounter += 1 + + def set_meta_data(self, meta): + pass + + +HTML = """ + + + + Show Flash animation %s + + + + +""" diff --git a/.venv/Lib/site-packages/imageio/plugins/tifffile.py b/.venv/Lib/site-packages/imageio/plugins/tifffile.py new file mode 100644 index 00000000..190cfe26 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/tifffile.py @@ -0,0 +1,561 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +""" Read/Write TIFF files. + +Backend: internal + +Provides support for a wide range of Tiff images using the tifffile +backend. + +Parameters for reading +---------------------- +offset : int + Optional start position of embedded file. By default this is + the current file position. +size : int + Optional size of embedded file. By default this is the number + of bytes from the 'offset' to the end of the file. +multifile : bool + If True (default), series may include pages from multiple files. + Currently applies to OME-TIFF only. +multifile_close : bool + If True (default), keep the handles of other files in multifile + series closed. 
This is inefficient when few files refer to + many pages. If False, the C runtime may run out of resources. + +Parameters for saving +--------------------- +bigtiff : bool + If True, the BigTIFF format is used. +byteorder : {'<', '>'} + The endianness of the data in the file. + By default this is the system's native byte order. +software : str + Name of the software used to create the image. + Saved with the first page only. + +Metadata for reading +-------------------- +planar_configuration : {'contig', 'planar'} + Specifies if samples are stored contiguous or in separate planes. + By default this setting is inferred from the data shape. + 'contig': last dimension contains samples. + 'planar': third last dimension contains samples. +resolution_unit : int + The resolution unit stored in the TIFF tag. Usually 1 means no/unknown unit, + 2 means dpi (inch), 3 means dpc (centimeter). +resolution : (float, float, str) + A tuple formatted as (X_resolution, Y_resolution, unit). The unit is a + string representing one of the following units:: + + NONE # No unit or unit unknown + INCH # dpi + CENTIMETER # cpi + MILLIMETER + MICROMETER + +compression : int + Value indicating the compression algorithm used, e.g. 5 is LZW, + 7 is JPEG, 8 is deflate. + If 1, data are uncompressed. +predictor : int + Value 2 indicates horizontal differencing was used before compression, + while 3 indicates floating point horizontal differencing. + If 1, no prediction scheme was used before compression. +orientation : {'top_left', 'bottom_right', ...} + Oriented of image array. +is_rgb : bool + True if page contains a RGB image. +is_contig : bool + True if page contains a contiguous image. +is_tiled : bool + True if page contains tiled image. +is_palette : bool + True if page contains a palette-colored image and not OME or STK. +is_reduced : bool + True if page is a reduced image of another image. +is_shaped : bool + True if page contains shape in image_description tag. 
+is_fluoview : bool + True if page contains FluoView MM_STAMP tag. +is_nih : bool + True if page contains NIH image header. +is_micromanager : bool + True if page contains Micro-Manager metadata. +is_ome : bool + True if page contains OME-XML in image_description tag. +is_sgi : bool + True if page contains SGI image and tile depth tags. +is_mdgel : bool + True if page contains md_file_tag tag. +is_mediacy : bool + True if page contains Media Cybernetics Id tag. +is_stk : bool + True if page contains UIC2Tag tag. +is_lsm : bool + True if page contains LSM CZ_LSM_INFO tag. +description : str + Image description +description1 : str + Additional description +is_imagej : None or str + ImageJ metadata +software : str + Software used to create the TIFF file +datetime : datetime.datetime + Creation date and time + +Metadata for writing +-------------------- +photometric : {'minisblack', 'miniswhite', 'rgb'} + The color space of the image data. + By default this setting is inferred from the data shape. +planarconfig : {'contig', 'planar'} + Specifies if samples are stored contiguous or in separate planes. + By default this setting is inferred from the data shape. + 'contig': last dimension contains samples. + 'planar': third last dimension contains samples. +resolution : (float, float) or ((int, int), (int, int)) + X and Y resolution in dots per inch as float or rational numbers. +description : str + The subject of the image. Saved with the first page only. +compress : int + Values from 0 to 9 controlling the level of zlib (deflate) compression. + If 0, data are written uncompressed (default). +compression : str, (int, int) + Compression scheme used while writing the image. If omitted (default) the + image is not uncompressed. Compression cannot be used to write contiguous + series. Compressors may require certain data shapes, types or value ranges. + For example, JPEG compression requires grayscale or RGB(A), uint8 or 12-bit + uint16. JPEG compression is experimental. 
JPEG markers and TIFF tags may not + match. Only a limited set of compression schemes are implemented. 'ZLIB' is + short for ADOBE_DEFLATE. The value is written to the Compression tag. +compressionargs: + Extra arguments passed to compression codec, e.g., compression level. Refer + to the Imagecodecs implementation for supported arguments. +predictor : bool + If True, horizontal differencing is applied before compression. + Note that using an int literal 1 actually means no prediction scheme + will be used. +volume : bool + If True, volume data are stored in one tile (if applicable) using + the SGI image_depth and tile_depth tags. + Image width and depth must be multiple of 16. + Few software can read this format, e.g. MeVisLab. +writeshape : bool + If True, write the data shape to the image_description tag + if necessary and no other description is given. +extratags: sequence of tuples + Additional tags as [(code, dtype, count, value, writeonce)]. + + code : int + The TIFF tag Id. + dtype : str + Data type of items in 'value' in Python struct format. + One of B, s, H, I, 2I, b, h, i, f, d, Q, or q. + count : int + Number of data values. Not used for string values. + value : sequence + 'Count' values compatible with 'dtype'. + writeonce : bool + If True, the tag is written to the first page only. + +Notes +----- +Global metadata is stored with the first frame in a TIFF file. +Thus calling :py:meth:`Format.Writer.set_meta_data` after the first frame +was written has no effect. Also, global metadata is ignored if metadata is +provided via the `meta` argument of :py:meth:`Format.Writer.append_data`. + +If you have installed tifffile as a Python package, imageio will attempt +to use that as backend instead of the bundled backend. Doing so can +provide access to new performance improvements and bug fixes. 
+ +""" + +import datetime + +from ..core import Format +from ..core.request import URI_BYTES, URI_FILE + +import numpy as np +import warnings + + +try: + import tifffile as _tifffile +except ImportError: + warnings.warn( + "ImageIO's vendored tifffile backend is deprecated and will be" + " removed in ImageIO v3. Install the tifffile directly:" + " `pip install imageio[tifffile]`", + DeprecationWarning, + ) + from . import _tifffile + + +TIFF_FORMATS = (".tif", ".tiff", ".stk", ".lsm") +WRITE_METADATA_KEYS = ( + "photometric", + "planarconfig", + "resolution", + "description", + "compress", + "compression", + "compressionargs", + "predictor", + "volume", + "writeshape", + "extratags", + "datetime", +) +READ_METADATA_KEYS = ( + "planar_configuration", + "is_fluoview", + "is_nih", + "is_contig", + "is_micromanager", + "is_ome", + "is_lsm", + "is_palette", + "is_reduced", + "is_rgb", + "is_sgi", + "is_shaped", + "is_stk", + "is_tiled", + "is_mdgel", + "resolution_unit", + "compression", + "predictor", + "is_mediacy", + "orientation", + "description", + "description1", + "is_imagej", + "software", +) + + +class TiffFormat(Format): + """Provides support for a wide range of Tiff images using the tifffile + backend. + + Images that contain multiple pages can be read using ``imageio.mimread()`` + to read the individual pages, or ``imageio.volread()`` to obtain a + single (higher dimensional) array. + + Note that global metadata is stored with the first frame in a TIFF file. + Thus calling :py:meth:`Format.Writer.set_meta_data` after the first frame + was written has no effect. Also, global metadata is ignored if metadata is + provided via the `meta` argument of :py:meth:`Format.Writer.append_data`. + + If you have installed tifffile as a Python package, imageio will attempt + to use that as backend instead of the bundled backend. Doing so can + provide access to new performance improvements and bug fixes. 
+ + Parameters for reading + ---------------------- + offset : int + Optional start position of embedded file. By default this is + the current file position. + size : int + Optional size of embedded file. By default this is the number + of bytes from the 'offset' to the end of the file. + multifile : bool + If True (default), series may include pages from multiple files. + Currently applies to OME-TIFF only. + multifile_close : bool + If True (default), keep the handles of other files in multifile + series closed. This is inefficient when few files refer to + many pages. If False, the C runtime may run out of resources. + + Parameters for saving + --------------------- + bigtiff : bool + If True, the BigTIFF format is used. + byteorder : {'<', '>'} + The endianness of the data in the file. + By default this is the system's native byte order. + software : str + Name of the software used to create the image. + Saved with the first page only. + + Metadata for reading + -------------------- + planar_configuration : {'contig', 'planar'} + Specifies if samples are stored contiguous or in separate planes. + By default this setting is inferred from the data shape. + 'contig': last dimension contains samples. + 'planar': third last dimension contains samples. + resolution_unit : (float, float) or ((int, int), (int, int)) + X and Y resolution in dots per inch as float or rational numbers. + compression : int + Value indicating the compression algorithm used, e.g. 5 is LZW, + 7 is JPEG, 8 is deflate. + If 1, data are uncompressed. + predictor : int + Value 2 indicates horizontal differencing was used before compression, + while 3 indicates floating point horizontal differencing. + If 1, no prediction scheme was used before compression. + orientation : {'top_left', 'bottom_right', ...} + Oriented of image array. + is_rgb : bool + True if page contains a RGB image. + is_contig : bool + True if page contains a contiguous image. 
+ is_tiled : bool + True if page contains tiled image. + is_palette : bool + True if page contains a palette-colored image and not OME or STK. + is_reduced : bool + True if page is a reduced image of another image. + is_shaped : bool + True if page contains shape in image_description tag. + is_fluoview : bool + True if page contains FluoView MM_STAMP tag. + is_nih : bool + True if page contains NIH image header. + is_micromanager : bool + True if page contains Micro-Manager metadata. + is_ome : bool + True if page contains OME-XML in image_description tag. + is_sgi : bool + True if page contains SGI image and tile depth tags. + is_stk : bool + True if page contains UIC2Tag tag. + is_mdgel : bool + True if page contains md_file_tag tag. + is_mediacy : bool + True if page contains Media Cybernetics Id tag. + is_stk : bool + True if page contains UIC2Tag tag. + is_lsm : bool + True if page contains LSM CZ_LSM_INFO tag. + description : str + Image description + description1 : str + Additional description + is_imagej : None or str + ImageJ metadata + software : str + Software used to create the TIFF file + datetime : datetime.datetime + Creation date and time + + Metadata for writing + -------------------- + photometric : {'minisblack', 'miniswhite', 'rgb'} + The color space of the image data. + By default this setting is inferred from the data shape. + planarconfig : {'contig', 'planar'} + Specifies if samples are stored contiguous or in separate planes. + By default this setting is inferred from the data shape. + 'contig': last dimension contains samples. + 'planar': third last dimension contains samples. + resolution : (float, float) or ((int, int), (int, int)) + X and Y resolution in dots per inch as float or rational numbers. + description : str + The subject of the image. Saved with the first page only. + compress : int + Values from 0 to 9 controlling the level of zlib (deflate) compression. + If 0, data are written uncompressed (default). 
+ predictor : bool + If True, horizontal differencing is applied before compression. + Note that using an int literal 1 actually means no prediction scheme + will be used. + volume : bool + If True, volume data are stored in one tile (if applicable) using + the SGI image_depth and tile_depth tags. + Image width and depth must be multiple of 16. + Few software can read this format, e.g. MeVisLab. + writeshape : bool + If True, write the data shape to the image_description tag + if necessary and no other description is given. + extratags: sequence of tuples + Additional tags as [(code, dtype, count, value, writeonce)]. + + code : int + The TIFF tag Id. + dtype : str + Data type of items in 'value' in Python struct format. + One of B, s, H, I, 2I, b, h, i, f, d, Q, or q. + count : int + Number of data values. Not used for string values. + value : sequence + 'Count' values compatible with 'dtype'. + writeonce : bool + If True, the tag is written to the first page only. + """ + + def _can_read(self, request): + try: + _tifffile.TiffFile(request.get_file(), **request.kwargs) + except ValueError: + # vendored backend raises value exception + return False + except _tifffile.TiffFileError: # pragma: no-cover + # current version raises custom exception + return False + finally: + request.get_file().seek(0) + + return True + + def _can_write(self, request): + if request._uri_type in [URI_FILE, URI_BYTES]: + pass # special URI + elif request.extension not in self.extensions: + return False + + try: + _tifffile.TiffWriter(request.get_file(), **request.kwargs) + except ValueError: + # vendored backend raises value exception + return False + except _tifffile.TiffFileError: # pragma: no-cover + # current version raises custom exception + return False + finally: + request.get_file().seek(0) + return True + + # -- reader + + class Reader(Format.Reader): + def _open(self, **kwargs): + # Allow loading from http; tifffile uses seek, so download first + if 
self.request.filename.startswith(("http://", "https://")): + self._f = f = open(self.request.get_local_filename(), "rb") + else: + self._f = None + f = self.request.get_file() + self._tf = _tifffile.TiffFile(f, **kwargs) + + def _close(self): + self._tf.close() + if self._f is not None: + self._f.close() + + def _get_length(self): + return len(self._tf.series) + + def _get_data(self, index): + if index < 0 or index >= self._get_length(): + raise IndexError("Index out of range while reading from tiff file") + + im = self._tf.asarray(series=index) + meta = self._get_meta_data(index) + + return im, meta + + def _get_meta_data(self, index): + meta = {} + page = self._tf.pages[index or 0] + for key in READ_METADATA_KEYS: + try: + meta[key] = getattr(page, key) + except Exception: + pass + + # tifffile <= 0.12.1 use datetime, newer use DateTime + for key in ("datetime", "DateTime"): + try: + meta["datetime"] = datetime.datetime.strptime( + page.tags[key].value, "%Y:%m:%d %H:%M:%S" + ) + break + except Exception: + pass + + if 296 in page.tags: + meta["resolution_unit"] = page.tags[296].value.value + + if 282 in page.tags and 283 in page.tags and 296 in page.tags: + resolution_x = page.tags[282].value + resolution_y = page.tags[283].value + if resolution_x[1] == 0 or resolution_y[1] == 0: + warnings.warn( + "Ignoring resolution metadata, " + "because at least one direction has a 0 denominator.", + RuntimeWarning, + ) + else: + meta["resolution"] = ( + resolution_x[0] / resolution_x[1], + resolution_y[0] / resolution_y[1], + page.tags[296].value.name, + ) + + return meta + + # -- writer + class Writer(Format.Writer): + def _open(self, bigtiff=None, byteorder=None, software=None): + try: + self._tf = _tifffile.TiffWriter( + self.request.get_file(), + bigtiff=bigtiff, + byteorder=byteorder, + software=software, + ) + self._software = None + except TypeError: + # In tifffile >= 0.15, the `software` arg is passed to + # TiffWriter.save + self._tf = _tifffile.TiffWriter( + 
self.request.get_file(), bigtiff=bigtiff, byteorder=byteorder + ) + self._software = software + + self._meta = {} + self._frames_written = 0 + + def _close(self): + self._tf.close() + + def _append_data(self, im, meta): + if meta is not None: + meta = self._sanitize_meta(meta) + else: + # Use global metadata for first frame + meta = self._meta if self._frames_written == 0 else {} + if self._software is not None and self._frames_written == 0: + meta["software"] = self._software + # No need to check self.request.mode; tifffile figures out whether + # this is a single page, or all page data at once. + try: + # TiffWriter.save has been deprecated in version 2020.9.30 + write_meth = self._tf.write + except AttributeError: + write_meth = self._tf.save + write_meth(np.asanyarray(im), contiguous=False, **meta) + self._frames_written += 1 + + @staticmethod + def _sanitize_meta(meta): + ret = {} + for key, value in meta.items(): + if key in WRITE_METADATA_KEYS: + # Special case of previously read `predictor` int value + # 1(=NONE) translation to False expected by TiffWriter.save + if key == "predictor" and not isinstance(value, bool): + ret[key] = value > 1 + elif key == "compress" and value != 0: + warnings.warn( + "The use of `compress` is deprecated. Use `compression` and `compressionargs` instead.", + DeprecationWarning, + ) + + if _tifffile.__version__ < "2022": + ret["compression"] = (8, value) + else: + ret["compression"] = "zlib" + ret["compressionargs"] = {"level": value} + else: + ret[key] = value + return ret + + def set_meta_data(self, meta): + self._meta = self._sanitize_meta(meta) diff --git a/.venv/Lib/site-packages/imageio/plugins/tifffile_v3.py b/.venv/Lib/site-packages/imageio/plugins/tifffile_v3.py new file mode 100644 index 00000000..054eaf1a --- /dev/null +++ b/.venv/Lib/site-packages/imageio/plugins/tifffile_v3.py @@ -0,0 +1,413 @@ +"""Read/Write TIFF files using tifffile. + +.. 
note:: + To use this plugin you need to have `tifffile + `_ installed:: + + pip install tifffile + +This plugin wraps tifffile, a powerful library to manipulate TIFF files. It +superseeds our previous tifffile plugin and aims to expose all the features of +tifffile. + +The plugin treats individual TIFF series as ndimages. A series is a sequence of +TIFF pages that, when combined describe a meaningful unit, e.g., a volumetric +image (where each slice is stored on an individual page) or a multi-color +staining picture (where each stain is stored on an individual page). Different +TIFF flavors/variants use series in different ways and, as such, the resulting +reading behavior may vary depending on the program used while creating a +particular TIFF file. + +Methods +------- +.. note:: + Check the respective function for a list of supported kwargs and detailed + documentation. + +.. autosummary:: + :toctree: + + TifffilePlugin.read + TifffilePlugin.iter + TifffilePlugin.write + TifffilePlugin.properties + TifffilePlugin.metadata + +Additional methods available inside the :func:`imopen ` +context: + +.. 
autosummary:: + :toctree: + + TifffilePlugin.iter_pages + +""" + +from io import BytesIO +from typing import Any, Dict, Optional, cast +import warnings + +import numpy as np +import tifffile + +from ..core.request import URI_BYTES, InitializationError, Request +from ..core.v3_plugin_api import ImageProperties, PluginV3 +from ..typing import ArrayLike + + +def _get_resolution(page: tifffile.TiffPage) -> Dict[str, Any]: + metadata = {} + + try: + metadata["resolution_unit"] = page.tags[296].value.value + except KeyError: + # tag 296 missing + return metadata + + try: + resolution_x = page.tags[282].value + resolution_y = page.tags[283].value + + metadata["resolution"] = ( + resolution_x[0] / resolution_x[1], + resolution_y[0] / resolution_y[1], + ) + except KeyError: + # tag 282 or 283 missing + pass + except ZeroDivisionError: + warnings.warn( + "Ignoring resolution metadata because at least one direction has a 0 " + "denominator.", + RuntimeWarning, + ) + + return metadata + + +class TifffilePlugin(PluginV3): + """Support for tifffile as backend. + + Parameters + ---------- + request : iio.Request + A request object that represents the users intent. It provides a + standard interface for a plugin to access the various ImageResources. + Check the docs for details. + kwargs : Any + Additional kwargs are forwarded to tifffile's constructor, i.e. + to ``TiffFile`` for reading or ``TiffWriter`` for writing. 
+ + """ + + def __init__(self, request: Request, **kwargs) -> None: + super().__init__(request) + self._fh = None + + if request.mode.io_mode == "r": + try: + self._fh = tifffile.TiffFile(request.get_file(), **kwargs) + except tifffile.tifffile.TiffFileError: + raise InitializationError("Tifffile can not read this file.") + else: + self._fh = tifffile.TiffWriter(request.get_file(), **kwargs) + + # --------------------- + # Standard V3 Interface + # --------------------- + + def read(self, *, index: int = None, page: int = None, **kwargs) -> np.ndarray: + """Read a ndimage or page. + + The ndimage returned depends on the value of both ``index`` and + ``page``. ``index`` selects the series to read and ``page`` allows + selecting a single page from the selected series. If ``index=None``, + ``page`` is understood as a flat index, i.e., the selection ignores + individual series inside the file. If both ``index`` and ``page`` are + ``None``, then all the series are read and returned as a batch. + + Parameters + ---------- + index : int + If ``int``, select the ndimage (series) located at that index inside + the file and return ``page`` from it. If ``None`` and ``page`` is + ``int`` read the page located at that (flat) index inside the file. + If ``None`` and ``page=None``, read all ndimages from the file and + return them as a batch. + page : int + If ``None`` return the full selected ndimage. If ``int``, read the + page at the selected index and return it. + kwargs : Any + Additional kwargs are forwarded to TiffFile's ``as_array`` method. + + Returns + ------- + ndarray : np.ndarray + The decoded ndimage or page. 
+ """ + + if "key" not in kwargs: + kwargs["key"] = page + elif page is not None: + raise ValueError("Can't use `page` and `key` at the same time.") + + # set plugin default for ``index`` + if index is not None and "series" in kwargs: + raise ValueError("Can't use `series` and `index` at the same time.") + elif "series" in kwargs: + index = kwargs.pop("series") + elif index is not None: + pass + else: + index = 0 + + if index is Ellipsis and page is None: + # read all series in the file and return them as a batch + ndimage = np.stack([x for x in self.iter(**kwargs)]) + else: + index = None if index is Ellipsis else index + ndimage = self._fh.asarray(series=index, **kwargs) + + return ndimage + + def iter(self, **kwargs) -> np.ndarray: + """Yield ndimages from the TIFF. + + Parameters + ---------- + kwargs : Any + Additional kwargs are forwarded to the TiffPageSeries' ``as_array`` + method. + + Yields + ------ + ndimage : np.ndarray + A decoded ndimage. + """ + + for sequence in self._fh.series: + yield sequence.asarray(**kwargs) + + def write( + self, ndimage: ArrayLike, *, is_batch: bool = False, **kwargs + ) -> Optional[bytes]: + """Save a ndimage as TIFF. + + Parameters + ---------- + ndimage : ArrayLike + The ndimage to encode and write to the ImageResource. + is_batch : bool + If True, the first dimension of the given ndimage is treated as a + batch dimension and each element will create a new series. + kwargs : Any + Additional kwargs are forwarded to TiffWriter's ``write`` method. + + Returns + ------- + encoded_image : bytes + If the ImageResource is ``""``, return the encoded bytes. + Otherwise write returns None. + + Notes + ----- + Incremental writing is supported. Subsequent calls to ``write`` will + create new series unless ``contiguous=True`` is used, in which case the + call to write will append to the current series. 
+ + """ + + if not is_batch: + ndimage = np.asarray(ndimage)[None, :] + + for image in ndimage: + self._fh.write(image, **kwargs) + + if self._request._uri_type == URI_BYTES: + self._fh.close() + file = cast(BytesIO, self._request.get_file()) + return file.getvalue() + + def metadata( + self, *, index: int = Ellipsis, page: int = None, exclude_applied: bool = True + ) -> Dict[str, Any]: + """Format-Specific TIFF metadata. + + The metadata returned depends on the value of both ``index`` and + ``page``. ``index`` selects a series and ``page`` allows selecting a + single page from the selected series. If ``index=Ellipsis``, ``page`` is + understood as a flat index, i.e., the selection ignores individual + series inside the file. If ``index=Ellipsis`` and ``page=None`` then + global (file-level) metadata is returned. + + Parameters + ---------- + index : int + Select the series of which to extract metadata from. If Ellipsis, treat + page as a flat index into the file's pages. + page : int + If not None, select the page of which to extract metadata from. If + None, read series-level metadata or, if ``index=...`` global, + file-level metadata. + exclude_applied : bool + For API compatibility. Currently ignored. + + Returns + ------- + metadata : dict + A dictionary with information regarding the tiff flavor (file-level) + or tiff tags (page-level). + """ + + if index is not Ellipsis and page is not None: + target = self._fh.series[index].pages[page] + elif index is not Ellipsis and page is None: + # This is based on my understanding that series-level metadata is + # stored in the first TIFF page. 
+ target = self._fh.series[index].pages[0] + elif index is Ellipsis and page is not None: + target = self._fh.pages[page] + else: + target = None + + metadata = {} + if target is None: + # return file-level metadata + metadata["byteorder"] = self._fh.byteorder + + for flag in tifffile.TIFF.FILE_FLAGS: + flag_value = getattr(self._fh, "is_" + flag) + metadata["is_" + flag] = flag_value + + if flag_value and hasattr(self._fh, flag + "_metadata"): + flavor_metadata = getattr(self._fh, flag + "_metadata") + if isinstance(flavor_metadata, tuple): + metadata.update(flavor_metadata[0]) + else: + metadata.update(flavor_metadata) + else: + # tifffile may return a TiffFrame instead of a page + target = target.keyframe + + metadata.update({tag.name: tag.value for tag in target.tags}) + metadata.update( + { + "planar_configuration": target.planarconfig, + "compression": target.compression, + "predictor": target.predictor, + "orientation": None, # TODO + "description1": target.description1, + "description": target.description, + "software": target.software, + **_get_resolution(target), + "datetime": target.datetime, + } + ) + + return metadata + + def properties(self, *, index: int = None, page: int = None) -> ImageProperties: + """Standardized metadata. + + The properties returned depend on the value of both ``index`` and + ``page``. ``index`` selects a series and ``page`` allows selecting a + single page from the selected series. If ``index=Ellipsis``, ``page`` is + understood as a flat index, i.e., the selection ignores individual + series inside the file. If ``index=Ellipsis`` and ``page=None`` then + global (file-level) properties are returned. If ``index=Ellipsis`` + and ``page=...``, file-level properties for the flattened index are + returned. + + Parameters + ---------- + index : int + If ``int``, select the ndimage (series) located at that index inside + the file. 
If ``Ellipsis`` and ``page`` is ``int`` extract the + properties of the page located at that (flat) index inside the file. + If ``Ellipsis`` and ``page=None``, return the properties for the + batch of all ndimages in the file. + page : int + If ``None`` return the properties of the full ndimage. If ``...`` + return the properties of the flattened index. If ``int``, + return the properties of the page at the selected index only. + + Returns + ------- + image_properties : ImageProperties + The standardized metadata (properties) of the selected ndimage or series. + + """ + index = index or 0 + page_idx = 0 if page in (None, Ellipsis) else page + + if index is Ellipsis: + target_page = self._fh.pages[page_idx] + else: + target_page = self._fh.series[index].pages[page_idx] + + if index is Ellipsis and page is None: + n_series = len(self._fh.series) + props = ImageProperties( + shape=(n_series, *target_page.shape), + dtype=target_page.dtype, + n_images=n_series, + is_batch=True, + spacing=_get_resolution(target_page).get("resolution"), + ) + elif index is Ellipsis and page is Ellipsis: + n_pages = len(self._fh.pages) + props = ImageProperties( + shape=(n_pages, *target_page.shape), + dtype=target_page.dtype, + n_images=n_pages, + is_batch=True, + spacing=_get_resolution(target_page).get("resolution"), + ) + else: + props = ImageProperties( + shape=target_page.shape, + dtype=target_page.dtype, + is_batch=False, + spacing=_get_resolution(target_page).get("resolution"), + ) + + return props + + def close(self) -> None: + if self._fh is not None: + self._fh.close() + + super().close() + + # ------------------------------ + # Add-on Interface inside imopen + # ------------------------------ + + def iter_pages(self, index=..., **kwargs): + """Yield pages from a TIFF file. + + This generator walks over the flat index of the pages inside an + ImageResource and yields them in order. + + Parameters + ---------- + index : int + The index of the series to yield pages from. 
If Ellipsis, walk over + the file's flat index (and ignore individual series). + kwargs : Any + Additional kwargs are passed to TiffPage's ``as_array`` method. + + Yields + ------ + page : np.ndarray + A page stored inside the TIFF file. + + """ + + if index is Ellipsis: + pages = self._fh.pages + else: + pages = self._fh.series[index] + + for page in pages: + yield page.asarray(**kwargs) diff --git a/.venv/Lib/site-packages/imageio/py.typed b/.venv/Lib/site-packages/imageio/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/imageio/testing.py b/.venv/Lib/site-packages/imageio/testing.py new file mode 100644 index 00000000..535b1386 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/testing.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +# Distributed under the (new) BSD License. See LICENSE.txt for more info. + +""" Functionality used for testing. This code itself is not covered in tests. +""" + +import os +import sys +import pytest + +# Get root dir +THIS_DIR = os.path.abspath(os.path.dirname(__file__)) +ROOT_DIR = THIS_DIR +for i in range(9): + ROOT_DIR = os.path.dirname(ROOT_DIR) + if os.path.isfile(os.path.join(ROOT_DIR, ".gitignore")): + break + + +# Functions to use from invoke tasks + + +def test_unit(cov_report="term"): + """Run all unit tests. Returns exit code.""" + orig_dir = os.getcwd() + os.chdir(ROOT_DIR) + try: + _clear_imageio() + _enable_faulthandler() + return pytest.main( + [ + "-v", + "--cov", + "imageio", + "--cov-config", + ".coveragerc", + "--cov-report", + cov_report, + "tests", + ] + ) + finally: + os.chdir(orig_dir) + import imageio + + print("Tests were performed on", str(imageio)) + + +# Requirements + + +def _enable_faulthandler(): + """Enable faulthandler (if we can), so that we get tracebacks + on segfaults. 
+ """ + try: + import faulthandler + + faulthandler.enable() + print("Faulthandler enabled") + except Exception: + print("Could not enable faulthandler") + + +def _clear_imageio(): + # Remove ourselves from sys.modules to force an import + for key in list(sys.modules.keys()): + if key.startswith("imageio"): + del sys.modules[key] diff --git a/.venv/Lib/site-packages/imageio/typing.py b/.venv/Lib/site-packages/imageio/typing.py new file mode 100644 index 00000000..1e97d5b1 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/typing.py @@ -0,0 +1,17 @@ +from io import BytesIO +from typing import Union, BinaryIO +from pathlib import Path + +try: + from numpy.typing import ArrayLike +except ImportError: + # numpy<1.20 fall back to using ndarray + from numpy import ndarray as ArrayLike + +ImageResource = Union[str, bytes, BytesIO, Path, BinaryIO] + + +__all__ = [ + "ArrayLike", + "ImageResource", +] diff --git a/.venv/Lib/site-packages/imageio/v2.py b/.venv/Lib/site-packages/imageio/v2.py new file mode 100644 index 00000000..db2963b7 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/v2.py @@ -0,0 +1,676 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +import re +import warnings +from numbers import Number +from pathlib import Path +from typing import Dict + +import numpy as np + +from imageio.core.legacy_plugin_wrapper import LegacyPlugin +from imageio.core.util import Array +from imageio.core.v3_plugin_api import PluginV3 + +from . 
import formats +from .config import known_extensions, known_plugins +from .core import RETURN_BYTES +from .core.imopen import imopen + +MEMTEST_DEFAULT_MIM = "256MB" +MEMTEST_DEFAULT_MVOL = "1GB" + + +mem_re = re.compile(r"^(\d+\.?\d*)\s*([kKMGTPEZY]?i?)B?$") +sizes = {"": 1, None: 1} +for i, si in enumerate([""] + list("kMGTPEZY")): + sizes[si] = 1000**i + if si: + sizes[si.upper() + "i"] = 1024**i + + +def to_nbytes(arg, default=None): + if not arg: + arg = float("inf") + + if arg is True: + arg = default + + if isinstance(arg, Number): + return arg + + match = mem_re.match(arg) + if match is None: + raise ValueError( + "Memory size could not be parsed " + "(is your capitalisation correct?): {}".format(arg) + ) + + num, unit = match.groups() + + try: + return float(num) * sizes[unit] + except KeyError: # pragma: no cover + # Note: I don't think we can reach this + raise ValueError( + "Memory size unit not recognised " + "(is your capitalisation correct?): {}".format(unit) + ) + + +def help(name=None): + """help(name=None) + + Print the documentation of the format specified by name, or a list + of supported formats if name is omitted. + + Parameters + ---------- + name : str + Can be the name of a format, a filename extension, or a full + filename. See also the :doc:`formats page <../formats/index>`. + """ + if not name: + print(formats) + else: + print(formats[name]) + + +def decypher_format_arg(format_name: str) -> Dict[str, str]: + """Split format into plugin and format + + The V2 API aliases plugins and supported formats. This function + splits these so that they can be fed separately to `iio.imopen`. 
+ + """ + + plugin = None + extension = None + + if format_name is None: + pass # nothing to do + elif Path(format_name).suffix.lower() in known_extensions: + extension = Path(format_name).suffix.lower() + elif format_name in known_plugins: + plugin = format_name + elif format_name.upper() in known_plugins: + plugin = format_name.upper() + elif format_name.lower() in known_extensions: + extension = format_name.lower() + elif "." + format_name.lower() in known_extensions: + extension = "." + format_name.lower() + else: + raise IndexError(f"No format known by name `{plugin}`.") + + return {"plugin": plugin, "extension": extension} + + +class LegacyReader: + def __init__(self, plugin_instance: PluginV3, **kwargs): + self.instance = plugin_instance + self.last_index = 0 + self.closed = False + + if ( + type(self.instance).__name__ == "PillowPlugin" + and kwargs.get("pilmode") is not None + ): + kwargs["mode"] = kwargs["pilmode"] + del kwargs["pilmode"] + + self.read_args = kwargs + + def close(self): + if not self.closed: + self.instance.close() + self.closed = True + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + + def __del__(self): + self.close() + + @property + def request(self): + return self.instance.request + + @property + def format(self): + raise TypeError("V3 Plugins don't have a format.") + + def get_length(self): + return self.instance.properties(index=...).n_images + + def get_data(self, index): + self.last_index = index + img = self.instance.read(index=index, **self.read_args) + metadata = self.instance.metadata(index=index, exclude_applied=False) + return Array(img, metadata) + + def get_next_data(self): + return self.get_data(self.last_index + 1) + + def set_image_index(self, index): + self.last_index = index - 1 + + def get_meta_data(self, index=None): + return self.instance.metadata(index=index, exclude_applied=False) + + def iter_data(self): + for idx, img in enumerate(self.instance.iter()): + 
metadata = self.instance.metadata(index=idx, exclude_applied=False) + yield Array(img, metadata) + + def __iter__(self): + return self.iter_data() + + def __len__(self): + return self.get_length() + + +class LegacyWriter: + def __init__(self, plugin_instance: PluginV3, **kwargs): + self.instance = plugin_instance + self.last_index = 0 + self.closed = False + + if type(self.instance).__name__ == "PillowPlugin" and "pilmode" in kwargs: + kwargs["mode"] = kwargs["pilmode"] + del kwargs["pilmode"] + + self.write_args = kwargs + + def close(self): + if not self.closed: + self.instance.close() + self.closed = True + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + + def __del__(self): + self.close() + + @property + def request(self): + return self.instance.request + + @property + def format(self): + raise TypeError("V3 Plugins don't have a format.") + + def append_data(self, im, meta=None): + # TODO: write metadata in the future; there is currently no + # generic way to do this with v3 plugins :( + if meta is not None: + warnings.warn( + "V3 Plugins currently don't have a uniform way to" + " write metadata, so any metadata is ignored." + ) + + # total_meta = dict() + # if meta is None: + # meta = {} + # if hasattr(im, "meta") and isinstance(im.meta, dict): + # total_meta.update(im.meta) + # total_meta.update(meta) + + return self.instance.write(im, **self.write_args) + + def set_meta_data(self, meta): + # TODO: write metadata + raise NotImplementedError( + "V3 Plugins don't have a uniform way to write metadata (yet)." 
+ ) + + +def is_batch(ndimage): + if isinstance(ndimage, (list, tuple)): + return True + + ndimage = np.asarray(ndimage) + if ndimage.ndim <= 2: + return False + elif ndimage.ndim == 3 and ndimage.shape[2] < 5: + return False + + return True + + +def is_volume(ndimage): + ndimage = np.asarray(ndimage) + if not is_batch(ndimage): + return False + + if ndimage.ndim == 3 and ndimage.shape[2] >= 5: + return True + elif ndimage.ndim == 4 and ndimage.shape[3] < 5: + return True + else: + return False + + +# Base functions that return a reader/writer + + +def get_reader(uri, format=None, mode="?", **kwargs): + """get_reader(uri, format=None, mode='?', **kwargs) + + Returns a :class:`.Reader` object which can be used to read data + and meta data from the specified file. + + Parameters + ---------- + uri : {str, pathlib.Path, bytes, file} + The resource to load the image from, e.g. a filename, pathlib.Path, + http address or file object, see the docs for more info. + format : str + The format to use to read the file. By default imageio selects + the appropriate for you based on the filename and its contents. + mode : {'i', 'I', 'v', 'V', '?'} + Used to give the reader a hint on what the user expects (default "?"): + "i" for an image, "I" for multiple images, "v" for a volume, + "V" for multiple volumes, "?" for don't care. + kwargs : ... + Further keyword arguments are passed to the reader. See :func:`.help` + to see what arguments are available for a particular format. + """ + + imopen_args = decypher_format_arg(format) + imopen_args["legacy_mode"] = True + + image_file = imopen(uri, "r" + mode, **imopen_args) + + if isinstance(image_file, LegacyPlugin): + return image_file.legacy_get_reader(**kwargs) + else: + return LegacyReader(image_file, **kwargs) + + +def get_writer(uri, format=None, mode="?", **kwargs): + """get_writer(uri, format=None, mode='?', **kwargs) + + Returns a :class:`.Writer` object which can be used to write data + and meta data to the specified file. 
+ + Parameters + ---------- + uri : {str, pathlib.Path, file} + The resource to write the image to, e.g. a filename, pathlib.Path + or file object, see the docs for more info. + format : str + The format to use to write the file. By default imageio selects + the appropriate for you based on the filename. + mode : {'i', 'I', 'v', 'V', '?'} + Used to give the writer a hint on what the user expects (default '?'): + "i" for an image, "I" for multiple images, "v" for a volume, + "V" for multiple volumes, "?" for don't care. + kwargs : ... + Further keyword arguments are passed to the writer. See :func:`.help` + to see what arguments are available for a particular format. + """ + + imopen_args = decypher_format_arg(format) + imopen_args["legacy_mode"] = True + + image_file = imopen(uri, "w" + mode, **imopen_args) + if isinstance(image_file, LegacyPlugin): + return image_file.legacy_get_writer(**kwargs) + else: + return LegacyWriter(image_file, **kwargs) + + +# Images + + +def imread(uri, format=None, **kwargs): + """imread(uri, format=None, **kwargs) + + Reads an image from the specified file. Returns a numpy array, which + comes with a dict of meta data at its 'meta' attribute. + + Note that the image data is returned as-is, and may not always have + a dtype of uint8 (and thus may differ from what e.g. PIL returns). + + Parameters + ---------- + uri : {str, pathlib.Path, bytes, file} + The resource to load the image from, e.g. a filename, pathlib.Path, + http address or file object, see the docs for more info. + format : str + The format to use to read the file. By default imageio selects + the appropriate for you based on the filename and its contents. + kwargs : ... + Further keyword arguments are passed to the reader. See :func:`.help` + to see what arguments are available for a particular format. 
+ """ + + imopen_args = decypher_format_arg(format) + imopen_args["legacy_mode"] = True + + with imopen(uri, "ri", **imopen_args) as file: + result = file.read(index=0, **kwargs) + + return result + + +def imwrite(uri, im, format=None, **kwargs): + """imwrite(uri, im, format=None, **kwargs) + + Write an image to the specified file. + + Parameters + ---------- + uri : {str, pathlib.Path, file} + The resource to write the image to, e.g. a filename, pathlib.Path + or file object, see the docs for more info. + im : numpy.ndarray + The image data. Must be NxM, NxMx3 or NxMx4. + format : str + The format to use to write the file. By default imageio selects + the appropriate for you based on the filename and its contents. + kwargs : ... + Further keyword arguments are passed to the writer. See :func:`.help` + to see what arguments are available for a particular format. + """ + + # Test image + imt = type(im) + im = np.asarray(im) + if not np.issubdtype(im.dtype, np.number): + raise ValueError("Image is not numeric, but {}.".format(imt.__name__)) + + if is_batch(im) or im.ndim < 2: + raise ValueError("Image must be 2D (grayscale, RGB, or RGBA).") + + imopen_args = decypher_format_arg(format) + imopen_args["legacy_mode"] = True + with imopen(uri, "wi", **imopen_args) as file: + return file.write(im, **kwargs) + + +# Multiple images + + +def mimread(uri, format=None, memtest=MEMTEST_DEFAULT_MIM, **kwargs): + """mimread(uri, format=None, memtest="256MB", **kwargs) + + Reads multiple images from the specified file. Returns a list of + numpy arrays, each with a dict of meta data at its 'meta' attribute. + + Parameters + ---------- + uri : {str, pathlib.Path, bytes, file} + The resource to load the images from, e.g. a filename,pathlib.Path, + http address or file object, see the docs for more info. + format : str + The format to use to read the file. By default imageio selects + the appropriate for you based on the filename and its contents. 
+ memtest : {bool, int, float, str} + If truthy, this function will raise an error if the resulting + list of images consumes greater than the amount of memory specified. + This is to protect the system from using so much memory that it needs + to resort to swapping, and thereby stall the computer. E.g. + ``mimread('hunger_games.avi')``. + + If the argument is a number, that will be used as the threshold number + of bytes. + + If the argument is a string, it will be interpreted as a number of bytes with + SI/IEC prefixed units (e.g. '1kB', '250MiB', '80.3YB'). + + - Units are case sensitive + - k, M etc. represent a 1000-fold change, where Ki, Mi etc. represent 1024-fold + - The "B" is optional, but if present, must be capitalised + + If the argument is True, the default will be used, for compatibility reasons. + + Default: '256MB' + kwargs : ... + Further keyword arguments are passed to the reader. See :func:`.help` + to see what arguments are available for a particular format. + """ + + # used for mimread and mvolread + nbyte_limit = to_nbytes(memtest, MEMTEST_DEFAULT_MIM) + + images = list() + nbytes = 0 + + imopen_args = decypher_format_arg(format) + imopen_args["legacy_mode"] = True + with imopen(uri, "rI", **imopen_args) as file: + for image in file.iter(**kwargs): + images.append(image) + nbytes += image.nbytes + if nbytes > nbyte_limit: + raise RuntimeError( + "imageio.mimread() has read over {}B of " + "image data.\nStopped to avoid memory problems." + " Use imageio.get_reader(), increase threshold, or memtest=False".format( + int(nbyte_limit) + ) + ) + + if len(images) == 1 and is_batch(images[0]): + images = [*images[0]] + + return images + + +def mimwrite(uri, ims, format=None, **kwargs): + """mimwrite(uri, ims, format=None, **kwargs) + + Write multiple images to the specified file. + + Parameters + ---------- + uri : {str, pathlib.Path, file} + The resource to write the images to, e.g. 
a filename, pathlib.Path + or file object, see the docs for more info. + ims : sequence of numpy arrays + The image data. Each array must be NxM, NxMx3 or NxMx4. + format : str + The format to use to read the file. By default imageio selects + the appropriate for you based on the filename and its contents. + kwargs : ... + Further keyword arguments are passed to the writer. See :func:`.help` + to see what arguments are available for a particular format. + """ + + if not is_batch(ims): + raise ValueError("Image data must be a sequence of ndimages.") + + imopen_args = decypher_format_arg(format) + imopen_args["legacy_mode"] = True + with imopen(uri, "wI", **imopen_args) as file: + return file.write(ims, is_batch=True, **kwargs) + + +# Volumes + + +def volread(uri, format=None, **kwargs): + """volread(uri, format=None, **kwargs) + + Reads a volume from the specified file. Returns a numpy array, which + comes with a dict of meta data at its 'meta' attribute. + + Parameters + ---------- + uri : {str, pathlib.Path, bytes, file} + The resource to load the volume from, e.g. a filename, pathlib.Path, + http address or file object, see the docs for more info. + format : str + The format to use to read the file. By default imageio selects + the appropriate for you based on the filename and its contents. + kwargs : ... + Further keyword arguments are passed to the reader. See :func:`.help` + to see what arguments are available for a particular format. + """ + + imopen_args = decypher_format_arg(format) + imopen_args["legacy_mode"] = True + with imopen(uri, "rv", **imopen_args) as file: + return file.read(index=0, **kwargs) + + +def volwrite(uri, im, format=None, **kwargs): + """volwrite(uri, vol, format=None, **kwargs) + + Write a volume to the specified file. + + Parameters + ---------- + uri : {str, pathlib.Path, file} + The resource to write the image to, e.g. a filename, pathlib.Path + or file object, see the docs for more info. + vol : numpy.ndarray + The image data. 
Must be NxMxL (or NxMxLxK if each voxel is a tuple). + format : str + The format to use to read the file. By default imageio selects + the appropriate for you based on the filename and its contents. + kwargs : ... + Further keyword arguments are passed to the writer. See :func:`.help` + to see what arguments are available for a particular format. + """ + + # Test image + im = np.asarray(im) + if not is_volume(im): + raise ValueError("Image must be 3D, or 4D if each voxel is a tuple.") + + imopen_args = decypher_format_arg(format) + imopen_args["legacy_mode"] = True + + with imopen(uri, "wv", **imopen_args) as file: + return file.write(im, is_batch=False, **kwargs) + + +# Multiple volumes + + +def mvolread(uri, format=None, memtest=MEMTEST_DEFAULT_MVOL, **kwargs): + """mvolread(uri, format=None, memtest='1GB', **kwargs) + + Reads multiple volumes from the specified file. Returns a list of + numpy arrays, each with a dict of meta data at its 'meta' attribute. + + Parameters + ---------- + uri : {str, pathlib.Path, bytes, file} + The resource to load the volumes from, e.g. a filename, pathlib.Path, + http address or file object, see the docs for more info. + format : str + The format to use to read the file. By default imageio selects + the appropriate for you based on the filename and its contents. + memtest : {bool, int, float, str} + If truthy, this function will raise an error if the resulting + list of images consumes greater than the amount of memory specified. + This is to protect the system from using so much memory that it needs + to resort to swapping, and thereby stall the computer. E.g. + ``mimread('hunger_games.avi')``. + + If the argument is a number, that will be used as the threshold number + of bytes. + + If the argument is a string, it will be interpreted as a number of bytes with + SI/IEC prefixed units (e.g. '1kB', '250MiB', '80.3YB'). + + - Units are case sensitive + - k, M etc. represent a 1000-fold change, where Ki, Mi etc. 
represent 1024-fold + - The "B" is optional, but if present, must be capitalised + + If the argument is True, the default will be used, for compatibility reasons. + + Default: '1GB' + kwargs : ... + Further keyword arguments are passed to the reader. See :func:`.help` + to see what arguments are available for a particular format. + """ + + # used for mimread and mvolread + nbyte_limit = to_nbytes(memtest, MEMTEST_DEFAULT_MVOL) + + images = list() + nbytes = 0 + imopen_args = decypher_format_arg(format) + imopen_args["legacy_mode"] = True + with imopen(uri, "rV", **imopen_args) as file: + for image in file.iter(**kwargs): + images.append(image) + nbytes += image.nbytes + if nbytes > nbyte_limit: + raise RuntimeError( + "imageio.mimread() has read over {}B of " + "image data.\nStopped to avoid memory problems." + " Use imageio.get_reader(), increase threshold, or memtest=False".format( + int(nbyte_limit) + ) + ) + + return images + + +def mvolwrite(uri, ims, format=None, **kwargs): + """mvolwrite(uri, vols, format=None, **kwargs) + + Write multiple volumes to the specified file. + + Parameters + ---------- + uri : {str, pathlib.Path, file} + The resource to write the volumes to, e.g. a filename, pathlib.Path + or file object, see the docs for more info. + ims : sequence of numpy arrays + The image data. Each array must be NxMxL (or NxMxLxK if each + voxel is a tuple). + format : str + The format to use to read the file. By default imageio selects + the appropriate for you based on the filename and its contents. + kwargs : ... + Further keyword arguments are passed to the writer. See :func:`.help` + to see what arguments are available for a particular format. 
+ """ + + for im in ims: + if not is_volume(im): + raise ValueError("Image must be 3D, or 4D if each voxel is a tuple.") + + imopen_args = decypher_format_arg(format) + imopen_args["legacy_mode"] = True + with imopen(uri, "wV", **imopen_args) as file: + return file.write(ims, is_batch=True, **kwargs) + + +# aliases +read = get_reader +save = get_writer +imsave = imwrite +mimsave = mimwrite +volsave = volwrite +mvolsave = mvolwrite + +__all__ = [ + "imread", + "mimread", + "volread", + "mvolread", + "imwrite", + "mimwrite", + "volwrite", + "mvolwrite", + # misc + "help", + "get_reader", + "get_writer", + "RETURN_BYTES", +] diff --git a/.venv/Lib/site-packages/imageio/v2.pyi b/.venv/Lib/site-packages/imageio/v2.pyi new file mode 100644 index 00000000..19adc32e --- /dev/null +++ b/.venv/Lib/site-packages/imageio/v2.pyi @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- +# imageio is distributed under the terms of the (new) BSD License. + +from typing import Dict, Literal, Union, List, overload + +import numpy as np + +from .core.imopen import imopen +from .core import RETURN_BYTES, Array +from .typing import ImageResource, ArrayLike +from .core.format import Format + +MEMTEST_DEFAULT_MIM = "256MB" +MEMTEST_DEFAULT_MVOL = "1GB" + +def to_nbytes(arg: float = None, default=None) -> float: ... +def help(name=None) -> None: ... +def decypher_format_arg(format_name: Union[str, None]) -> Dict[str, str]: ... +def get_reader( + uri: ImageResource, format: Format = None, mode: str = "?", **kwargs +) -> Format.Reader: ... +def get_writer( + uri: ImageResource, format: Format = None, mode: str = "?", **kwargs +) -> Format.Writer: ... +def imread(uri: ImageResource, format: Format = None, **kwargs) -> Array: ... +@overload +def imwrite( + uri: Literal[""], im: ArrayLike, format: Format = None, **kwargs +) -> bytes: ... +@overload +def imwrite( + uri: ImageResource, im: ArrayLike, format: Format = None, **kwargs +) -> None: ... 
+def mimread( + uri: ImageResource, format: Format = None, memtest=MEMTEST_DEFAULT_MIM, **kwargs +) -> List[Array]: ... +@overload +def mimwrite( + uri: Literal[""], ims: List[ArrayLike], format: Format = None, **kwargs +) -> bytes: ... +@overload +def mimwrite( + uri: ImageResource, ims: List[ArrayLike], format: Format = None, **kwargs +) -> None: ... +def volread(uri: ImageResource, format: Format = None, **kwargs) -> Array: ... +@overload +def volwrite( + uri: Literal[""], im: ArrayLike, format: Format = None, **kwargs +) -> bytes: ... +@overload +def volwrite( + uri: ImageResource, im: ArrayLike, format: Format = None, **kwargs +) -> None: ... +def mvolread( + uri: ImageResource, + format: Format = None, + memtest: float = MEMTEST_DEFAULT_MVOL, + **kwargs +) -> List[Array]: ... +@overload +def mvolwrite( + uri: Literal[""], ims: List[ArrayLike], format: Format = None, **kwargs +) -> bytes: ... +@overload +def mvolwrite( + uri: ImageResource, ims: List[ArrayLike], format: Format = None, **kwargs +) -> None: ... + +# aliases +read = get_reader +save = get_writer +imsave = imwrite +mimsave = mimwrite +volsave = volwrite +mvolsave = mvolwrite diff --git a/.venv/Lib/site-packages/imageio/v3.py b/.venv/Lib/site-packages/imageio/v3.py new file mode 100644 index 00000000..65d36e57 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/v3.py @@ -0,0 +1,259 @@ +import numpy as np + +from .core.imopen import imopen + + +def imread(uri, *, index=None, plugin=None, extension=None, format_hint=None, **kwargs): + """Read an ndimage from a URI. + + Opens the given URI and reads an ndimage from it. The exact behavior + depends on both the file type and plugin used to open the file. To learn + about the exact behavior, check the documentation of the relevant plugin. + Typically, imread attempts to read all data stored in the URI. + + Parameters + ---------- + uri : {str, pathlib.Path, bytes, file} + The resource to load the image from, e.g. 
a filename, pathlib.Path, + http address or file object, see the docs for more info. + index : {int, Ellipsis, None} + If the ImageResource contains multiple ndimages, and index is an + integer, select the index-th ndimage from among them and return it. If + index is an ellipsis (...), read all ndimages in the file and stack them + along a new batch dimension. If index is None, let the plugin decide. + plugin : {str, None} + The plugin to use. If set to None (default) imread will perform a + search for a matching plugin. If not None, this takes priority over + the provided format hint (if present). + extension : str + If not None, treat the provided ImageResource as if it had the given + extension. This affects the order in which backends are considered. + format_hint : str + Deprecated. Use `extension` instead. + **kwargs : + Additional keyword arguments will be passed to the plugin's read call. + + Returns + ------- + image : ndimage + The ndimage located at the given URI. + """ + + plugin_kwargs = { + "legacy_mode": False, + "plugin": plugin, + "format_hint": format_hint, + "extension": extension, + } + + call_kwargs = kwargs + if index is not None: + call_kwargs["index"] = index + + with imopen(uri, "r", **plugin_kwargs) as img_file: + return np.asarray(img_file.read(**call_kwargs)) + + +def imiter(uri, *, plugin=None, extension=None, format_hint=None, **kwargs): + """Read a sequence of ndimages from a URI. + + Returns an iterable that yields ndimages from the given URI. The exact + behavior depends on both, the file type and plugin used to open the file. + To learn about the exact behavior, check the documentation of the relevant + plugin. + + Parameters + ---------- + uri : {str, pathlib.Path, bytes, file} + The resource to load the image from, e.g. a filename, pathlib.Path, + http address or file object, see the docs for more info. + plugin : {str, None} + The plugin to use. If set to None (default) imiter will perform a + search for a matching plugin. 
If not None, this takes priority over + the provided format hint (if present). + extension : str + If not None, treat the provided ImageResource as if it had the given + extension. This affects the order in which backends are considered. + format_hint : str + Deprecated. Use `extension` instead. + **kwargs : + Additional keyword arguments will be passed to the plugin's ``iter`` + call. + + Yields + ------ + image : ndimage + The next ndimage located at the given URI. + + """ + + with imopen( + uri, + "r", + legacy_mode=False, + plugin=plugin, + format_hint=format_hint, + extension=extension, + ) as img_file: + for image in img_file.iter(**kwargs): + # Note: casting to ndarray here to ensure compatibility + # with the v2.9 API + yield np.asarray(image) + + +def imwrite(uri, image, *, plugin=None, extension=None, format_hint=None, **kwargs): + """Write an ndimage to the given URI. + + The exact behavior depends on the file type and plugin used. To learn about + the exact behavior, check the documentation of the relevant plugin. + + Parameters + ---------- + uri : {str, pathlib.Path, bytes, file} + The resource to save the image to, e.g. a filename, pathlib.Path, + http address or file object, check the docs for more info. + image : np.ndarray + The image to write to disk. + plugin : {str, None} + The plugin to use. If set to None (default) imwrite will perform a + search for a matching plugin. If not None, this takes priority over + the provided format hint (if present). + extension : str + If not None, treat the provided ImageResource as if it had the given + extension. This affects the order in which backends are considered, and + may also influence the format used when encoding. + format_hint : str + Deprecated. Use `extension` instead. + **kwargs : + Additional keyword arguments will be passed to the plugin's ``write`` + call. + + Returns + ------- + encoded_image : None or Bytes + Returns ``None`` in all cases, except when ``uri`` is set to ````. 
+ In this case it returns the encoded ndimage as a bytes string. + + """ + + with imopen( + uri, + "w", + legacy_mode=False, + plugin=plugin, + format_hint=format_hint, + extension=extension, + ) as img_file: + encoded = img_file.write(image, **kwargs) + + return encoded + + +def improps(uri, *, index=None, plugin=None, extension=None, **kwargs): + """Read standardized metadata. + + Opens the given URI and reads the properties of an ndimage from it. The + properties represent standardized metadata. This means that they will have + the same name regardless of the format being read or plugin/backend being + used. Further, any field will be, where possible, populated with a sensible + default (may be `None`) if the ImageResource does not declare a value in its + metadata. + + Parameters + ---------- + index : int + If the ImageResource contains multiple ndimages, and index is an + integer, select the index-th ndimage from among them and return its + properties. If index is an ellipsis (...), read all ndimages in the file + and stack them along a new batch dimension and return their properties. + If index is None, let the plugin decide. + plugin : {str, None} + The plugin to be used. If None, performs a search for a matching + plugin. + extension : str + If not None, treat the provided ImageResource as if it had the given + extension. This affects the order in which backends are considered. + **kwargs : + Additional keyword arguments will be passed to the plugin's ``properties`` + call. + + Returns + ------- + properties : ImageProperties + A dataclass filled with standardized image metadata. + + Notes + ----- + Where possible, this will avoid loading pixel data. 
+ + See Also + -------- + imageio.core.v3_plugin_api.ImageProperties + + """ + + plugin_kwargs = {"legacy_mode": False, "plugin": plugin, "extension": extension} + + call_kwargs = kwargs + if index is not None: + call_kwargs["index"] = index + + with imopen(uri, "r", **plugin_kwargs) as img_file: + properties = img_file.properties(**call_kwargs) + + return properties + + +def immeta( + uri, *, index=None, plugin=None, extension=None, exclude_applied=True, **kwargs +): + """Read format-specific metadata. + + Opens the given URI and reads metadata for an ndimage from it. The contents + of the returned metadata dictionary is specific to both the image format and + plugin used to open the ImageResource. To learn about the exact behavior, + check the documentation of the relevant plugin. Typically, immeta returns a + dictionary specific to the image format, where keys match metadata field + names and values are a field's contents. + + Parameters + ---------- + uri : {str, pathlib.Path, bytes, file} + The resource to load the image from, e.g. a filename, pathlib.Path, http + address or file object, see the docs for more info. + index : {int, None} + If the ImageResource contains multiple ndimages, and index is an + integer, select the index-th ndimage from among them and return its + metadata. If index is an ellipsis (...), return global metadata. If + index is None, let the plugin decide the default. + plugin : {str, None} + The plugin to be used. If None (default), performs a search for a + matching plugin. + extension : str + If not None, treat the provided ImageResource as if it had the given + extension. This affects the order in which backends are considered. + **kwargs : + Additional keyword arguments will be passed to the plugin's metadata + method. + + Returns + ------- + image : ndimage + The ndimage located at the given URI. 
+ + """ + + plugin_kwargs = {"legacy_mode": False, "plugin": plugin, "extension": extension} + + call_kwargs = kwargs + call_kwargs["exclude_applied"] = exclude_applied + if index is not None: + call_kwargs["index"] = index + + with imopen(uri, "r", **plugin_kwargs) as img_file: + metadata = img_file.metadata(**call_kwargs) + + return metadata + + +__all__ = ["imopen", "imread", "imwrite", "imiter", "improps", "immeta"] diff --git a/.venv/Lib/site-packages/imageio/v3.pyi b/.venv/Lib/site-packages/imageio/v3.pyi new file mode 100644 index 00000000..339e33e1 --- /dev/null +++ b/.venv/Lib/site-packages/imageio/v3.pyi @@ -0,0 +1,62 @@ +from typing import Any, Dict, Iterator, List, Literal, Optional, Union, overload + +import numpy as np + +from .core.imopen import imopen as imopen +from .core.v3_plugin_api import ImageProperties +from .typing import ArrayLike, ImageResource + +def imread( + uri: ImageResource, + *, + index: Optional[int] = 0, + plugin: str = None, + extension: str = None, + format_hint: str = None, + **kwargs +) -> np.ndarray: ... +def imiter( + uri: ImageResource, + *, + plugin: str = None, + extension: str = None, + format_hint: str = None, + **kwargs +) -> Iterator[np.ndarray]: ... +@overload +def imwrite( + uri: Literal[""], + image: Union[ArrayLike, List[ArrayLike]], + *, + plugin: str = None, + extension: str = None, + format_hint: str = None, + **kwargs +) -> bytes: ... +@overload +def imwrite( + uri: ImageResource, + image: Union[ArrayLike, List[ArrayLike]], + *, + plugin: str = None, + extension: str = None, + format_hint: str = None, + **kwargs +) -> None: ... +def improps( + uri, + *, + index: Optional[int] = 0, + plugin: str = None, + extension: str = None, + **kwargs +) -> ImageProperties: ... +def immeta( + uri, + *, + index: Optional[int] = 0, + plugin: str = None, + extension: str = None, + exclude_applied: bool = True, + **kwargs +) -> Dict[str, Any]: ... 
diff --git a/.venv/Lib/site-packages/imageio_ffmpeg-0.4.9.dist-info/INSTALLER b/.venv/Lib/site-packages/imageio_ffmpeg-0.4.9.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/Lib/site-packages/imageio_ffmpeg-0.4.9.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/Lib/site-packages/imageio_ffmpeg-0.4.9.dist-info/LICENSE b/.venv/Lib/site-packages/imageio_ffmpeg-0.4.9.dist-info/LICENSE new file mode 100644 index 00000000..fc87aa36 --- /dev/null +++ b/.venv/Lib/site-packages/imageio_ffmpeg-0.4.9.dist-info/LICENSE @@ -0,0 +1,25 @@ +BSD 2-Clause License + +Copyright (c) 2019, imageio +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/.venv/Lib/site-packages/imageio_ffmpeg-0.4.9.dist-info/METADATA b/.venv/Lib/site-packages/imageio_ffmpeg-0.4.9.dist-info/METADATA new file mode 100644 index 00000000..d0179abb --- /dev/null +++ b/.venv/Lib/site-packages/imageio_ffmpeg-0.4.9.dist-info/METADATA @@ -0,0 +1,42 @@ +Metadata-Version: 2.1 +Name: imageio-ffmpeg +Version: 0.4.9 +Summary: FFMPEG wrapper for Python +Home-page: https://github.com/imageio/imageio-ffmpeg +Download-URL: http://pypi.python.org/pypi/imageio-ffmpeg +Author: imageio contributors +Author-email: almar.klein@gmail.com +License: BSD-2-Clause +Keywords: video ffmpeg +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Science/Research +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Provides: imageio_ffmpeg +Requires-Python: >=3.5 +License-File: LICENSE +Requires-Dist: setuptools + +FFMPEG wrapper for Python. + +Note that the platform-specific wheels contain the binary executable +of ffmpeg, which makes this package around 60 MiB in size. +I guess that's the cost for being able to read/write video files. + +For Linux users: the above is not the case when installing via your +Linux package manager (if that is possible), because this package would +simply depend on ffmpeg in that case. 
diff --git a/.venv/Lib/site-packages/imageio_ffmpeg-0.4.9.dist-info/RECORD b/.venv/Lib/site-packages/imageio_ffmpeg-0.4.9.dist-info/RECORD new file mode 100644 index 00000000..d9504160 --- /dev/null +++ b/.venv/Lib/site-packages/imageio_ffmpeg-0.4.9.dist-info/RECORD @@ -0,0 +1,18 @@ +imageio_ffmpeg-0.4.9.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +imageio_ffmpeg-0.4.9.dist-info/LICENSE,sha256=nJvpIbtQ7-Rkc86JE1sDcGHfXM3K6Xy_hKzhiw-onc4,1312 +imageio_ffmpeg-0.4.9.dist-info/METADATA,sha256=-GRdUfP4s1THIX50RUjBy_UXAzHxwxcBYsJlyBH3SIA,1652 +imageio_ffmpeg-0.4.9.dist-info/RECORD,, +imageio_ffmpeg-0.4.9.dist-info/WHEEL,sha256=f25sG_7ei0jLRCjBLI5u82SZJABUGXNw7adTdB2PUHQ,97 +imageio_ffmpeg-0.4.9.dist-info/top_level.txt,sha256=ODQYUYYbtj9I1SjASEMY7h8Q8haLrmMNURSCn0FEB18,15 +imageio_ffmpeg/__init__.py,sha256=Qa_CMd_spCLGkvmiGbUXeLdWDFxN_6CXv5L4CHLR_Yo,227 +imageio_ffmpeg/__pycache__/__init__.cpython-311.pyc,, +imageio_ffmpeg/__pycache__/_definitions.cpython-311.pyc,, +imageio_ffmpeg/__pycache__/_io.cpython-311.pyc,, +imageio_ffmpeg/__pycache__/_parsing.cpython-311.pyc,, +imageio_ffmpeg/__pycache__/_utils.cpython-311.pyc,, +imageio_ffmpeg/_definitions.py,sha256=kCKtL_R9P6USVe5Z412-lSJ-582p5L3km_so-uKQJ3Q,1739 +imageio_ffmpeg/_io.py,sha256=Z5o_LOEkwQF0sZ7lIJy44zUC6H9lTkjRVjMh5kP-0rA,27047 +imageio_ffmpeg/_parsing.py,sha256=UUtYPybzv9vQ4sKPwYlRTyaP_k9koKXfl-zI6Rm4Qpo,6840 +imageio_ffmpeg/_utils.py,sha256=MkxKIztLoyxmp123M8Z-KGkxnQmW7E9HFD9aoorhvyM,3471 +imageio_ffmpeg/binaries/README.md,sha256=sNNt-xuh6lyoc6b228wilbenlErqh1Bx2RpqrRhw530,45 +imageio_ffmpeg/binaries/ffmpeg-win64-v4.2.2.exe,sha256=QE_dVB7sxXfY8kv5qXbx5CCfQpICKkHAH1zC8bDTdaw,64654336 diff --git a/.venv/Lib/site-packages/imageio_ffmpeg-0.4.9.dist-info/WHEEL b/.venv/Lib/site-packages/imageio_ffmpeg-0.4.9.dist-info/WHEEL new file mode 100644 index 00000000..ce2bc18c --- /dev/null +++ b/.venv/Lib/site-packages/imageio_ffmpeg-0.4.9.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 
+Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-win_amd64 diff --git a/.venv/Lib/site-packages/imageio_ffmpeg-0.4.9.dist-info/top_level.txt b/.venv/Lib/site-packages/imageio_ffmpeg-0.4.9.dist-info/top_level.txt new file mode 100644 index 00000000..8adec99e --- /dev/null +++ b/.venv/Lib/site-packages/imageio_ffmpeg-0.4.9.dist-info/top_level.txt @@ -0,0 +1 @@ +imageio_ffmpeg diff --git a/.venv/Lib/site-packages/imageio_ffmpeg/__init__.py b/.venv/Lib/site-packages/imageio_ffmpeg/__init__.py new file mode 100644 index 00000000..b79d867e --- /dev/null +++ b/.venv/Lib/site-packages/imageio_ffmpeg/__init__.py @@ -0,0 +1,8 @@ +""" imageio_ffmpeg, FFMPEG wrapper for Python. +""" + +# flake8: noqa + +from ._definitions import __version__ +from ._io import count_frames_and_secs, read_frames, write_frames +from ._utils import get_ffmpeg_exe, get_ffmpeg_version diff --git a/.venv/Lib/site-packages/imageio_ffmpeg/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/imageio_ffmpeg/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..929439de Binary files /dev/null and b/.venv/Lib/site-packages/imageio_ffmpeg/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio_ffmpeg/__pycache__/_definitions.cpython-311.pyc b/.venv/Lib/site-packages/imageio_ffmpeg/__pycache__/_definitions.cpython-311.pyc new file mode 100644 index 00000000..2744e748 Binary files /dev/null and b/.venv/Lib/site-packages/imageio_ffmpeg/__pycache__/_definitions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio_ffmpeg/__pycache__/_io.cpython-311.pyc b/.venv/Lib/site-packages/imageio_ffmpeg/__pycache__/_io.cpython-311.pyc new file mode 100644 index 00000000..ddf8cfc1 Binary files /dev/null and b/.venv/Lib/site-packages/imageio_ffmpeg/__pycache__/_io.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio_ffmpeg/__pycache__/_parsing.cpython-311.pyc 
b/.venv/Lib/site-packages/imageio_ffmpeg/__pycache__/_parsing.cpython-311.pyc new file mode 100644 index 00000000..f0d4a648 Binary files /dev/null and b/.venv/Lib/site-packages/imageio_ffmpeg/__pycache__/_parsing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio_ffmpeg/__pycache__/_utils.cpython-311.pyc b/.venv/Lib/site-packages/imageio_ffmpeg/__pycache__/_utils.cpython-311.pyc new file mode 100644 index 00000000..60e83f40 Binary files /dev/null and b/.venv/Lib/site-packages/imageio_ffmpeg/__pycache__/_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/imageio_ffmpeg/_definitions.py b/.venv/Lib/site-packages/imageio_ffmpeg/_definitions.py new file mode 100644 index 00000000..a61c46dc --- /dev/null +++ b/.venv/Lib/site-packages/imageio_ffmpeg/_definitions.py @@ -0,0 +1,54 @@ +import platform +import struct +import sys + +__version__ = "0.4.9" + + +def get_platform(): + bits = struct.calcsize("P") * 8 + if sys.platform.startswith("linux"): + architecture = platform.machine() + if architecture == "aarch64": + return "linuxaarch64" + return "linux{}".format(bits) + elif sys.platform.startswith("freebsd"): + return "freebsd{}".format(bits) + elif sys.platform.startswith("win"): + return "win{}".format(bits) + elif sys.platform.startswith("cygwin"): + return "win{}".format(bits) + elif sys.platform.startswith("darwin"): + return "osx{}".format(bits) + else: # pragma: no cover + return None + + +# The Linux static builds (https://johnvansickle.com/ffmpeg/) are build +# for Linux kernels 2.6.32 and up (at the time of writing, ffmpeg v4.1). +# This corresponds to CentOS 6. This means we should use manylinux2010 and not +# manylinux1. 
+# manylinux1: https://www.python.org/dev/peps/pep-0513 +# manylinux2010: https://www.python.org/dev/peps/pep-0571 + + +# Platform string -> ffmpeg filename +FNAME_PER_PLATFORM = { + "osx64": "ffmpeg-osx64-v4.2.2", # 10.10+ + "win32": "ffmpeg-win32-v4.2.2.exe", # Windows 7+ + "win64": "ffmpeg-win64-v4.2.2.exe", + # "linux32": "ffmpeg-linux32-v4.2.2", + "linux64": "ffmpeg-linux64-v4.2.2", # Kernel 3.2.0+ + "linuxaarch64": "ffmpeg-linuxaarch64-v4.2.2", +} + +osxplats = "macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64" + +# Wheel tag -> platform string +WHEEL_BUILDS = { + "py3-none-manylinux2010_x86_64": "linux64", + "py3-none-manylinux2014_aarch64": "linuxaarch64", + "py3-none-" + osxplats: "osx64", + "py3-none-win32": "win32", + "py3-none-win_amd64": "win64", +} diff --git a/.venv/Lib/site-packages/imageio_ffmpeg/_io.py b/.venv/Lib/site-packages/imageio_ffmpeg/_io.py new file mode 100644 index 00000000..b85c4530 --- /dev/null +++ b/.venv/Lib/site-packages/imageio_ffmpeg/_io.py @@ -0,0 +1,693 @@ +import pathlib +import subprocess +import sys +import time +from collections import defaultdict +from functools import lru_cache + +from ._parsing import LogCatcher, cvsecs, parse_ffmpeg_header +from ._utils import _popen_kwargs, get_ffmpeg_exe, logger + +ISWIN = sys.platform.startswith("win") + +h264_encoder_preference = defaultdict(lambda: -1) +# The libx264 was the default encoder for a longe time with imageio +h264_encoder_preference["libx264"] = 100 + +# Encoder with the nvidia graphics card dedicated hardware +h264_encoder_preference["h264_nvenc"] = 90 +# Deprecated names for the same encoder +h264_encoder_preference["nvenc_h264"] = 90 +h264_encoder_preference["nvenc"] = 90 + +# vaapi provides hardware encoding with intel integrated graphics chipsets +h264_encoder_preference["h264_vaapi"] = 80 + +# openh264 is cisco's open source encoder +h264_encoder_preference["libopenh264"] = 70 + +h264_encoder_preference["libx264rgb"] = 50 + + +def 
ffmpeg_test_encoder(encoder): + # Use the null streams to validate if we can encode anything + # https://trac.ffmpeg.org/wiki/Null + cmd = [ + get_ffmpeg_exe(), + "-hide_banner", + "-f", + "lavfi", + "-i", + "nullsrc=s=256x256:d=8", + "-vcodec", + encoder, + "-f", + "null", + "-", + ] + p = subprocess.run( + cmd, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + return p.returncode == 0 + + +def get_compiled_h264_encoders(): + cmd = [get_ffmpeg_exe(), "-hide_banner", "-encoders"] + p = subprocess.run( + cmd, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + stdout = p.stdout.decode().replace("\r", "") + # 2022/04/08: hmaarrfk + # I couldn't find a good way to get the list of available encoders from + # the ffmpeg command + # The ffmpeg command return a table that looks like + # Notice the leading space at the very beginning + # On ubuntu with libffmpeg-nvenc-dev we get + # $ ffmpeg -hide_banner -encoders | grep -i h.264 + # + # Encoders: + # V..... = Video + # A..... = Audio + # S..... = Subtitle + # .F.... = Frame-level multithreading + # ..S... = Slice-level multithreading + # ...X.. = Codec is experimental + # ....B. = Supports draw_horiz_band + # .....D = Supports direct rendering method 1 + # ------ + # V..... libx264 libx264 H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10 (codec h264) + # V..... libx264rgb libx264 H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10 RGB (codec h264) + # V....D h264_nvenc NVIDIA NVENC H.264 encoder (codec h264) + # V..... h264_omx OpenMAX IL H.264 video encoder (codec h264) + # V..... h264_qsv H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10 (Intel Quick Sync Video acceleration) (codec h264) + # V..... h264_v4l2m2m V4L2 mem2mem H.264 encoder wrapper (codec h264) + # V....D h264_vaapi H.264/AVC (VAAPI) (codec h264) + # V..... nvenc NVIDIA NVENC H.264 encoder (codec h264) + # V..... 
nvenc_h264 NVIDIA NVENC H.264 encoder (codec h264) + # + # However, just because ffmpeg was compiled with the options enabled + # it doesn't mean that it will be successful + header_footer = stdout.split("------") + footer = header_footer[1].strip("\n") + encoders = [] + for line in footer.split("\n"): + # Strip to remove any leading spaces + line = line.strip() + encoder = line.split(" ")[1] + + if encoder in h264_encoder_preference: + # These encoders are known to support H.264 + # We forcibly include them in case their description changes to + # not include the string "H.264" + encoders.append(encoder) + elif (line[0] == "V") and ("H.264" in line): + encoders.append(encoder) + + encoders.sort(reverse=True, key=lambda x: h264_encoder_preference[x]) + if "h264_nvenc" in encoders: + # Remove deprecated names for the same encoder + for encoder in ["nvenc", "nvenc_h264"]: + if encoder in encoders: + encoders.remove(encoder) + # Return an immutable tuple to avoid users corrupting the lru_cache + return tuple(encoders) + + +@lru_cache() +def get_first_available_h264_encoder(): + compiled_encoders = get_compiled_h264_encoders() + for encoder in compiled_encoders: + if ffmpeg_test_encoder(encoder): + return encoder + else: + raise RuntimeError( + "No valid H.264 encoder was found with the ffmpeg installation" + ) + + +def count_frames_and_secs(path): + """ + Get the number of frames and number of seconds for the given video + file. Note that this operation can be quite slow for large files. + + Disclaimer: I've seen this produce different results from actually reading + the frames with older versions of ffmpeg (2.x). Therefore I cannot say + with 100% certainty that the returned values are always exact. 
+ """ + # https://stackoverflow.com/questions/2017843/fetch-frame-count-with-ffmpeg + + if isinstance(path, pathlib.PurePath): + path = str(path) + if not isinstance(path, str): + raise TypeError("Video path must be a string or pathlib.Path.") + + cmd = [ + get_ffmpeg_exe(), + "-i", + path, + "-map", + "0:v:0", + "-c", + "copy", + "-f", + "null", + "-", + ] + try: + out = subprocess.check_output(cmd, stderr=subprocess.STDOUT, **_popen_kwargs()) + except subprocess.CalledProcessError as err: + out = err.output.decode(errors="ignore") + raise RuntimeError( + "FFMPEG call failed with {}:\n{}".format(err.returncode, out) + ) + + # Note that other than with the subprocess calls below, ffmpeg wont hang here. + # Worst case Python will stop/crash and ffmpeg will continue running until done. + + nframes = nsecs = None + for line in reversed(out.splitlines()): + if line.startswith(b"frame="): + line = line.decode(errors="ignore") + i = line.find("frame=") + if i >= 0: + s = line[i:].split("=", 1)[-1].lstrip().split(" ", 1)[0].strip() + nframes = int(s) + i = line.find("time=") + if i >= 0: + s = line[i:].split("=", 1)[-1].lstrip().split(" ", 1)[0].strip() + nsecs = cvsecs(*s.split(":")) + return nframes, nsecs + + raise RuntimeError("Could not get number of frames") # pragma: no cover + + +def read_frames( + path, + pix_fmt="rgb24", + bpp=None, + input_params=None, + output_params=None, + bits_per_pixel=None, +): + """ + Create a generator to iterate over the frames in a video file. + + It first yields a small metadata dictionary that contains: + + * ffmpeg_version: the ffmpeg version in use (as a string). + * codec: a hint about the codec used to encode the video, e.g. "h264". + * source_size: the width and height of the encoded video frames. + * size: the width and height of the frames that will be produced. + * fps: the frames per second. Can be zero if it could not be detected. + * duration: duration in seconds. Can be zero if it could not be detected. 
+ + After that, it yields frames until the end of the video is reached. Each + frame is a bytes object. + + This function makes no assumptions about the number of frames in + the data. For one because this is hard to predict exactly, but also + because it may depend on the provided output_params. If you want + to know the number of frames in a video file, use count_frames_and_secs(). + It is also possible to estimate the number of frames from the fps and + duration, but note that even if both numbers are present, the resulting + value is not always correct. + + Example: + + gen = read_frames(path) + meta = gen.__next__() + for frame in gen: + print(len(frame)) + + Parameters: + path (str): the filename of the file to read from. + pix_fmt (str): the pixel format of the frames to be read. + The default is "rgb24" (frames are uint8 RGB images). + input_params (list): Additional ffmpeg input command line parameters. + output_params (list): Additional ffmpeg output command line parameters. + bits_per_pixel (int): The number of bits per pixel in the output frames. + This depends on the given pix_fmt. Default is 24 (RGB) + bpp (int): DEPRECATED, USE bits_per_pixel INSTEAD. The number of bytes per pixel in the output frames. + This depends on the given pix_fmt. Some pixel formats like yuv420p have 12 bits per pixel + and cannot be set in bytes as integer. For this reason the bpp argument is deprecated. + """ + + # ----- Input args + + if isinstance(path, pathlib.PurePath): + path = str(path) + if not isinstance(path, str): + raise TypeError("Video path must be a string or pathlib.Path.") + # Note: Dont check whether it exists. The source could be e.g. a camera. 
+ + pix_fmt = pix_fmt or "rgb24" + bpp = bpp or 3 + bits_per_pixel = bits_per_pixel or bpp * 8 + input_params = input_params or [] + output_params = output_params or [] + + assert isinstance(pix_fmt, str), "pix_fmt must be a string" + assert isinstance(bits_per_pixel, int), "bpp and bits_per_pixel must be an int" + assert isinstance(input_params, list), "input_params must be a list" + assert isinstance(output_params, list), "output_params must be a list" + + # ----- Prepare + + pre_output_params = ["-pix_fmt", pix_fmt, "-vcodec", "rawvideo", "-f", "image2pipe"] + + cmd = [get_ffmpeg_exe()] + cmd += input_params + ["-i", path] + cmd += pre_output_params + output_params + ["-"] + + process = subprocess.Popen( + cmd, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + **_popen_kwargs(prevent_sigint=True) + ) + + log_catcher = LogCatcher(process.stderr) + + # Init policy by which to terminate ffmpeg. May be set to "kill" later. + stop_policy = "timeout" # not wait; ffmpeg should be able to quit quickly + + # Enter try block directly after opening the process. + # We terminate ffmpeg in the finally clause. + # Generators are automatically closed when they get deleted, + # so the finally block is guaranteed to run. + try: + # ----- Load meta data + + # Wait for the log catcher to get the meta information + etime = time.time() + 10.0 + while log_catcher.is_alive() and not log_catcher.header and time.time() < etime: + time.sleep(0.01) + + # Check whether we have the information + if not log_catcher.header: + err2 = log_catcher.get_text(0.2) + fmt = "Could not load meta information\n=== stderr ===\n{}" + raise IOError(fmt.format(err2)) + elif "No such file or directory" in log_catcher.header: + raise IOError("{} not found! 
Wrong path?".format(path)) + + meta = parse_ffmpeg_header(log_catcher.header) + yield meta + + # ----- Read frames + + width, height = meta["size"] + framesize_bits = width * height * bits_per_pixel + framesize_bytes = framesize_bits / 8 + assert ( + framesize_bytes.is_integer() + ), "incorrect bits_per_pixel, framesize in bytes must be an int" + framesize_bytes = int(framesize_bytes) + framenr = 0 + + while True: + framenr += 1 + try: + bb = bytes() + while len(bb) < framesize_bytes: + extra_bytes = process.stdout.read(framesize_bytes - len(bb)) + if not extra_bytes: + if len(bb) == 0: + return + else: + raise RuntimeError( + "End of file reached before full frame could be read." + ) + bb += extra_bytes + yield bb + except Exception as err: + err1 = str(err) + err2 = log_catcher.get_text(0.4) + fmt = "Could not read frame {}:\n{}\n=== stderr ===\n{}" + raise RuntimeError(fmt.format(framenr, err1, err2)) + + except GeneratorExit: + # Note that GeneratorExit does not inherit from Exception but BaseException + pass + + except Exception: + # Normal exceptions fall through + raise + + except BaseException: + # Detect KeyboardInterrupt / SystemExit: don't wait for ffmpeg to quit + stop_policy = "kill" + raise + + finally: + # Stop the LogCatcher thread, which reads from stderr. + log_catcher.stop_me() + + # Make sure that ffmpeg is terminated. + if process.poll() is None: + # Ask ffmpeg to quit + try: + # I read somewhere that modern ffmpeg on Linux prefers a + # "ctrl-c", but tests so far suggests sending q is more robust. + # > p.send_signal(signal.SIGINT) + # Sending q via communicate works, but can hang (see #17) + # > p.communicate(b"q") + # So let's do similar to what communicate does, but without + # reading stdout (which may block). It looks like only closing + # stdout is enough (tried Windows+Linux), but let's play safe. + # Found that writing to stdin can cause "Invalid argument" on + # Windows # and "Broken Pipe" on Unix. 
+ # p.stdin.write(b"q") # commented out in v0.4.1 + process.stdout.close() + process.stdin.close() + # p.stderr.close() -> not here, the log_catcher closes it + except Exception as err: # pragma: no cover + logger.warning("Error while attempting stop ffmpeg (r): " + str(err)) + + if stop_policy == "timeout": + # Wait until timeout, produce a warning and kill if it still exists + try: + etime = time.time() + 1.5 + while time.time() < etime and process.poll() is None: + time.sleep(0.01) + finally: + if process.poll() is None: # pragma: no cover + logger.warning("We had to kill ffmpeg to stop it.") + process.kill() + + else: # stop_policy == "kill" + # Just kill it + process.kill() + + +def write_frames( + path, + size, + pix_fmt_in="rgb24", + pix_fmt_out="yuv420p", + fps=16, + quality=5, + bitrate=None, + codec=None, + macro_block_size=16, + ffmpeg_log_level="warning", + ffmpeg_timeout=None, + input_params=None, + output_params=None, + audio_path=None, + audio_codec=None, +): + """ + Create a generator to write frames (bytes objects) into a video file. + + The frames are written by using the generator's `send()` method. Frames + can be anything that can be written to a file. Typically these are + bytes objects, but c-contiguous Numpy arrays also work. + + Example: + + gen = write_frames(path, size) + gen.send(None) # seed the generator + for frame in frames: + gen.send(frame) + gen.close() # don't forget this + + Parameters: + path (str): the filename to write to. + size (tuple): the width and height of the frames. + pix_fmt_in (str): the pixel format of incoming frames. + E.g. "gray", "gray8a", "rgb24", or "rgba". Default "rgb24". + pix_fmt_out (str): the pixel format to store frames. Default yuv420p". + fps (float): The frames per second. Default 16. + quality (float): A measure for quality between 0 and 10. Default 5. + Ignored if bitrate is given. + bitrate (str): The bitrate, e.g. "192k". The defaults are pretty good. + codec (str): The codec. 
Default "libx264" for .mp4 (if available from + the ffmpeg executable) or "msmpeg4" for .wmv. + macro_block_size (int): You probably want to align the size of frames + to this value to avoid image resizing. Default 16. Can be set + to 1 to avoid block alignment, though this is not recommended. + ffmpeg_log_level (str): The ffmpeg logging level. Default "warning". + ffmpeg_timeout (float): Timeout in seconds to wait for ffmpeg process + to finish. Value of 0 or None will wait forever (default). The time that + ffmpeg needs depends on CPU speed, compression, and frame size. + input_params (list): Additional ffmpeg input command line parameters. + output_params (list): Additional ffmpeg output command line parameters. + audio_path (str): A input file path for encoding with an audio stream. + Default None, no audio. + audio_codec (str): The audio codec to use if audio_path is provided. + "copy" will try to use audio_path's audio codec without re-encoding. + Default None, but some formats must have certain codecs specified. + """ + + # ----- Input args + + if isinstance(path, pathlib.PurePath): + path = str(path) + if not isinstance(path, str): + raise TypeError("Video path must be a string or pathlib.Path.") + + # The pix_fmt_out yuv420p is the best for the outpur to work in + # QuickTime and most other players. These players only support + # the YUV planar color space with 4:2:0 chroma subsampling for + # H.264 video. Otherwise, depending on the source, ffmpeg may + # output to a pixel format that may be incompatible with these + # players. See https://trac.ffmpeg.org/wiki/Encode/H.264#Encodingfordumbplayers + + pix_fmt_in = pix_fmt_in or "rgb24" + pix_fmt_out = pix_fmt_out or "yuv420p" + fps = fps or 16 + # bitrate, codec, macro_block_size can all be None or ... 
+ macro_block_size = macro_block_size or 16 + ffmpeg_log_level = ffmpeg_log_level or "warning" + input_params = input_params or [] + output_params = output_params or [] + ffmpeg_timeout = ffmpeg_timeout or 0 + + floatish = float, int + if isinstance(size, (tuple, list)): + assert len(size) == 2, "size must be a 2-tuple" + assert isinstance(size[0], int) and isinstance( + size[1], int + ), "size must be ints" + sizestr = "{:d}x{:d}".format(*size) + # elif isinstance(size, str): + # assert "x" in size, "size as string must have format NxM" + # sizestr = size + else: + assert False, "size must be str or tuple" + assert isinstance(pix_fmt_in, str), "pix_fmt_in must be str" + assert isinstance(pix_fmt_out, str), "pix_fmt_out must be str" + assert isinstance(fps, floatish), "fps must be float" + if quality is not None: + assert isinstance(quality, floatish), "quality must be float" + assert 1 <= quality <= 10, "quality must be between 1 and 10 inclusive" + assert isinstance(macro_block_size, int), "macro_block_size must be int" + assert isinstance(ffmpeg_log_level, str), "ffmpeg_log_level must be str" + assert isinstance(ffmpeg_timeout, floatish), "ffmpeg_timeout must be float" + assert isinstance(input_params, list), "input_params must be a list" + assert isinstance(output_params, list), "output_params must be a list" + + # ----- Prepare + + # Get parameters + if not codec: + if path.lower().endswith(".wmv"): + # This is a safer default codec on windows to get videos that + # will play in powerpoint and other apps. H264 is not always + # available on windows. 
+ codec = "msmpeg4" + else: + codec = get_first_available_h264_encoder() + + audio_params = ["-an"] + if audio_path is not None and not path.lower().endswith(".gif"): + audio_params = ["-i", audio_path] + if audio_codec is not None: + output_params += ["-acodec", audio_codec] + output_params += ["-map", "0:v:0", "-map", "1:a:0"] + + # Get command + cmd = [ + get_ffmpeg_exe(), + "-y", + "-f", + "rawvideo", + "-vcodec", + "rawvideo", + "-s", + sizestr, + ] + cmd += ["-pix_fmt", pix_fmt_in, "-r", "{:.02f}".format(fps)] + input_params + cmd += ["-i", "-"] + audio_params + cmd += ["-vcodec", codec, "-pix_fmt", pix_fmt_out] + + # Add fixed bitrate or variable bitrate compression flags + if bitrate is not None: + cmd += ["-b:v", str(bitrate)] + elif quality is not None: # If None, then we don't add anything + quality = 1 - quality / 10.0 + if codec == "libx264": + # crf ranges 0 to 51, 51 being worst. + quality = int(quality * 51) + cmd += ["-crf", str(quality)] # for h264 + else: # Many codecs accept q:v + # q:v range can vary, 1-31, 31 being worst + # But q:v does not always have the same range. + # May need a way to find range for any codec. + quality = int(quality * 30) + 1 + cmd += ["-qscale:v", str(quality)] # for others + + # Note, for most codecs, the image dimensions must be divisible by + # 16 the default for the macro_block_size is 16. Check if image is + # divisible, if not have ffmpeg upsize to nearest size and warn + # user they should correct input image if this is not desired. 
+ if macro_block_size > 1: + if size[0] % macro_block_size > 0 or size[1] % macro_block_size > 0: + out_w = size[0] + out_h = size[1] + if size[0] % macro_block_size > 0: + out_w += macro_block_size - (size[0] % macro_block_size) + if size[1] % macro_block_size > 0: + out_h += macro_block_size - (size[1] % macro_block_size) + cmd += ["-vf", "scale={}:{}".format(out_w, out_h)] + logger.warning( + "IMAGEIO FFMPEG_WRITER WARNING: input image is not" + " divisible by macro_block_size={}, resizing from {} " + "to {} to ensure video compatibility with most codecs " + "and players. To prevent resizing, make your input " + "image divisible by the macro_block_size or set the " + "macro_block_size to 1 (risking incompatibility).".format( + macro_block_size, size[:2], (out_w, out_h) + ) + ) + + # Rather than redirect stderr to a pipe, just set minimal + # output from ffmpeg by default. That way if there are warnings + # the user will see them. + cmd += ["-v", ffmpeg_log_level] + cmd += output_params + cmd.append(path) + cmd_str = " ".join(cmd) + if any( + [level in ffmpeg_log_level for level in ("info", "verbose", "debug", "trace")] + ): + logger.info("RUNNING FFMPEG COMMAND: " + cmd_str) + + # Launch process + p = subprocess.Popen( + cmd, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=None, + **_popen_kwargs(prevent_sigint=True) + ) + + # Note that directing stderr to a pipe on windows will cause ffmpeg + # to hang if the buffer is not periodically cleared using + # StreamCatcher or other means. + # Setting bufsize to 0 or a small value does not seem to have much effect + # (tried on Windows and Linux). I suspect that ffmpeg buffers + # multiple frames (before encoding in a batch). + + # Init policy by which to terminate ffmpeg. May be set to "kill" later. + stop_policy = "timeout" + if not ffmpeg_timeout: + stop_policy = "wait" + + # ----- Write frames + + # Enter try block directly after opening the process. + # We terminate ffmpeg in the finally clause. 
+ # Generators are automatically closed when they get deleted, + # so the finally block is guaranteed to run. + try: + # Just keep going until the generator.close() is called (raises GeneratorExit). + # This could also happen when the generator is deleted somehow. + nframes = 0 + while True: + # Get frame + bb = yield + + # framesize = size[0] * size[1] * depth * bpp + # assert isinstance(bb, bytes), "Frame must be send as bytes" + # assert len(bb) == framesize, "Frame must have width*height*depth*bpp bytes" + # Actually, we accept anything that can be written to file. + # This e.g. allows writing numpy arrays without having to make a copy ... + + # Write + try: + p.stdin.write(bb) + except Exception as err: + # Show the command and stderr from pipe + msg = ( + "{0:}\n\nFFMPEG COMMAND:\n{1:}\n\nFFMPEG STDERR " + "OUTPUT:\n".format(err, cmd_str) + ) + raise IOError(msg) + + nframes += 1 + + except GeneratorExit: + # Note that GeneratorExit does not inherit from Exception but BaseException + # Detect premature closing + if nframes == 0: + logger.warning("No frames have been written; the written video is invalid.") + + except Exception: + # Normal exceptions fall through + raise + + except BaseException: + # Detect KeyboardInterrupt / SystemExit: don't wait for ffmpeg to quit + stop_policy = "kill" + raise + + finally: + # Make sure that ffmpeg is terminated. + if p.poll() is None: + # Tell ffmpeg that we're done + try: + p.stdin.close() + except Exception as err: # pragma: no cover + logger.warning("Error while attempting stop ffmpeg (w): " + str(err)) + + if stop_policy == "timeout": + # Wait until timeout, produce a warning and kill if it still exists + try: + etime = time.time() + ffmpeg_timeout + while (time.time() < etime) and p.poll() is None: + time.sleep(0.01) + finally: + if p.poll() is None: # pragma: no cover + logger.warning( + "We had to kill ffmpeg to stop it. " + + "Consider increasing ffmpeg_timeout, " + + "or setting it to zero (no timeout)." 
+ ) + p.kill() + + elif stop_policy == "wait": + # Wait forever, kill if it if we're interrupted + try: + while p.poll() is None: + time.sleep(0.01) + finally: # the above can raise e.g. by ctrl-c or systemexit + if p.poll() is None: # pragma: no cover + p.kill() + + else: # stop_policy == "kill": + # Just kill it + p.kill() + # Just to be safe, wrap in try/except + try: + p.stdout.close() + except Exception: + pass diff --git a/.venv/Lib/site-packages/imageio_ffmpeg/_parsing.py b/.venv/Lib/site-packages/imageio_ffmpeg/_parsing.py new file mode 100644 index 00000000..8dd71b82 --- /dev/null +++ b/.venv/Lib/site-packages/imageio_ffmpeg/_parsing.py @@ -0,0 +1,208 @@ +import re +import threading +import time + +from ._utils import logger + + +class LogCatcher(threading.Thread): + """Thread to keep reading from stderr so that the buffer does not + fill up and stalls the ffmpeg process. On stderr a message is send + on every few frames with some meta information. We only keep the + last ones. + """ + + def __init__(self, file): + self._file = file + self._header = "" + self._lines = [] + self._remainder = b"" + threading.Thread.__init__(self) + self.daemon = True # do not let this thread hold up Python shutdown + self._should_stop = False + self.start() + + def stop_me(self): + self._should_stop = True + + @property + def header(self): + """Get header text. Empty string if the header is not yet parsed.""" + return self._header + + def get_text(self, timeout=0): + """Get the whole text written to stderr so far. To preserve + memory, only the last 50 to 100 frames are kept. + + If a timeout is given, wait for this thread to finish. When + something goes wrong, we stop ffmpeg and want a full report of + stderr, but this thread might need a tiny bit more time. + """ + + # Wait? 
+ if timeout > 0: + etime = time.time() + timeout + while self.is_alive() and time.time() < etime: # pragma: no cover + time.sleep(0.01) + # Return str + lines = b"\n".join(self._lines) + return self._header + "\n" + lines.decode("utf-8", "ignore") + + def run(self): + # Create ref here so it still exists even if Py is shutting down + limit_lines_local = limit_lines + + while not self._should_stop: + time.sleep(0) + # Read one line. Detect when closed, and exit + try: + line = self._file.read(20) + except ValueError: # pragma: no cover + break + if not line: + break + # Process to divide in lines + line = line.replace(b"\r", b"\n").replace(b"\n\n", b"\n") + lines = line.split(b"\n") + lines[0] = self._remainder + lines[0] + self._remainder = lines.pop(-1) + # Process each line + self._lines.extend(lines) + if not self._header: + if get_output_video_line(self._lines): + header = b"\n".join(self._lines) + self._header += header.decode("utf-8", "ignore") + elif self._lines: + self._lines = limit_lines_local(self._lines) + + # Close the file when we're done + # See #61 and #69 + try: + self._file.close() + except Exception: + pass + + +def get_output_video_line(lines): + """Get the line that defines the video stream that ffmpeg outputs, + and which we read. + """ + in_output = False + for line in lines: + sline = line.lstrip() + if sline.startswith(b"Output "): + in_output = True + elif in_output: + if sline.startswith(b"Stream ") and b" Video:" in sline: + return line + + +def limit_lines(lines, N=32): + """When number of lines > 2*N, reduce to N.""" + if len(lines) > 2 * N: + lines = [b"... showing only last few lines ..."] + lines[-N:] + return lines + + +def cvsecs(*args): + """converts a time to second. Either cvsecs(min, secs) or + cvsecs(hours, mins, secs). 
+ """ + if len(args) == 1: + return float(args[0]) + elif len(args) == 2: + return 60 * float(args[0]) + float(args[1]) + elif len(args) == 3: + return 3600 * float(args[0]) + 60 * float(args[1]) + float(args[2]) + + +def parse_ffmpeg_header(text): + lines = text.splitlines() + meta = {} + + # meta["header"] = text # Can enable this for debugging + + # Get version + ver = lines[0].split("version", 1)[-1].split("Copyright")[0] + meta["ffmpeg_version"] = ver.strip() + " " + lines[1].strip() + + # get the output line that speaks about video + videolines = [ + l for l in lines if l.lstrip().startswith("Stream ") and " Video: " in l + ] + + # Codec and pix_fmt hint + line = videolines[0] + meta["codec"] = line.split("Video: ", 1)[-1].lstrip().split(" ", 1)[0].strip() + meta["pix_fmt"] = re.split( + # use a negative lookahead regexp to ignore commas that are contained + # within a parenthesis + # this helps consider a pix_fmt of the kind + # yuv420p(tv, progressive) + # as what it is, instead of erroneously reporting as + # yuv420p(tv + r",\s*(?![^()]*\))", + line.split("Video: ", 1)[-1], + )[1].strip() + + # get the output line that speaks about audio + audiolines = [ + l for l in lines if l.lstrip().startswith("Stream ") and " Audio: " in l + ] + + if len(audiolines) > 0: + audio_line = audiolines[0] + meta["audio_codec"] = ( + audio_line.split("Audio: ", 1)[-1].lstrip().split(" ", 1)[0].strip() + ) + + # get the frame rate. 
+ # matches can be empty, see #171, assume nframes = inf + # the regexp omits values of "1k tbr" which seems a specific edge-case #262 + # it seems that tbr is generally to be preferred #262 + fps = 0 + for line in [videolines[0]]: + matches = re.findall(r" ([0-9]+\.?[0-9]*) (fps)", line) + if matches: + fps = float(matches[0][0].strip()) + meta["fps"] = fps + + # get the size of the original stream, of the form 460x320 (w x h) + line = videolines[0] + match = re.search(" [0-9]*x[0-9]*(,| )", line) + parts = line[match.start() : match.end() - 1].split("x") + meta["source_size"] = tuple(map(int, parts)) + + # get the size of what we receive, of the form 460x320 (w x h) + line = videolines[-1] # Pipe output + match = re.search(" [0-9]*x[0-9]*(,| )", line) + parts = line[match.start() : match.end() - 1].split("x") + meta["size"] = tuple(map(int, parts)) + + # Check the two sizes + if meta["source_size"] != meta["size"]: + logger.warning( + "The frame size for reading {} is " + "different from the source frame size {}.".format( + meta["size"], meta["source_size"] + ) + ) + + # get the rotate metadata + reo_rotate = re.compile(r"rotate\s+:\s([0-9]+)") + match = reo_rotate.search(text) + rotate = 0 + if match is not None: + rotate = match.groups()[0] + meta["rotate"] = int(rotate) + + # get duration (in seconds) + line = [l for l in lines if "Duration: " in l][0] + match = re.search(" [0-9][0-9]:[0-9][0-9]:[0-9][0-9].[0-9][0-9]", line) + duration = 0 + if match is not None: + hms = line[match.start() + 1 : match.end()].split(":") + duration = cvsecs(*hms) + meta["duration"] = duration + + return meta diff --git a/.venv/Lib/site-packages/imageio_ffmpeg/_utils.py b/.venv/Lib/site-packages/imageio_ffmpeg/_utils.py new file mode 100644 index 00000000..15bd9583 --- /dev/null +++ b/.venv/Lib/site-packages/imageio_ffmpeg/_utils.py @@ -0,0 +1,117 @@ +import logging +import os +import subprocess +import sys +from functools import lru_cache + +from pkg_resources import 
resource_filename + +from ._definitions import FNAME_PER_PLATFORM, get_platform + +logger = logging.getLogger("imageio_ffmpeg") + + +def get_ffmpeg_exe(): + """ + Get the ffmpeg executable file. This can be the binary defined by + the IMAGEIO_FFMPEG_EXE environment variable, the binary distributed + with imageio-ffmpeg, an ffmpeg binary installed with conda, or the + system ffmpeg (in that order). A RuntimeError is raised if no valid + ffmpeg could be found. + """ + + # 1. Try environment variable. - Dont test it: the user is explicit here! + exe = os.getenv("IMAGEIO_FFMPEG_EXE", None) + if exe: + return exe + + # Auto-detect + exe = _get_ffmpeg_exe() + if exe: + return exe + + # Nothing was found + raise RuntimeError( + "No ffmpeg exe could be found. Install ffmpeg on your system, " + "or set the IMAGEIO_FFMPEG_EXE environment variable." + ) + + +@lru_cache() +def _get_ffmpeg_exe(): + plat = get_platform() + + # 2. Try from here + bin_dir = resource_filename("imageio_ffmpeg", "binaries") + exe = os.path.join(bin_dir, FNAME_PER_PLATFORM.get(plat, "")) + if exe and os.path.isfile(exe) and _is_valid_exe(exe): + return exe + + # 3. Try binary from conda package + # (installed e.g. via `conda install ffmpeg -c conda-forge`) + if plat.startswith("win"): + exe = os.path.join(sys.prefix, "Library", "bin", "ffmpeg.exe") + else: + exe = os.path.join(sys.prefix, "bin", "ffmpeg") + if exe and os.path.isfile(exe) and _is_valid_exe(exe): + return exe + + # 4. 
Try system ffmpeg command + exe = "ffmpeg" + if _is_valid_exe(exe): + return exe + + return None + + +def _popen_kwargs(prevent_sigint=False): + startupinfo = None + preexec_fn = None + creationflags = 0 + if sys.platform.startswith("win"): + # Stops executable from flashing on Windows (see #22) + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + if prevent_sigint: + # Prevent propagation of sigint (see #4) + # https://stackoverflow.com/questions/5045771 + if sys.platform.startswith("win"): + creationflags = 0x00000200 + else: + preexec_fn = os.setpgrp # the _pre_exec does not seem to work + + falsy = ("", "0", "false", "no") + if os.getenv("IMAGEIO_FFMPEG_NO_PREVENT_SIGINT", "").lower() not in falsy: + # Unset preexec_fn to work around a strange hang on fork() (see #58) + preexec_fn = None + + return { + "startupinfo": startupinfo, + "creationflags": creationflags, + "preexec_fn": preexec_fn, + } + + +def _is_valid_exe(exe): + cmd = [exe, "-version"] + try: + with open(os.devnull, "w") as null: + subprocess.check_call( + cmd, stdout=null, stderr=subprocess.STDOUT, **_popen_kwargs() + ) + return True + except (OSError, ValueError, subprocess.CalledProcessError): + return False + + +def get_ffmpeg_version(): + """ + Get the version of the used ffmpeg executable (as a string). + """ + exe = get_ffmpeg_exe() + line = subprocess.check_output([exe, "-version"], **_popen_kwargs()).split( + b"\n", 1 + )[0] + line = line.decode(errors="ignore").strip() + version = line.split("version", 1)[-1].lstrip().split(" ", 1)[0].strip() + return version diff --git a/.venv/Lib/site-packages/imageio_ffmpeg/binaries/README.md b/.venv/Lib/site-packages/imageio_ffmpeg/binaries/README.md new file mode 100644 index 00000000..4f941f06 --- /dev/null +++ b/.venv/Lib/site-packages/imageio_ffmpeg/binaries/README.md @@ -0,0 +1 @@ +Exes are dropped here by the release script. 
diff --git a/.venv/Lib/site-packages/imageio_ffmpeg/binaries/ffmpeg-win64-v4.2.2.exe b/.venv/Lib/site-packages/imageio_ffmpeg/binaries/ffmpeg-win64-v4.2.2.exe new file mode 100644 index 00000000..290a9f02 Binary files /dev/null and b/.venv/Lib/site-packages/imageio_ffmpeg/binaries/ffmpeg-win64-v4.2.2.exe differ diff --git a/.venv/Lib/site-packages/inflect/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/inflect/__pycache__/__init__.cpython-311.pyc index 3465fe7f..a6c81765 100644 Binary files a/.venv/Lib/site-packages/inflect/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/inflect/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jamo/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/jamo/__pycache__/__init__.cpython-311.pyc index 904699ec..264009b8 100644 Binary files a/.venv/Lib/site-packages/jamo/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/jamo/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jamo/__pycache__/jamo.cpython-311.pyc b/.venv/Lib/site-packages/jamo/__pycache__/jamo.cpython-311.pyc index 2647645c..9fdd5a0e 100644 Binary files a/.venv/Lib/site-packages/jamo/__pycache__/jamo.cpython-311.pyc and b/.venv/Lib/site-packages/jamo/__pycache__/jamo.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jieba/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/jieba/__pycache__/__init__.cpython-311.pyc index afbfebdc..6ca9baef 100644 Binary files a/.venv/Lib/site-packages/jieba/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/jieba/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jieba/__pycache__/_compat.cpython-311.pyc b/.venv/Lib/site-packages/jieba/__pycache__/_compat.cpython-311.pyc index 8a1a4771..8cd3f6da 100644 Binary files a/.venv/Lib/site-packages/jieba/__pycache__/_compat.cpython-311.pyc and 
b/.venv/Lib/site-packages/jieba/__pycache__/_compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jieba/finalseg/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/jieba/finalseg/__pycache__/__init__.cpython-311.pyc index 731ac6f9..af462d01 100644 Binary files a/.venv/Lib/site-packages/jieba/finalseg/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/jieba/finalseg/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jieba/finalseg/__pycache__/prob_emit.cpython-311.pyc b/.venv/Lib/site-packages/jieba/finalseg/__pycache__/prob_emit.cpython-311.pyc index f1ddac2f..3254a048 100644 Binary files a/.venv/Lib/site-packages/jieba/finalseg/__pycache__/prob_emit.cpython-311.pyc and b/.venv/Lib/site-packages/jieba/finalseg/__pycache__/prob_emit.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jieba/finalseg/__pycache__/prob_start.cpython-311.pyc b/.venv/Lib/site-packages/jieba/finalseg/__pycache__/prob_start.cpython-311.pyc index 9cce6e23..d643a0aa 100644 Binary files a/.venv/Lib/site-packages/jieba/finalseg/__pycache__/prob_start.cpython-311.pyc and b/.venv/Lib/site-packages/jieba/finalseg/__pycache__/prob_start.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jieba/finalseg/__pycache__/prob_trans.cpython-311.pyc b/.venv/Lib/site-packages/jieba/finalseg/__pycache__/prob_trans.cpython-311.pyc index 7f11f709..06d93da1 100644 Binary files a/.venv/Lib/site-packages/jieba/finalseg/__pycache__/prob_trans.cpython-311.pyc and b/.venv/Lib/site-packages/jieba/finalseg/__pycache__/prob_trans.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jinja2/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/jinja2/__pycache__/__init__.cpython-311.pyc index c5b329ed..8bb3f503 100644 Binary files a/.venv/Lib/site-packages/jinja2/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/jinja2/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/jinja2/__pycache__/_identifier.cpython-311.pyc b/.venv/Lib/site-packages/jinja2/__pycache__/_identifier.cpython-311.pyc index 453be462..d643e10c 100644 Binary files a/.venv/Lib/site-packages/jinja2/__pycache__/_identifier.cpython-311.pyc and b/.venv/Lib/site-packages/jinja2/__pycache__/_identifier.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jinja2/__pycache__/async_utils.cpython-311.pyc b/.venv/Lib/site-packages/jinja2/__pycache__/async_utils.cpython-311.pyc index 97d0641c..b18c8373 100644 Binary files a/.venv/Lib/site-packages/jinja2/__pycache__/async_utils.cpython-311.pyc and b/.venv/Lib/site-packages/jinja2/__pycache__/async_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jinja2/__pycache__/bccache.cpython-311.pyc b/.venv/Lib/site-packages/jinja2/__pycache__/bccache.cpython-311.pyc index 756fe83c..7ff61cbe 100644 Binary files a/.venv/Lib/site-packages/jinja2/__pycache__/bccache.cpython-311.pyc and b/.venv/Lib/site-packages/jinja2/__pycache__/bccache.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jinja2/__pycache__/compiler.cpython-311.pyc b/.venv/Lib/site-packages/jinja2/__pycache__/compiler.cpython-311.pyc index 8c81eb0f..16603ac0 100644 Binary files a/.venv/Lib/site-packages/jinja2/__pycache__/compiler.cpython-311.pyc and b/.venv/Lib/site-packages/jinja2/__pycache__/compiler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jinja2/__pycache__/defaults.cpython-311.pyc b/.venv/Lib/site-packages/jinja2/__pycache__/defaults.cpython-311.pyc index 30d1b303..ab1a78f5 100644 Binary files a/.venv/Lib/site-packages/jinja2/__pycache__/defaults.cpython-311.pyc and b/.venv/Lib/site-packages/jinja2/__pycache__/defaults.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jinja2/__pycache__/environment.cpython-311.pyc b/.venv/Lib/site-packages/jinja2/__pycache__/environment.cpython-311.pyc index ca66f305..bd50f648 100644 Binary files 
a/.venv/Lib/site-packages/jinja2/__pycache__/environment.cpython-311.pyc and b/.venv/Lib/site-packages/jinja2/__pycache__/environment.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jinja2/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/jinja2/__pycache__/exceptions.cpython-311.pyc index 46b08918..9a0fe1a2 100644 Binary files a/.venv/Lib/site-packages/jinja2/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/jinja2/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jinja2/__pycache__/filters.cpython-311.pyc b/.venv/Lib/site-packages/jinja2/__pycache__/filters.cpython-311.pyc index a2dca522..adb6d445 100644 Binary files a/.venv/Lib/site-packages/jinja2/__pycache__/filters.cpython-311.pyc and b/.venv/Lib/site-packages/jinja2/__pycache__/filters.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jinja2/__pycache__/idtracking.cpython-311.pyc b/.venv/Lib/site-packages/jinja2/__pycache__/idtracking.cpython-311.pyc index 2e983895..df8266ad 100644 Binary files a/.venv/Lib/site-packages/jinja2/__pycache__/idtracking.cpython-311.pyc and b/.venv/Lib/site-packages/jinja2/__pycache__/idtracking.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jinja2/__pycache__/lexer.cpython-311.pyc b/.venv/Lib/site-packages/jinja2/__pycache__/lexer.cpython-311.pyc index f058dc36..83fbbf00 100644 Binary files a/.venv/Lib/site-packages/jinja2/__pycache__/lexer.cpython-311.pyc and b/.venv/Lib/site-packages/jinja2/__pycache__/lexer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jinja2/__pycache__/loaders.cpython-311.pyc b/.venv/Lib/site-packages/jinja2/__pycache__/loaders.cpython-311.pyc index c91ee612..4d33d470 100644 Binary files a/.venv/Lib/site-packages/jinja2/__pycache__/loaders.cpython-311.pyc and b/.venv/Lib/site-packages/jinja2/__pycache__/loaders.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jinja2/__pycache__/nodes.cpython-311.pyc 
b/.venv/Lib/site-packages/jinja2/__pycache__/nodes.cpython-311.pyc index 98ce58ff..1a96d0e4 100644 Binary files a/.venv/Lib/site-packages/jinja2/__pycache__/nodes.cpython-311.pyc and b/.venv/Lib/site-packages/jinja2/__pycache__/nodes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jinja2/__pycache__/optimizer.cpython-311.pyc b/.venv/Lib/site-packages/jinja2/__pycache__/optimizer.cpython-311.pyc index 649d8148..87226964 100644 Binary files a/.venv/Lib/site-packages/jinja2/__pycache__/optimizer.cpython-311.pyc and b/.venv/Lib/site-packages/jinja2/__pycache__/optimizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jinja2/__pycache__/parser.cpython-311.pyc b/.venv/Lib/site-packages/jinja2/__pycache__/parser.cpython-311.pyc index 11cb6799..d17f8f9c 100644 Binary files a/.venv/Lib/site-packages/jinja2/__pycache__/parser.cpython-311.pyc and b/.venv/Lib/site-packages/jinja2/__pycache__/parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jinja2/__pycache__/runtime.cpython-311.pyc b/.venv/Lib/site-packages/jinja2/__pycache__/runtime.cpython-311.pyc index 96467bb9..092fbcb1 100644 Binary files a/.venv/Lib/site-packages/jinja2/__pycache__/runtime.cpython-311.pyc and b/.venv/Lib/site-packages/jinja2/__pycache__/runtime.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jinja2/__pycache__/tests.cpython-311.pyc b/.venv/Lib/site-packages/jinja2/__pycache__/tests.cpython-311.pyc index 77161750..93abf397 100644 Binary files a/.venv/Lib/site-packages/jinja2/__pycache__/tests.cpython-311.pyc and b/.venv/Lib/site-packages/jinja2/__pycache__/tests.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jinja2/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/jinja2/__pycache__/utils.cpython-311.pyc index 72a3946b..98291cc9 100644 Binary files a/.venv/Lib/site-packages/jinja2/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/jinja2/__pycache__/utils.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/jinja2/__pycache__/visitor.cpython-311.pyc b/.venv/Lib/site-packages/jinja2/__pycache__/visitor.cpython-311.pyc index fb0ea2c7..a22850b2 100644 Binary files a/.venv/Lib/site-packages/jinja2/__pycache__/visitor.cpython-311.pyc and b/.venv/Lib/site-packages/jinja2/__pycache__/visitor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/joblib/__pycache__/__init__.cpython-311.pyc index d646e8f9..9353cb4e 100644 Binary files a/.venv/Lib/site-packages/joblib/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/__pycache__/_cloudpickle_wrapper.cpython-311.pyc b/.venv/Lib/site-packages/joblib/__pycache__/_cloudpickle_wrapper.cpython-311.pyc index d6a36b10..26cb6a34 100644 Binary files a/.venv/Lib/site-packages/joblib/__pycache__/_cloudpickle_wrapper.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/__pycache__/_cloudpickle_wrapper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/__pycache__/_memmapping_reducer.cpython-311.pyc b/.venv/Lib/site-packages/joblib/__pycache__/_memmapping_reducer.cpython-311.pyc index 710af745..c4c8c422 100644 Binary files a/.venv/Lib/site-packages/joblib/__pycache__/_memmapping_reducer.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/__pycache__/_memmapping_reducer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/__pycache__/_multiprocessing_helpers.cpython-311.pyc b/.venv/Lib/site-packages/joblib/__pycache__/_multiprocessing_helpers.cpython-311.pyc index 64c7bb4c..6fb9fe32 100644 Binary files a/.venv/Lib/site-packages/joblib/__pycache__/_multiprocessing_helpers.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/__pycache__/_multiprocessing_helpers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/__pycache__/_parallel_backends.cpython-311.pyc 
b/.venv/Lib/site-packages/joblib/__pycache__/_parallel_backends.cpython-311.pyc index c17cab3a..95f4bc8c 100644 Binary files a/.venv/Lib/site-packages/joblib/__pycache__/_parallel_backends.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/__pycache__/_parallel_backends.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/__pycache__/_store_backends.cpython-311.pyc b/.venv/Lib/site-packages/joblib/__pycache__/_store_backends.cpython-311.pyc index 5e1ec3a5..24e030d5 100644 Binary files a/.venv/Lib/site-packages/joblib/__pycache__/_store_backends.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/__pycache__/_store_backends.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/__pycache__/_utils.cpython-311.pyc b/.venv/Lib/site-packages/joblib/__pycache__/_utils.cpython-311.pyc index b7d60430..e672886d 100644 Binary files a/.venv/Lib/site-packages/joblib/__pycache__/_utils.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/__pycache__/_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/__pycache__/backports.cpython-311.pyc b/.venv/Lib/site-packages/joblib/__pycache__/backports.cpython-311.pyc index 23d5a594..182c2385 100644 Binary files a/.venv/Lib/site-packages/joblib/__pycache__/backports.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/__pycache__/backports.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/__pycache__/compressor.cpython-311.pyc b/.venv/Lib/site-packages/joblib/__pycache__/compressor.cpython-311.pyc index 16feb078..e8018cd5 100644 Binary files a/.venv/Lib/site-packages/joblib/__pycache__/compressor.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/__pycache__/compressor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/__pycache__/disk.cpython-311.pyc b/.venv/Lib/site-packages/joblib/__pycache__/disk.cpython-311.pyc index bafa8523..e2b2f27a 100644 Binary files a/.venv/Lib/site-packages/joblib/__pycache__/disk.cpython-311.pyc and 
b/.venv/Lib/site-packages/joblib/__pycache__/disk.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/__pycache__/executor.cpython-311.pyc b/.venv/Lib/site-packages/joblib/__pycache__/executor.cpython-311.pyc index d9754465..050b229e 100644 Binary files a/.venv/Lib/site-packages/joblib/__pycache__/executor.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/__pycache__/executor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/__pycache__/func_inspect.cpython-311.pyc b/.venv/Lib/site-packages/joblib/__pycache__/func_inspect.cpython-311.pyc index cee280fe..e68d858c 100644 Binary files a/.venv/Lib/site-packages/joblib/__pycache__/func_inspect.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/__pycache__/func_inspect.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/__pycache__/hashing.cpython-311.pyc b/.venv/Lib/site-packages/joblib/__pycache__/hashing.cpython-311.pyc index d9d5350b..b3c09a3a 100644 Binary files a/.venv/Lib/site-packages/joblib/__pycache__/hashing.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/__pycache__/hashing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/__pycache__/logger.cpython-311.pyc b/.venv/Lib/site-packages/joblib/__pycache__/logger.cpython-311.pyc index 77d74606..19f802c0 100644 Binary files a/.venv/Lib/site-packages/joblib/__pycache__/logger.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/__pycache__/logger.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/__pycache__/memory.cpython-311.pyc b/.venv/Lib/site-packages/joblib/__pycache__/memory.cpython-311.pyc index 21dde61e..499f54a7 100644 Binary files a/.venv/Lib/site-packages/joblib/__pycache__/memory.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/__pycache__/memory.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/__pycache__/numpy_pickle.cpython-311.pyc b/.venv/Lib/site-packages/joblib/__pycache__/numpy_pickle.cpython-311.pyc index 574e398f..ef3bddbd 100644 
Binary files a/.venv/Lib/site-packages/joblib/__pycache__/numpy_pickle.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/__pycache__/numpy_pickle.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/__pycache__/numpy_pickle_compat.cpython-311.pyc b/.venv/Lib/site-packages/joblib/__pycache__/numpy_pickle_compat.cpython-311.pyc index c42889df..6b0c8b23 100644 Binary files a/.venv/Lib/site-packages/joblib/__pycache__/numpy_pickle_compat.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/__pycache__/numpy_pickle_compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/__pycache__/numpy_pickle_utils.cpython-311.pyc b/.venv/Lib/site-packages/joblib/__pycache__/numpy_pickle_utils.cpython-311.pyc index 613d75b2..fd4a90d8 100644 Binary files a/.venv/Lib/site-packages/joblib/__pycache__/numpy_pickle_utils.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/__pycache__/numpy_pickle_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/__pycache__/parallel.cpython-311.pyc b/.venv/Lib/site-packages/joblib/__pycache__/parallel.cpython-311.pyc index f8702dd2..7b4afdcc 100644 Binary files a/.venv/Lib/site-packages/joblib/__pycache__/parallel.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/__pycache__/parallel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/__pycache__/pool.cpython-311.pyc b/.venv/Lib/site-packages/joblib/__pycache__/pool.cpython-311.pyc index 1860f2bd..ef492a60 100644 Binary files a/.venv/Lib/site-packages/joblib/__pycache__/pool.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/__pycache__/pool.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/externals/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/joblib/externals/__pycache__/__init__.cpython-311.pyc index f08ce191..e490f66e 100644 Binary files a/.venv/Lib/site-packages/joblib/externals/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/joblib/externals/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/externals/cloudpickle/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/joblib/externals/cloudpickle/__pycache__/__init__.cpython-311.pyc index c8cc9460..ef224de4 100644 Binary files a/.venv/Lib/site-packages/joblib/externals/cloudpickle/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/externals/cloudpickle/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/externals/cloudpickle/__pycache__/cloudpickle.cpython-311.pyc b/.venv/Lib/site-packages/joblib/externals/cloudpickle/__pycache__/cloudpickle.cpython-311.pyc index af9cc9a3..770b3acb 100644 Binary files a/.venv/Lib/site-packages/joblib/externals/cloudpickle/__pycache__/cloudpickle.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/externals/cloudpickle/__pycache__/cloudpickle.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/__init__.cpython-311.pyc index a0888fa5..51270c9c 100644 Binary files a/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/_base.cpython-311.pyc b/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/_base.cpython-311.pyc index 6619db12..3b047584 100644 Binary files a/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/_base.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/_base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/cloudpickle_wrapper.cpython-311.pyc b/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/cloudpickle_wrapper.cpython-311.pyc index 50a32604..099e840c 
100644 Binary files a/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/cloudpickle_wrapper.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/cloudpickle_wrapper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/initializers.cpython-311.pyc b/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/initializers.cpython-311.pyc index 10a7d848..14d36c6b 100644 Binary files a/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/initializers.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/initializers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/process_executor.cpython-311.pyc b/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/process_executor.cpython-311.pyc index 6bbbe04f..072506ae 100644 Binary files a/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/process_executor.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/process_executor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/reusable_executor.cpython-311.pyc b/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/reusable_executor.cpython-311.pyc index cc3a1448..472c491d 100644 Binary files a/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/reusable_executor.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/externals/loky/__pycache__/reusable_executor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/__init__.cpython-311.pyc index f10de602..ce2ee96c 100644 Binary files a/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/_win_reduction.cpython-311.pyc b/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/_win_reduction.cpython-311.pyc index 7148bf22..7f07f23b 100644 Binary files a/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/_win_reduction.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/_win_reduction.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/context.cpython-311.pyc b/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/context.cpython-311.pyc index 223dbd63..bdc533f8 100644 Binary files a/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/context.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/context.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/process.cpython-311.pyc b/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/process.cpython-311.pyc index fcc8adc8..b0a889fb 100644 Binary files a/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/process.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/process.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/queues.cpython-311.pyc b/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/queues.cpython-311.pyc index d7fbbcc4..50e613de 100644 Binary files a/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/queues.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/queues.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/reduction.cpython-311.pyc b/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/reduction.cpython-311.pyc index 9120d1d7..4f84f779 100644 Binary files 
a/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/reduction.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/reduction.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/resource_tracker.cpython-311.pyc b/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/resource_tracker.cpython-311.pyc index bf270f89..cc57c449 100644 Binary files a/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/resource_tracker.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/resource_tracker.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/spawn.cpython-311.pyc b/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/spawn.cpython-311.pyc index c63ec98c..4ab453c3 100644 Binary files a/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/spawn.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/spawn.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/utils.cpython-311.pyc index 61aff440..2638e7b2 100644 Binary files a/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/joblib/externals/loky/backend/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jsonlines/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/jsonlines/__pycache__/__init__.cpython-311.pyc index 746c58b6..79b5d6c0 100644 Binary files a/.venv/Lib/site-packages/jsonlines/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/jsonlines/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/jsonlines/__pycache__/jsonlines.cpython-311.pyc 
b/.venv/Lib/site-packages/jsonlines/__pycache__/jsonlines.cpython-311.pyc index d4227017..7d8e27c4 100644 Binary files a/.venv/Lib/site-packages/jsonlines/__pycache__/jsonlines.cpython-311.pyc and b/.venv/Lib/site-packages/jsonlines/__pycache__/jsonlines.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/kiwisolver/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/kiwisolver/__pycache__/__init__.cpython-311.pyc index a04fd572..4495c65e 100644 Binary files a/.venv/Lib/site-packages/kiwisolver/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/kiwisolver/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/kiwisolver/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/kiwisolver/__pycache__/exceptions.cpython-311.pyc index 6a395ba1..db6ebc58 100644 Binary files a/.venv/Lib/site-packages/kiwisolver/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/kiwisolver/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/langcodes/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/langcodes/__pycache__/__init__.cpython-311.pyc index f6c09541..7258d962 100644 Binary files a/.venv/Lib/site-packages/langcodes/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/langcodes/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/langcodes/__pycache__/data_dicts.cpython-311.pyc b/.venv/Lib/site-packages/langcodes/__pycache__/data_dicts.cpython-311.pyc index 80105b31..7f0f3b1c 100644 Binary files a/.venv/Lib/site-packages/langcodes/__pycache__/data_dicts.cpython-311.pyc and b/.venv/Lib/site-packages/langcodes/__pycache__/data_dicts.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/langcodes/__pycache__/language_distance.cpython-311.pyc b/.venv/Lib/site-packages/langcodes/__pycache__/language_distance.cpython-311.pyc index 6035efe7..c17f4280 100644 Binary files 
a/.venv/Lib/site-packages/langcodes/__pycache__/language_distance.cpython-311.pyc and b/.venv/Lib/site-packages/langcodes/__pycache__/language_distance.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/langcodes/__pycache__/tag_parser.cpython-311.pyc b/.venv/Lib/site-packages/langcodes/__pycache__/tag_parser.cpython-311.pyc index 41ffd60b..85b379f1 100644 Binary files a/.venv/Lib/site-packages/langcodes/__pycache__/tag_parser.cpython-311.pyc and b/.venv/Lib/site-packages/langcodes/__pycache__/tag_parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/lazy_loader/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/lazy_loader/__pycache__/__init__.cpython-311.pyc index 9ab818da..55eff589 100644 Binary files a/.venv/Lib/site-packages/lazy_loader/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/lazy_loader/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/librosa/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/librosa/__pycache__/__init__.cpython-311.pyc index d1175072..67a16c1f 100644 Binary files a/.venv/Lib/site-packages/librosa/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/librosa/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/librosa/__pycache__/_cache.cpython-311.pyc b/.venv/Lib/site-packages/librosa/__pycache__/_cache.cpython-311.pyc index 9d107ab1..8647fef1 100644 Binary files a/.venv/Lib/site-packages/librosa/__pycache__/_cache.cpython-311.pyc and b/.venv/Lib/site-packages/librosa/__pycache__/_cache.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/librosa/__pycache__/_typing.cpython-311.pyc b/.venv/Lib/site-packages/librosa/__pycache__/_typing.cpython-311.pyc index ce4ccde8..1d9a5a6a 100644 Binary files a/.venv/Lib/site-packages/librosa/__pycache__/_typing.cpython-311.pyc and b/.venv/Lib/site-packages/librosa/__pycache__/_typing.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/librosa/__pycache__/filters.cpython-311.pyc b/.venv/Lib/site-packages/librosa/__pycache__/filters.cpython-311.pyc index a7c03bbb..d5c36dbe 100644 Binary files a/.venv/Lib/site-packages/librosa/__pycache__/filters.cpython-311.pyc and b/.venv/Lib/site-packages/librosa/__pycache__/filters.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/librosa/__pycache__/sequence.cpython-311.pyc b/.venv/Lib/site-packages/librosa/__pycache__/sequence.cpython-311.pyc index 5d1e7313..bcc98340 100644 Binary files a/.venv/Lib/site-packages/librosa/__pycache__/sequence.cpython-311.pyc and b/.venv/Lib/site-packages/librosa/__pycache__/sequence.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/librosa/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/librosa/__pycache__/version.cpython-311.pyc index 79567ba1..630b8402 100644 Binary files a/.venv/Lib/site-packages/librosa/__pycache__/version.cpython-311.pyc and b/.venv/Lib/site-packages/librosa/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/librosa/core/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/librosa/core/__pycache__/__init__.cpython-311.pyc index 4fc1df20..f97a387a 100644 Binary files a/.venv/Lib/site-packages/librosa/core/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/librosa/core/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/librosa/core/__pycache__/audio._zc_wrapper-1152.py311.1.nbc b/.venv/Lib/site-packages/librosa/core/__pycache__/audio._zc_wrapper-1152.py311.1.nbc index 350e073b..535f30a1 100644 Binary files a/.venv/Lib/site-packages/librosa/core/__pycache__/audio._zc_wrapper-1152.py311.1.nbc and b/.venv/Lib/site-packages/librosa/core/__pycache__/audio._zc_wrapper-1152.py311.1.nbc differ diff --git a/.venv/Lib/site-packages/librosa/core/__pycache__/audio._zc_wrapper-1152.py311.2.nbc b/.venv/Lib/site-packages/librosa/core/__pycache__/audio._zc_wrapper-1152.py311.2.nbc 
index 0ee11877..0236c11e 100644 Binary files a/.venv/Lib/site-packages/librosa/core/__pycache__/audio._zc_wrapper-1152.py311.2.nbc and b/.venv/Lib/site-packages/librosa/core/__pycache__/audio._zc_wrapper-1152.py311.2.nbc differ diff --git a/.venv/Lib/site-packages/librosa/core/__pycache__/audio._zc_wrapper-1152.py311.nbi b/.venv/Lib/site-packages/librosa/core/__pycache__/audio._zc_wrapper-1152.py311.nbi index 837e3f16..037dc815 100644 Binary files a/.venv/Lib/site-packages/librosa/core/__pycache__/audio._zc_wrapper-1152.py311.nbi and b/.venv/Lib/site-packages/librosa/core/__pycache__/audio._zc_wrapper-1152.py311.nbi differ diff --git a/.venv/Lib/site-packages/librosa/core/__pycache__/audio.cpython-311.pyc b/.venv/Lib/site-packages/librosa/core/__pycache__/audio.cpython-311.pyc index 2aa70810..684d937a 100644 Binary files a/.venv/Lib/site-packages/librosa/core/__pycache__/audio.cpython-311.pyc and b/.venv/Lib/site-packages/librosa/core/__pycache__/audio.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/librosa/core/__pycache__/convert.cpython-311.pyc b/.venv/Lib/site-packages/librosa/core/__pycache__/convert.cpython-311.pyc index cb9f3b89..ae82b86d 100644 Binary files a/.venv/Lib/site-packages/librosa/core/__pycache__/convert.cpython-311.pyc and b/.venv/Lib/site-packages/librosa/core/__pycache__/convert.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/librosa/core/__pycache__/fft.cpython-311.pyc b/.venv/Lib/site-packages/librosa/core/__pycache__/fft.cpython-311.pyc index 48e8b137..dcead406 100644 Binary files a/.venv/Lib/site-packages/librosa/core/__pycache__/fft.cpython-311.pyc and b/.venv/Lib/site-packages/librosa/core/__pycache__/fft.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/librosa/core/__pycache__/guf-audio._zc_wrapper-1152.py311.1.nbc b/.venv/Lib/site-packages/librosa/core/__pycache__/guf-audio._zc_wrapper-1152.py311.1.nbc index e8d254c6..73ad500b 100644 Binary files 
a/.venv/Lib/site-packages/librosa/core/__pycache__/guf-audio._zc_wrapper-1152.py311.1.nbc and b/.venv/Lib/site-packages/librosa/core/__pycache__/guf-audio._zc_wrapper-1152.py311.1.nbc differ diff --git a/.venv/Lib/site-packages/librosa/core/__pycache__/guf-audio._zc_wrapper-1152.py311.2.nbc b/.venv/Lib/site-packages/librosa/core/__pycache__/guf-audio._zc_wrapper-1152.py311.2.nbc index daf6b59d..f5c0be28 100644 Binary files a/.venv/Lib/site-packages/librosa/core/__pycache__/guf-audio._zc_wrapper-1152.py311.2.nbc and b/.venv/Lib/site-packages/librosa/core/__pycache__/guf-audio._zc_wrapper-1152.py311.2.nbc differ diff --git a/.venv/Lib/site-packages/librosa/core/__pycache__/guf-audio._zc_wrapper-1152.py311.nbi b/.venv/Lib/site-packages/librosa/core/__pycache__/guf-audio._zc_wrapper-1152.py311.nbi index 9ff9cb92..16ab1044 100644 Binary files a/.venv/Lib/site-packages/librosa/core/__pycache__/guf-audio._zc_wrapper-1152.py311.nbi and b/.venv/Lib/site-packages/librosa/core/__pycache__/guf-audio._zc_wrapper-1152.py311.nbi differ diff --git a/.venv/Lib/site-packages/librosa/core/__pycache__/guf-pitch._pi_wrapper-439.py311.1.nbc b/.venv/Lib/site-packages/librosa/core/__pycache__/guf-pitch._pi_wrapper-439.py311.1.nbc index 4353e209..5b13bb2d 100644 Binary files a/.venv/Lib/site-packages/librosa/core/__pycache__/guf-pitch._pi_wrapper-439.py311.1.nbc and b/.venv/Lib/site-packages/librosa/core/__pycache__/guf-pitch._pi_wrapper-439.py311.1.nbc differ diff --git a/.venv/Lib/site-packages/librosa/core/__pycache__/guf-pitch._pi_wrapper-439.py311.2.nbc b/.venv/Lib/site-packages/librosa/core/__pycache__/guf-pitch._pi_wrapper-439.py311.2.nbc index 354bf38a..808ca985 100644 Binary files a/.venv/Lib/site-packages/librosa/core/__pycache__/guf-pitch._pi_wrapper-439.py311.2.nbc and b/.venv/Lib/site-packages/librosa/core/__pycache__/guf-pitch._pi_wrapper-439.py311.2.nbc differ diff --git a/.venv/Lib/site-packages/librosa/core/__pycache__/guf-pitch._pi_wrapper-439.py311.nbi 
b/.venv/Lib/site-packages/librosa/core/__pycache__/guf-pitch._pi_wrapper-439.py311.nbi index b08de3a6..1cc7be7b 100644 Binary files a/.venv/Lib/site-packages/librosa/core/__pycache__/guf-pitch._pi_wrapper-439.py311.nbi and b/.venv/Lib/site-packages/librosa/core/__pycache__/guf-pitch._pi_wrapper-439.py311.nbi differ diff --git a/.venv/Lib/site-packages/librosa/core/__pycache__/intervals.cpython-311.pyc b/.venv/Lib/site-packages/librosa/core/__pycache__/intervals.cpython-311.pyc index f3b1c6da..70d0694a 100644 Binary files a/.venv/Lib/site-packages/librosa/core/__pycache__/intervals.cpython-311.pyc and b/.venv/Lib/site-packages/librosa/core/__pycache__/intervals.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/librosa/core/__pycache__/notation.cpython-311.pyc b/.venv/Lib/site-packages/librosa/core/__pycache__/notation.cpython-311.pyc index 12812ce6..c984779b 100644 Binary files a/.venv/Lib/site-packages/librosa/core/__pycache__/notation.cpython-311.pyc and b/.venv/Lib/site-packages/librosa/core/__pycache__/notation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/librosa/core/__pycache__/pitch._pi_wrapper-439.py311.1.nbc b/.venv/Lib/site-packages/librosa/core/__pycache__/pitch._pi_wrapper-439.py311.1.nbc index 82e9813f..b453f79c 100644 Binary files a/.venv/Lib/site-packages/librosa/core/__pycache__/pitch._pi_wrapper-439.py311.1.nbc and b/.venv/Lib/site-packages/librosa/core/__pycache__/pitch._pi_wrapper-439.py311.1.nbc differ diff --git a/.venv/Lib/site-packages/librosa/core/__pycache__/pitch._pi_wrapper-439.py311.2.nbc b/.venv/Lib/site-packages/librosa/core/__pycache__/pitch._pi_wrapper-439.py311.2.nbc index b07abc24..609d6fb8 100644 Binary files a/.venv/Lib/site-packages/librosa/core/__pycache__/pitch._pi_wrapper-439.py311.2.nbc and b/.venv/Lib/site-packages/librosa/core/__pycache__/pitch._pi_wrapper-439.py311.2.nbc differ diff --git a/.venv/Lib/site-packages/librosa/core/__pycache__/pitch._pi_wrapper-439.py311.nbi 
b/.venv/Lib/site-packages/librosa/core/__pycache__/pitch._pi_wrapper-439.py311.nbi index 0662f288..04a257a9 100644 Binary files a/.venv/Lib/site-packages/librosa/core/__pycache__/pitch._pi_wrapper-439.py311.nbi and b/.venv/Lib/site-packages/librosa/core/__pycache__/pitch._pi_wrapper-439.py311.nbi differ diff --git a/.venv/Lib/site-packages/librosa/core/__pycache__/pitch.cpython-311.pyc b/.venv/Lib/site-packages/librosa/core/__pycache__/pitch.cpython-311.pyc index a47b42ca..1658a44d 100644 Binary files a/.venv/Lib/site-packages/librosa/core/__pycache__/pitch.cpython-311.pyc and b/.venv/Lib/site-packages/librosa/core/__pycache__/pitch.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/librosa/core/__pycache__/spectrum.cpython-311.pyc b/.venv/Lib/site-packages/librosa/core/__pycache__/spectrum.cpython-311.pyc index b3437da7..0e938239 100644 Binary files a/.venv/Lib/site-packages/librosa/core/__pycache__/spectrum.cpython-311.pyc and b/.venv/Lib/site-packages/librosa/core/__pycache__/spectrum.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/librosa/util/__pycache__/__init__.cpython-311.pyc index 95a25cf7..42365253 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/librosa/util/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/decorators.cpython-311.pyc b/.venv/Lib/site-packages/librosa/util/__pycache__/decorators.cpython-311.pyc index a62a3d42..12c04e3c 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/decorators.cpython-311.pyc and b/.venv/Lib/site-packages/librosa/util/__pycache__/decorators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/deprecation.cpython-311.pyc b/.venv/Lib/site-packages/librosa/util/__pycache__/deprecation.cpython-311.pyc index 0fbff322..c5e964bc 100644 Binary files 
a/.venv/Lib/site-packages/librosa/util/__pycache__/deprecation.cpython-311.pyc and b/.venv/Lib/site-packages/librosa/util/__pycache__/deprecation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/librosa/util/__pycache__/exceptions.cpython-311.pyc index 3b8e501d..481a555b 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/librosa/util/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.1.nbc b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.1.nbc index 049349fb..d7b9636e 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.1.nbc and b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.1.nbc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.2.nbc b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.2.nbc index f6947d99..49a0b673 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.2.nbc and b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.2.nbc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.3.nbc b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.3.nbc index cb04a356..68777355 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.3.nbc and b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.3.nbc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.4.nbc b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.4.nbc index 
bc3c5d9a..98fff5b8 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.4.nbc and b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.4.nbc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.5.nbc b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.5.nbc index e0bf3774..6cfaf02c 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.5.nbc and b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.5.nbc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.nbi b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.nbi index f7296108..8c51bf87 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.nbi and b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmax-1051.py311.nbi differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.1.nbc b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.1.nbc index 353bda3b..0ef9c0e8 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.1.nbc and b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.1.nbc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.2.nbc b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.2.nbc index 302ff3a9..88ecf94a 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.2.nbc and b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.2.nbc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.3.nbc 
b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.3.nbc index 4aa2e7fe..ac4e348e 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.3.nbc and b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.3.nbc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.4.nbc b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.4.nbc index 03b7ba02..8a840d96 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.4.nbc and b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.4.nbc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.5.nbc b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.5.nbc index f61c8b77..46ee4ee2 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.5.nbc and b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.5.nbc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.nbi b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.nbi index 01ec96dd..4a49decb 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.nbi and b/.venv/Lib/site-packages/librosa/util/__pycache__/guf-utils._localmin-1068.py311.nbi differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._cabs2-2448.py311.nbi b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._cabs2-2448.py311.nbi index 5060c96c..1ce38eef 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._cabs2-2448.py311.nbi and b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._cabs2-2448.py311.nbi differ diff --git 
a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.1.nbc b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.1.nbc index dd6b8d9d..edf7ce6a 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.1.nbc and b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.1.nbc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.2.nbc b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.2.nbc index 02056e09..ceb79ea1 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.2.nbc and b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.2.nbc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.3.nbc b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.3.nbc index 92426ba0..1a512bc2 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.3.nbc and b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.3.nbc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.4.nbc b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.4.nbc index 30577d79..3dc16df5 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.4.nbc and b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.4.nbc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.5.nbc b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.5.nbc index 2c6d2b86..2926ddbe 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.5.nbc and b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.5.nbc 
differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.nbi b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.nbi index 12870d40..e41385a3 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.nbi and b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmax-1051.py311.nbi differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.1.nbc b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.1.nbc index d1634aa5..88d308cc 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.1.nbc and b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.1.nbc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.2.nbc b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.2.nbc index cdcc4e2b..4edf4c2f 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.2.nbc and b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.2.nbc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.3.nbc b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.3.nbc index b2348a12..f15389b9 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.3.nbc and b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.3.nbc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.4.nbc b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.4.nbc index 65fa154b..50b017bf 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.4.nbc and 
b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.4.nbc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.5.nbc b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.5.nbc index 47eec126..e0603de1 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.5.nbc and b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.5.nbc differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.nbi b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.nbi index 97f8b35f..fc315972 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.nbi and b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._localmin-1068.py311.nbi differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._phasor_angles-2500.py311.nbi b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._phasor_angles-2500.py311.nbi index 4e83ae80..d2d75a41 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/utils._phasor_angles-2500.py311.nbi and b/.venv/Lib/site-packages/librosa/util/__pycache__/utils._phasor_angles-2500.py311.nbi differ diff --git a/.venv/Lib/site-packages/librosa/util/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/librosa/util/__pycache__/utils.cpython-311.pyc index f206659d..2b318950 100644 Binary files a/.venv/Lib/site-packages/librosa/util/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/librosa/util/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/__pycache__/__init__.cpython-311.pyc index 429b3db4..3fcbcbe6 100644 Binary files a/.venv/Lib/site-packages/llvmlite/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/llvmlite/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/__pycache__/_version.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/__pycache__/_version.cpython-311.pyc index 87f24ac3..57275800 100644 Binary files a/.venv/Lib/site-packages/llvmlite/__pycache__/_version.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/__pycache__/_version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/__pycache__/utils.cpython-311.pyc index 478d51b5..0d5bf066 100644 Binary files a/.venv/Lib/site-packages/llvmlite/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/__init__.cpython-311.pyc index f3a48a58..d2f63753 100644 Binary files a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/analysis.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/analysis.cpython-311.pyc index 1e911a20..8ddc577c 100644 Binary files a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/analysis.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/analysis.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/common.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/common.cpython-311.pyc index 36e9cba7..9efe5b72 100644 Binary files a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/common.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/common.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/context.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/context.cpython-311.pyc index e5423e84..ad7098f4 100644 Binary files a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/context.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/context.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/dylib.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/dylib.cpython-311.pyc index d40d0103..1f6a5391 100644 Binary files a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/dylib.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/dylib.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/executionengine.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/executionengine.cpython-311.pyc index ddc60a38..ded03322 100644 Binary files a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/executionengine.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/executionengine.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/ffi.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/ffi.cpython-311.pyc index 3c83d860..190e0934 100644 Binary files a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/ffi.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/ffi.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/initfini.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/initfini.cpython-311.pyc index 374f694e..0d66cfdd 100644 Binary files a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/initfini.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/initfini.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/linker.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/linker.cpython-311.pyc index ef782d8e..5d5ba330 100644 Binary files a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/linker.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/linker.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/module.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/module.cpython-311.pyc index 800aeba4..9e51138e 100644 Binary files a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/module.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/module.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/object_file.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/object_file.cpython-311.pyc index 48145d96..1c603bf6 100644 Binary files a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/object_file.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/object_file.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/options.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/options.cpython-311.pyc index 273d8bbd..dae0a380 100644 Binary files a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/options.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/options.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/orcjit.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/orcjit.cpython-311.pyc index e34cbcde..d286dbc4 100644 Binary files a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/orcjit.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/orcjit.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/passmanagers.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/passmanagers.cpython-311.pyc index adf00393..413c9222 100644 Binary files a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/passmanagers.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/passmanagers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/targets.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/targets.cpython-311.pyc index 810fa379..0d14ac58 100644 Binary files a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/targets.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/targets.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/transforms.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/transforms.cpython-311.pyc index 24479163..260d212d 100644 Binary files a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/transforms.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/transforms.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/typeref.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/typeref.cpython-311.pyc index 418b8c9f..f5cf2b0f 100644 Binary files a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/typeref.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/typeref.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/value.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/value.cpython-311.pyc index 4c2784c7..09c101f4 100644 Binary files a/.venv/Lib/site-packages/llvmlite/binding/__pycache__/value.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/binding/__pycache__/value.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/llvmlite/ir/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/ir/__pycache__/__init__.cpython-311.pyc index 896f4990..2a6ef269 100644 Binary files a/.venv/Lib/site-packages/llvmlite/ir/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/ir/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/ir/__pycache__/_utils.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/ir/__pycache__/_utils.cpython-311.pyc index cedc9c42..518b630d 100644 Binary files a/.venv/Lib/site-packages/llvmlite/ir/__pycache__/_utils.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/ir/__pycache__/_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/ir/__pycache__/builder.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/ir/__pycache__/builder.cpython-311.pyc index 34757035..b7fa5e08 100644 Binary files a/.venv/Lib/site-packages/llvmlite/ir/__pycache__/builder.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/ir/__pycache__/builder.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/ir/__pycache__/context.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/ir/__pycache__/context.cpython-311.pyc index 3ef4d77d..6b930677 100644 Binary files a/.venv/Lib/site-packages/llvmlite/ir/__pycache__/context.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/ir/__pycache__/context.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/ir/__pycache__/instructions.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/ir/__pycache__/instructions.cpython-311.pyc index f06f3b80..a903b7a9 100644 Binary files a/.venv/Lib/site-packages/llvmlite/ir/__pycache__/instructions.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/ir/__pycache__/instructions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/ir/__pycache__/module.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/ir/__pycache__/module.cpython-311.pyc index 
28b1acf1..32eec909 100644 Binary files a/.venv/Lib/site-packages/llvmlite/ir/__pycache__/module.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/ir/__pycache__/module.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/ir/__pycache__/transforms.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/ir/__pycache__/transforms.cpython-311.pyc index b0c913ff..a48c76d1 100644 Binary files a/.venv/Lib/site-packages/llvmlite/ir/__pycache__/transforms.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/ir/__pycache__/transforms.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/ir/__pycache__/types.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/ir/__pycache__/types.cpython-311.pyc index 48c96eda..9f6e4400 100644 Binary files a/.venv/Lib/site-packages/llvmlite/ir/__pycache__/types.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/ir/__pycache__/types.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/llvmlite/ir/__pycache__/values.cpython-311.pyc b/.venv/Lib/site-packages/llvmlite/ir/__pycache__/values.cpython-311.pyc index 42ce6ae2..13aafb6f 100644 Binary files a/.venv/Lib/site-packages/llvmlite/ir/__pycache__/values.cpython-311.pyc and b/.venv/Lib/site-packages/llvmlite/ir/__pycache__/values.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/markupsafe/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/markupsafe/__pycache__/__init__.cpython-311.pyc index 66617547..d29c10fe 100644 Binary files a/.venv/Lib/site-packages/markupsafe/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/markupsafe/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/__init__.cpython-311.pyc index 4e9bbbe0..92ae4e07 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/matplotlib/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/_afm.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/_afm.cpython-311.pyc index bf264f5a..d206db25 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/_afm.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/_afm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/_blocking_input.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/_blocking_input.cpython-311.pyc index 8c0ba1b7..951651f4 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/_blocking_input.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/_blocking_input.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/_cm.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/_cm.cpython-311.pyc index 81c97c82..8bf6cb27 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/_cm.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/_cm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/_cm_listed.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/_cm_listed.cpython-311.pyc index 7e7cb374..b98f27c2 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/_cm_listed.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/_cm_listed.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/_color_data.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/_color_data.cpython-311.pyc index c837afd7..348d7409 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/_color_data.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/_color_data.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/matplotlib/__pycache__/_constrained_layout.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/_constrained_layout.cpython-311.pyc index 49ebd691..398e6f54 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/_constrained_layout.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/_constrained_layout.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/_docstring.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/_docstring.cpython-311.pyc index acdecc72..06a11932 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/_docstring.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/_docstring.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/_enums.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/_enums.cpython-311.pyc index 4afbd970..96c4bab0 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/_enums.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/_enums.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/_fontconfig_pattern.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/_fontconfig_pattern.cpython-311.pyc index 16b9ad14..8a4c140a 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/_fontconfig_pattern.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/_fontconfig_pattern.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/_layoutgrid.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/_layoutgrid.cpython-311.pyc index 2d5770a7..c1fa8955 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/_layoutgrid.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/_layoutgrid.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/_mathtext.cpython-311.pyc 
b/.venv/Lib/site-packages/matplotlib/__pycache__/_mathtext.cpython-311.pyc index 851e2f56..438884b6 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/_mathtext.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/_mathtext.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/_mathtext_data.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/_mathtext_data.cpython-311.pyc index 0d3cd79d..2b1f7e78 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/_mathtext_data.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/_mathtext_data.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/_pylab_helpers.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/_pylab_helpers.cpython-311.pyc index 74e6b4a5..b4ff6aaf 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/_pylab_helpers.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/_pylab_helpers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/_text_helpers.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/_text_helpers.cpython-311.pyc index 3aa03a03..5710b93c 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/_text_helpers.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/_text_helpers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/_tight_bbox.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/_tight_bbox.cpython-311.pyc index 6812b98d..985f8b92 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/_tight_bbox.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/_tight_bbox.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/_tight_layout.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/_tight_layout.cpython-311.pyc index 
32179835..19a2bf1e 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/_tight_layout.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/_tight_layout.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/_version.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/_version.cpython-311.pyc index cbe45ede..0986e1b0 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/_version.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/_version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/artist.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/artist.cpython-311.pyc index 58cbf8d1..cfb0d79e 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/artist.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/artist.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/axis.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/axis.cpython-311.pyc index 7fd52579..4972f454 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/axis.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/axis.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/backend_bases.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/backend_bases.cpython-311.pyc index 40661d42..10ba3679 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/backend_bases.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/backend_bases.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/backend_managers.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/backend_managers.cpython-311.pyc index 7289b44e..e1c66345 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/backend_managers.cpython-311.pyc and 
b/.venv/Lib/site-packages/matplotlib/__pycache__/backend_managers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/backend_tools.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/backend_tools.cpython-311.pyc index b460292e..f85fe72d 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/backend_tools.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/backend_tools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/bezier.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/bezier.cpython-311.pyc index 14ebb28f..3a0c4968 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/bezier.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/bezier.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/category.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/category.cpython-311.pyc index 0ff8c198..f8dbb199 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/category.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/category.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/cbook.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/cbook.cpython-311.pyc index 41194110..767747ec 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/cbook.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/cbook.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/cm.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/cm.cpython-311.pyc index 5560bfa1..837d094c 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/cm.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/cm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/collections.cpython-311.pyc 
b/.venv/Lib/site-packages/matplotlib/__pycache__/collections.cpython-311.pyc index fbc80a06..ea2b270e 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/collections.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/collections.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/colorbar.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/colorbar.cpython-311.pyc index 337b261c..4a3a2083 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/colorbar.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/colorbar.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/colors.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/colors.cpython-311.pyc index ea39deb8..d4ec13c0 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/colors.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/colors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/container.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/container.cpython-311.pyc index a30b175d..91413e54 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/container.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/container.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/contour.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/contour.cpython-311.pyc index cce0a1b1..c03270ab 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/contour.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/contour.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/dates.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/dates.cpython-311.pyc index 20359818..a997628b 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/dates.cpython-311.pyc 
and b/.venv/Lib/site-packages/matplotlib/__pycache__/dates.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/dviread.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/dviread.cpython-311.pyc index cfe5e73e..d3af3d8e 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/dviread.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/dviread.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/figure.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/figure.cpython-311.pyc index d3a551f7..026e1fa0 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/figure.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/figure.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/font_manager.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/font_manager.cpython-311.pyc index 71d191f9..f4aabe56 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/font_manager.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/font_manager.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/gridspec.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/gridspec.cpython-311.pyc index 31485ba4..ecbbf161 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/gridspec.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/gridspec.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/hatch.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/hatch.cpython-311.pyc index 03747a27..02f5b2ec 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/hatch.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/hatch.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/image.cpython-311.pyc 
b/.venv/Lib/site-packages/matplotlib/__pycache__/image.cpython-311.pyc index bf357c6a..07c57adc 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/image.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/image.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/layout_engine.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/layout_engine.cpython-311.pyc index 9d947013..fecfe9d3 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/layout_engine.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/layout_engine.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/legend.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/legend.cpython-311.pyc index 4ac6d035..082c0188 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/legend.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/legend.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/legend_handler.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/legend_handler.cpython-311.pyc index 3a1b59ba..a06889ab 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/legend_handler.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/legend_handler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/lines.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/lines.cpython-311.pyc index 47d6b652..e97c19d9 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/lines.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/lines.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/markers.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/markers.cpython-311.pyc index c324e5ed..3933bd13 100644 Binary files 
a/.venv/Lib/site-packages/matplotlib/__pycache__/markers.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/markers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/mathtext.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/mathtext.cpython-311.pyc index 5a163085..8ffa644f 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/mathtext.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/mathtext.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/mlab.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/mlab.cpython-311.pyc index 12372d35..b3db89c2 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/mlab.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/mlab.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/offsetbox.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/offsetbox.cpython-311.pyc index cf283083..5cfdd236 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/offsetbox.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/offsetbox.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/patches.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/patches.cpython-311.pyc index 15c776bf..a1c923df 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/patches.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/patches.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/path.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/path.cpython-311.pyc index ea5c625e..4877e5f6 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/path.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/path.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/matplotlib/__pycache__/pyplot.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/pyplot.cpython-311.pyc index 452758aa..f4f68aa2 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/pyplot.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/pyplot.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/quiver.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/quiver.cpython-311.pyc index f76f606a..408c94ba 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/quiver.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/quiver.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/rcsetup.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/rcsetup.cpython-311.pyc index 33821a33..2c9bef6e 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/rcsetup.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/rcsetup.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/scale.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/scale.cpython-311.pyc index 13466522..7f26f98f 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/scale.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/scale.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/spines.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/spines.cpython-311.pyc index 7391ab70..958dae03 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/spines.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/spines.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/stackplot.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/stackplot.cpython-311.pyc index 05700e54..2679a193 100644 Binary files 
a/.venv/Lib/site-packages/matplotlib/__pycache__/stackplot.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/stackplot.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/streamplot.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/streamplot.cpython-311.pyc index efe1c997..d875fe06 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/streamplot.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/streamplot.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/table.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/table.cpython-311.pyc index 2edb6605..5bca269a 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/table.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/table.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/texmanager.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/texmanager.cpython-311.pyc index a7e1b0ce..ff26879c 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/texmanager.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/texmanager.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/text.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/text.cpython-311.pyc index a6cee069..128b030c 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/text.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/text.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/textpath.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/textpath.cpython-311.pyc index 9104a877..bf7d8950 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/textpath.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/textpath.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/matplotlib/__pycache__/ticker.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/ticker.cpython-311.pyc index af190cf1..1fbbedbc 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/ticker.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/ticker.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/transforms.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/transforms.cpython-311.pyc index 1907d3fe..9976429b 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/transforms.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/transforms.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/units.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/units.cpython-311.pyc index a4500106..0da675be 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/units.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/units.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/__pycache__/widgets.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/__pycache__/widgets.cpython-311.pyc index c64d13d7..2f47bbbf 100644 Binary files a/.venv/Lib/site-packages/matplotlib/__pycache__/widgets.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/__pycache__/widgets.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/_api/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/_api/__pycache__/__init__.cpython-311.pyc index a2ec8d9a..5fa06cdb 100644 Binary files a/.venv/Lib/site-packages/matplotlib/_api/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/_api/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/_api/__pycache__/deprecation.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/_api/__pycache__/deprecation.cpython-311.pyc index 
af1440c5..46293c89 100644 Binary files a/.venv/Lib/site-packages/matplotlib/_api/__pycache__/deprecation.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/_api/__pycache__/deprecation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/axes/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/axes/__pycache__/__init__.cpython-311.pyc index 5c1c94e6..31da5e7f 100644 Binary files a/.venv/Lib/site-packages/matplotlib/axes/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/axes/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/axes/__pycache__/_axes.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/axes/__pycache__/_axes.cpython-311.pyc index 0070763f..edad52fb 100644 Binary files a/.venv/Lib/site-packages/matplotlib/axes/__pycache__/_axes.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/axes/__pycache__/_axes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/axes/__pycache__/_base.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/axes/__pycache__/_base.cpython-311.pyc index 553ffcce..c4b03f59 100644 Binary files a/.venv/Lib/site-packages/matplotlib/axes/__pycache__/_base.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/axes/__pycache__/_base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/axes/__pycache__/_secondary_axes.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/axes/__pycache__/_secondary_axes.cpython-311.pyc index 78513cba..c5c9ce9d 100644 Binary files a/.venv/Lib/site-packages/matplotlib/axes/__pycache__/_secondary_axes.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/axes/__pycache__/_secondary_axes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/backends/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/backends/__pycache__/__init__.cpython-311.pyc index c7da6739..1b5e9e65 100644 Binary files 
a/.venv/Lib/site-packages/matplotlib/backends/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/backends/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/backends/__pycache__/backend_agg.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/backends/__pycache__/backend_agg.cpython-311.pyc index 79172d5e..76a79954 100644 Binary files a/.venv/Lib/site-packages/matplotlib/backends/__pycache__/backend_agg.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/backends/__pycache__/backend_agg.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/projections/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/projections/__pycache__/__init__.cpython-311.pyc index d744287f..d5c44d52 100644 Binary files a/.venv/Lib/site-packages/matplotlib/projections/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/projections/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/projections/__pycache__/geo.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/projections/__pycache__/geo.cpython-311.pyc index a33a634a..86794458 100644 Binary files a/.venv/Lib/site-packages/matplotlib/projections/__pycache__/geo.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/projections/__pycache__/geo.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/projections/__pycache__/polar.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/projections/__pycache__/polar.cpython-311.pyc index 29029f5c..70cdc7ae 100644 Binary files a/.venv/Lib/site-packages/matplotlib/projections/__pycache__/polar.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/projections/__pycache__/polar.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/style/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/style/__pycache__/__init__.cpython-311.pyc index 61ee778b..1b27d31a 100644 Binary 
files a/.venv/Lib/site-packages/matplotlib/style/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/style/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/style/__pycache__/core.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/style/__pycache__/core.cpython-311.pyc index f9a98d35..1e4b8c80 100644 Binary files a/.venv/Lib/site-packages/matplotlib/style/__pycache__/core.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/style/__pycache__/core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/tri/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/tri/__pycache__/__init__.cpython-311.pyc index ba8f8fd8..ce7bfad1 100644 Binary files a/.venv/Lib/site-packages/matplotlib/tri/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/tri/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_triangulation.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_triangulation.cpython-311.pyc index 521b391e..7d8b9fdb 100644 Binary files a/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_triangulation.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_triangulation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_tricontour.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_tricontour.cpython-311.pyc index 09707acd..cb3570b0 100644 Binary files a/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_tricontour.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_tricontour.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_trifinder.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_trifinder.cpython-311.pyc index 9e9283b0..694de96d 100644 Binary files 
a/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_trifinder.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_trifinder.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_triinterpolate.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_triinterpolate.cpython-311.pyc index f63c96e9..1e0577ee 100644 Binary files a/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_triinterpolate.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_triinterpolate.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_tripcolor.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_tripcolor.cpython-311.pyc index c8dcd9f3..647530e8 100644 Binary files a/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_tripcolor.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_tripcolor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_triplot.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_triplot.cpython-311.pyc index f5f3d8f5..59ccc3e4 100644 Binary files a/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_triplot.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_triplot.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_trirefine.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_trirefine.cpython-311.pyc index 5a5f2f86..b3923e9b 100644 Binary files a/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_trirefine.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_trirefine.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_tritools.cpython-311.pyc b/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_tritools.cpython-311.pyc index 802e845e..51daa2ba 100644 Binary files 
a/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_tritools.cpython-311.pyc and b/.venv/Lib/site-packages/matplotlib/tri/__pycache__/_tritools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/more_itertools/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/more_itertools/__pycache__/__init__.cpython-311.pyc index 15008ee1..b641dafa 100644 Binary files a/.venv/Lib/site-packages/more_itertools/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/more_itertools/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/more_itertools/__pycache__/more.cpython-311.pyc b/.venv/Lib/site-packages/more_itertools/__pycache__/more.cpython-311.pyc index 0446b836..92407e49 100644 Binary files a/.venv/Lib/site-packages/more_itertools/__pycache__/more.cpython-311.pyc and b/.venv/Lib/site-packages/more_itertools/__pycache__/more.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/more_itertools/__pycache__/recipes.cpython-311.pyc b/.venv/Lib/site-packages/more_itertools/__pycache__/recipes.cpython-311.pyc index 5f311747..ea1ae04b 100644 Binary files a/.venv/Lib/site-packages/more_itertools/__pycache__/recipes.cpython-311.pyc and b/.venv/Lib/site-packages/more_itertools/__pycache__/recipes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy-1.0.3.dist-info/INSTALLER b/.venv/Lib/site-packages/moviepy-1.0.3.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy-1.0.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/Lib/site-packages/moviepy-1.0.3.dist-info/LICENCE.txt b/.venv/Lib/site-packages/moviepy-1.0.3.dist-info/LICENCE.txt new file mode 100644 index 00000000..99c737e5 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy-1.0.3.dist-info/LICENCE.txt @@ -0,0 +1,25 @@ +The MIT License (MIT) +[OSI Approved License] + +The MIT License (MIT) + +Copyright (c) 2015 Zulko + +Permission is hereby granted, free of charge, to any 
person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + diff --git a/.venv/Lib/site-packages/moviepy-1.0.3.dist-info/METADATA b/.venv/Lib/site-packages/moviepy-1.0.3.dist-info/METADATA new file mode 100644 index 00000000..c3d511f5 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy-1.0.3.dist-info/METADATA @@ -0,0 +1,295 @@ +Metadata-Version: 2.1 +Name: moviepy +Version: 1.0.3 +Summary: Video editing with Python +Home-page: https://zulko.github.io/moviepy/ +Author: Zulko 2017 +License: MIT License +Keywords: video editing audio compositing ffmpeg +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: 
Multimedia +Classifier: Topic :: Multimedia :: Sound/Audio +Classifier: Topic :: Multimedia :: Sound/Audio :: Analysis +Classifier: Topic :: Multimedia :: Video +Classifier: Topic :: Multimedia :: Video :: Capture +Classifier: Topic :: Multimedia :: Video :: Conversion +License-File: LICENCE.txt +Requires-Dist: decorator <5.0,>=4.0.2 +Requires-Dist: tqdm <5.0,>=4.11.2 +Requires-Dist: requests <3.0,>=2.8.1 +Requires-Dist: proglog <=1.0.0 +Requires-Dist: numpy >=1.17.3 ; python_version != "2.7" +Requires-Dist: imageio <2.5,>=2.0 ; python_version < "3.4" +Requires-Dist: numpy ; python_version >= "2.7" +Requires-Dist: imageio <3.0,>=2.5 ; python_version >= "3.4" +Requires-Dist: imageio-ffmpeg >=0.2.0 ; python_version >= "3.4" +Provides-Extra: doc +Requires-Dist: numpydoc <1.0,>=0.6.0 ; extra == 'doc' +Requires-Dist: sphinx-rtd-theme <1.0,>=0.1.10b0 ; extra == 'doc' +Requires-Dist: Sphinx <2.0,>=1.5.2 ; extra == 'doc' +Requires-Dist: pygame <2.0,>=1.9.3 ; (python_version < "3.8") and extra == 'doc' +Provides-Extra: optional +Requires-Dist: youtube-dl ; extra == 'optional' +Requires-Dist: opencv-python <4.0,>=3.0 ; (python_version != "2.7") and extra == 'optional' +Requires-Dist: scipy <1.5,>=0.19.0 ; (python_version != "3.3") and extra == 'optional' +Requires-Dist: scikit-image <1.0,>=0.13.0 ; (python_version >= "3.4") and extra == 'optional' +Requires-Dist: scikit-learn ; (python_version >= "3.4") and extra == 'optional' +Requires-Dist: matplotlib <3.0,>=2.0.0 ; (python_version >= "3.4") and extra == 'optional' +Provides-Extra: test +Requires-Dist: coverage <5.0 ; extra == 'test' +Requires-Dist: coveralls <2.0,>=1.1 ; extra == 'test' +Requires-Dist: pytest-cov <3.0,>=2.5.1 ; extra == 'test' +Requires-Dist: pytest <4.0,>=3.0.0 ; extra == 'test' +Requires-Dist: requests <3.0,>=2.8.1 ; extra == 'test' + +MoviePy +======= + +.. image:: https://badge.fury.io/py/moviepy.svg + :target: PyPI_ + :alt: MoviePy page on the Python Package Index +.. 
image:: https://badges.gitter.im/movie-py/gitter.png + :target: Gitter_ + :alt: Discuss MoviePy on Gitter +.. image:: https://travis-ci.org/Zulko/moviepy.svg?branch=master + :target: https://travis-ci.org/Zulko/moviepy + :alt: Build status on travis-ci +.. image:: https://ci.appveyor.com/api/projects/status/github/zulko/moviepy?svg=true + :target: https://ci.appveyor.com/project/Zulko/moviepy + :alt: Build status on appveyor +.. image:: https://coveralls.io/repos/github/Zulko/moviepy/badge.svg?branch=master + :target: https://coveralls.io/github/Zulko/moviepy?branch=master + :alt: Code coverage from coveralls.io + +MoviePy (full documentation_) is a Python library for video editing: cutting, concatenations, title insertions, video compositing (a.k.a. non-linear editing), video processing, and creation of custom effects. See the gallery_ for some examples of use. + +MoviePy can read and write all the most common audio and video formats, including GIF, and runs on Windows/Mac/Linux, with Python 2.7+ and 3 (or only Python 3.4+ from v.1.0). Here it is in action in an IPython notebook: + +.. image:: https://raw.githubusercontent.com/Zulko/moviepy/master/docs/demo_preview.jpeg + :alt: [logo] + :align: center + +Example +------- + +In this example we open a video file, select the subclip between t=50s and t=60s, add a title at the center of the screen, and write the result to a new file: + +.. code:: python + + from moviepy.editor import * + + video = VideoFileClip("myHolidays.mp4").subclip(50,60) + + # Make the text. Many more options are available. + txt_clip = ( TextClip("My Holidays 2013",fontsize=70,color='white') + .set_position('center') + .set_duration(10) ) + + result = CompositeVideoClip([video, txt_clip]) # Overlay text on video + result.write_videofile("myHolidays_edited.webm",fps=25) # Many options... + + +Maintainers wanted! +------------------- + +As there are more and more people seeking support (320 open issues as of Sept. 2019!) 
and all the MoviePy maintainers seem busy, we'd love to hear about developers interested in giving a hand and solving some of the issues (especially the ones that affect you) or reviewing pull requests. Open an issue or contact us directly if you are interested. Thanks! + +Installation +------------ + +MoviePy depends on the Python modules Numpy_, imageio_, Decorator_, and tqdm_, which will be automatically installed during MoviePy's installation. The software FFMPEG should be automatically downloaded/installed (by imageio) during your first use of MoviePy (installation will take a few seconds). If you want to use a specific version of FFMPEG, follow the instructions in ``config_defaults.py``. In case of trouble, provide feedback. + +**Installation by hand:** download the sources, either from PyPI_ or, if you want the development version, from GitHub_, unzip everything into one folder, open a terminal and type: + +.. code:: bash + + $ (sudo) python setup.py install + +**Installation with pip:** if you have ``pip`` installed, just type this in a terminal: + +.. code:: bash + + $ (sudo) pip install moviepy + +If you have neither ``setuptools`` nor ``ez_setup`` installed, the command above will fail. In this case type this before installing: + +.. code:: bash + + $ (sudo) pip install ez_setup + + +Optional but useful dependencies +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You can install ``moviepy`` with all dependencies via: + +.. code:: bash + + $ (sudo) pip install moviepy[optional] + +ImageMagick_ is not strictly required, but needed if you want to incorporate texts. It can also be used as a backend for GIFs, though you can also create GIFs with MoviePy without ImageMagick. + +Once you have installed ImageMagick, it will be automatically detected by MoviePy, **except on Windows!** Windows users, before installing MoviePy by hand, need to edit ``moviepy/config_defaults.py`` to provide the path to the ImageMagick binary, which is called `convert`. 
It should look like this: + +.. code:: python + + IMAGEMAGICK_BINARY = "C:\\Program Files\\ImageMagick_VERSION\\convert.exe" + +PyGame_ is needed for video and sound previews (not relevant if you intend to work with MoviePy on a server but essential for advanced video editing by hand). + +For advanced image processing, you will need one or several of the following packages: + +- The Python Imaging Library (PIL) or, even better, its branch Pillow_. +- Scipy_ (for tracking, segmenting, etc.) can be used to resize video clips if PIL and OpenCV are not installed. +- `Scikit Image`_ may be needed for some advanced image manipulation. +- `OpenCV 2.4.6`_ or a more recent version (one that provides the package ``cv2``) may be needed for some advanced image manipulation. +- `Matplotlib`_ + +Once you have installed it, ImageMagick will be automatically detected by MoviePy, (except for windows users and Ubuntu 16.04LTS users). + +For Windows users, before installing MoviePy by hand, go into the ``moviepy/config_defaults.py`` file and provide the path to the ImageMagick binary called ``magick``. It should look like this: + +.. code:: python + + IMAGEMAGICK_BINARY = "C:\\Program Files\\ImageMagick_VERSION\\magick.exe" + +If you are using an older version of ImageMagick, keep in mind the name of the executable is not ``magick.exe`` but ``convert.exe``. In that case, the IMAGEMAGICK_BINARY property should be ``C:\\Program Files\\ImageMagick_VERSION\\convert.exe`` + +For Ubuntu 16.04LTS users, after installing MoviePy on the terminal, IMAGEMAGICK will not be detected by moviepy. This bug can be fixed. Modify the file in this directory: /etc/ImageMagick-6/policy.xml, comment out the statement . + +PyGame_ is needed for video and sound previews (useless if you intend to work with MoviePy on a server but really essential for advanced video editing *by hand*). + +For instance, using the method ``clip.resize`` requires that at least one of Scipy, PIL, Pillow or OpenCV is installed. 
+ + +Documentation +------------- + +Running `build_docs` has additional dependencies that require installation. + +.. code:: bash + + $ (sudo) pip install moviepy[docs] + +The documentation can be generated and viewed via: + +.. code:: bash + + $ python setup.py build_docs + +You can pass additional arguments to the documentation build, such as clean build: + +.. code:: bash + + $ python setup.py build_docs -E + +More information is available from the `Sphinx`_ documentation. + +New in 1.0.0: Progress bars and messages with Proglog +------------------------------------------------------- + +Non-backwards-compatible changes were introduced in 1.0.0 to +manage progress bars and messages using +`Proglog <https://github.com/Edinburgh-Genome-Foundry/Proglog>`_, which +makes it possible to display nice progress bars in the console as well as in +a Jupyter notebook or any user interface, like a website. + +To display notebook friendly progress bars, first install IPyWidgets: + +.. code:: + + sudo pip install ipywidgets + sudo jupyter nbextension enable --py --sys-prefix widgetsnbextension + +Then at the beginning of your notebook enter: + +.. code:: python + + import proglog + proglog.notebook() + +Have a look at the Proglog project page for more options. + +Running Tests +------------- + +In order to run the test suite locally, first install the dependencies by navigating to the project directory and running: + +.. code:: bash + + $ (sudo) pip install moviepy[test] + +The test suite can then be executed via: + +.. code:: bash + + $ pytest + + +Contribute +---------- + +MoviePy is open-source software originally written by Zulko_ and released under the MIT licence. The project is hosted on GitHub_, where everyone is welcome to contribute, ask for help or simply give feedback. Please read our `Contributing Guidelines`_ for more information about how to contribute! + +You can also discuss the project on Reddit_ or Gitter_. These are preferred over GitHub issues for usage questions and examples. 
+ + +Maintainers +----------- + +- Zulko_ (owner) +- `@tburrows13`_ +- `@mgaitan`_ +- `@earney`_ +- `@mbeacom`_ +- `@overdrivr`_ +- `@keikoro`_ +- `@ryanfox`_ + + +.. MoviePy links +.. _gallery: https://zulko.github.io/moviepy/gallery.html +.. _documentation: https://zulko.github.io/moviepy/ +.. _`download MoviePy`: https://github.com/Zulko/moviepy +.. _`Label Wiki`: https://github.com/Zulko/moviepy/wiki/Label-Wiki +.. _Contributing Guidelines: https://github.com/Zulko/moviepy/blob/master/CONTRIBUTING.md + +.. Websites, Platforms +.. _Reddit: https://www.reddit.com/r/moviepy/ +.. _PyPI: https://pypi.python.org/pypi/moviepy +.. _GitHub: https://github.com/Zulko/moviepy +.. _Gitter: https://gitter.im/movie-py/Lobby + +.. Software, Tools, Libraries +.. _Pillow: https://pillow.readthedocs.org/en/latest/ +.. _Scipy: https://www.scipy.org/ +.. _`OpenCV 2.4.6`: https://sourceforge.net/projects/opencvlibrary/files/ +.. _Pygame: https://www.pygame.org/download.shtml +.. _Numpy: https://www.scipy.org/install.html +.. _imageio: https://imageio.github.io/ +.. _`Scikit Image`: https://scikit-image.org/docs/stable/install.html +.. _Decorator: https://pypi.python.org/pypi/decorator +.. _tqdm: https://github.com/noamraph/tqdm +.. _ffmpeg: https://www.ffmpeg.org/download.html +.. _ImageMagick: https://www.imagemagick.org/script/index.php +.. _`Matplotlib`: https://matplotlib.org/ +.. _`Sphinx`: https://www.sphinx-doc.org/en/master/setuptools.html + +.. People +.. _Zulko: https://github.com/Zulko +.. _`@mgaitan`: https://github.com/mgaitan +.. _`@tburrows13`: https://github.com/tburrows13 +.. _`@earney`: https://github.com/earney +.. _`@mbeacom`: https://github.com/mbeacom +.. _`@overdrivr`: https://github.com/overdrivr +.. _`@keikoro`: https://github.com/keikoro +.. 
_`@ryanfox`: https://github.com/ryanfox diff --git a/.venv/Lib/site-packages/moviepy-1.0.3.dist-info/RECORD b/.venv/Lib/site-packages/moviepy-1.0.3.dist-info/RECORD new file mode 100644 index 00000000..1f7bed4d --- /dev/null +++ b/.venv/Lib/site-packages/moviepy-1.0.3.dist-info/RECORD @@ -0,0 +1,181 @@ +moviepy-1.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +moviepy-1.0.3.dist-info/LICENCE.txt,sha256=AdwAlHG0Q8vAzROYN8BFt1Em-GmUii725Bno1yGvUho,1119 +moviepy-1.0.3.dist-info/METADATA,sha256=75z-PFwx5sDHtLHQEKjPUx8dm9592JOGHkOlIhC_mig,12676 +moviepy-1.0.3.dist-info/RECORD,, +moviepy-1.0.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +moviepy-1.0.3.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92 +moviepy-1.0.3.dist-info/top_level.txt,sha256=2AuAzBHXS1RtsoIY43fmVUdf9ZmzoOHlkkBOmciPVBU,8 +moviepy/Clip.py,sha256=T5J4kgcTrOlRtpROEUWtNkZmeHanTOHWcAXtaN11J88,16025 +moviepy/__init__.py,sha256=Jzb_RfgvXCrm_SQ4AfeGVi1N36YybxnM5mpyxrnihgI,33 +moviepy/__pycache__/Clip.cpython-311.pyc,, +moviepy/__pycache__/__init__.cpython-311.pyc,, +moviepy/__pycache__/compat.cpython-311.pyc,, +moviepy/__pycache__/config.cpython-311.pyc,, +moviepy/__pycache__/config_defaults.cpython-311.pyc,, +moviepy/__pycache__/decorators.cpython-311.pyc,, +moviepy/__pycache__/editor.cpython-311.pyc,, +moviepy/__pycache__/tools.cpython-311.pyc,, +moviepy/__pycache__/utils.cpython-311.pyc,, +moviepy/__pycache__/version.cpython-311.pyc,, +moviepy/audio/AudioClip.py,sha256=blce9zksB5CgOwkBzzbiqsuAuNr2YSmUczJNbMHuIYk,10899 +moviepy/audio/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +moviepy/audio/__pycache__/AudioClip.cpython-311.pyc,, +moviepy/audio/__pycache__/__init__.cpython-311.pyc,, +moviepy/audio/fx/__init__.py,sha256=QiLpB-Tq7n8ywyj769E67G47_TIF8uW7SICHSb3BZc4,121 +moviepy/audio/fx/__pycache__/__init__.cpython-311.pyc,, +moviepy/audio/fx/__pycache__/audio_fadein.cpython-311.pyc,, 
+moviepy/audio/fx/__pycache__/audio_fadeout.cpython-311.pyc,, +moviepy/audio/fx/__pycache__/audio_left_right.cpython-311.pyc,, +moviepy/audio/fx/__pycache__/audio_loop.cpython-311.pyc,, +moviepy/audio/fx/__pycache__/audio_normalize.cpython-311.pyc,, +moviepy/audio/fx/__pycache__/volumex.cpython-311.pyc,, +moviepy/audio/fx/all/__init__.py,sha256=65Vuie1xarBAaMlJ-Ej4MqvVtSF-KOqE2hh0bmqKUus,318 +moviepy/audio/fx/all/__pycache__/__init__.cpython-311.pyc,, +moviepy/audio/fx/audio_fadein.py,sha256=c5LXJow5UmuAD32L9nwMXfMmgdaN2zf2Tq4i0pCLkYc,632 +moviepy/audio/fx/audio_fadeout.py,sha256=zR61frbcZ1lDmMa04JNpgru7HA1oS7F3XkL5LQFMWjk,703 +moviepy/audio/fx/audio_left_right.py,sha256=nIY9aEyCJVZLBJEyOiUDv490RY-1qdsjzWMkC4oN4j0,501 +moviepy/audio/fx/audio_loop.py,sha256=_CYIkNHVGEToWDaEt8fv8yNLkRAEkB6g9BDop8W3mbo,761 +moviepy/audio/fx/audio_normalize.py,sha256=YybQD5AFBjm_dEe3aZ5Se7LCc010lbojPCaZpiWv_KE,530 +moviepy/audio/fx/volumex.py,sha256=-iL2D2rmwJQ39BwqJ1iXitPoEBkNME62USW-3d4Za78,657 +moviepy/audio/io/AudioFileClip.py,sha256=o7_uluimUOXWwXCVpij68yspmEkUC8R64AGti66DrM4,2801 +moviepy/audio/io/__init__.py,sha256=ozgif9QB2xrWan5VQoAMRbxql6T2RaNA9bNDq6kUtsM,62 +moviepy/audio/io/__pycache__/AudioFileClip.cpython-311.pyc,, +moviepy/audio/io/__pycache__/__init__.cpython-311.pyc,, +moviepy/audio/io/__pycache__/ffmpeg_audiowriter.cpython-311.pyc,, +moviepy/audio/io/__pycache__/preview.cpython-311.pyc,, +moviepy/audio/io/__pycache__/readers.cpython-311.pyc,, +moviepy/audio/io/ffmpeg_audiowriter.py,sha256=ssVCDxoR9ZE5pV7ZrkYDHVm7_A0I4k1s0k_wGoL17xc,6265 +moviepy/audio/io/preview.py,sha256=MFotqzyd40vtJEJB4x0F8wR7RyXUWIhMGw7Ag5fahC0,2187 +moviepy/audio/io/readers.py,sha256=YPdZOlhpXUfy16SJUKEM1JVkSgSG8fxbLOnGvEH02k8,8557 +moviepy/audio/tools/__init__.py,sha256=ngcg4cOdZb-tTf72nTMGavfAUK0IhHOB1mMW4dQCkn0,48 +moviepy/audio/tools/__pycache__/__init__.cpython-311.pyc,, +moviepy/audio/tools/__pycache__/cuts.cpython-311.pyc,, 
+moviepy/audio/tools/cuts.py,sha256=_YGlGWBXJIe7oBfhCUHk8mjvEajha7Mev8feVoVKjMs,686 +moviepy/compat.py,sha256=Nba-F62uAuVOv1EJTuwwGqWhr1VyMPW0b2uxS_qVV0Y,303 +moviepy/config.py,sha256=HwS1uQxirlCuDOxuaXLYAYImI0fpubiiPlikqrhGVU4,3474 +moviepy/config_defaults.py,sha256=zHwjL-u7p2TDABWkEE7NUsP-1WM9Hr2uZ-Xs85j7VdU,1571 +moviepy/decorators.py,sha256=mBQ6kz8FRNpIh6xjOQtPW3UVoINZ6M-hlb_VeIHhBq8,3868 +moviepy/editor.py,sha256=lnpUJqk42qfYhL1ReAOMsnGxVapgZi2psKTh8Y8uya8,4108 +moviepy/tools.py,sha256=mUC5IBvpBNacXZZ6gC2G76CnhzKeyxN6fV9VSQJogu8,5260 +moviepy/utils.py,sha256=cLVYHsLxHi1ffLeTl7lYhz-GME5m9Juu5AvmgG1HTSM,609 +moviepy/version.py,sha256=2plzdEEb24FLjE2I2XyBBcJEPYWHccNL4SgtLC_6erg,22 +moviepy/video/VideoClip.py,sha256=nUr7jDP-c2CgjRFw_ImDFqW8ObJEHhdsre1HlQhQ6vE,41252 +moviepy/video/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +moviepy/video/__pycache__/VideoClip.cpython-311.pyc,, +moviepy/video/__pycache__/__init__.cpython-311.pyc,, +moviepy/video/compositing/CompositeVideoClip.py,sha256=3U5OoeAaWokpsbLLmY-VwgyUthsbYF7h-pXZV2ufTEg,5657 +moviepy/video/compositing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +moviepy/video/compositing/__pycache__/CompositeVideoClip.cpython-311.pyc,, +moviepy/video/compositing/__pycache__/__init__.cpython-311.pyc,, +moviepy/video/compositing/__pycache__/concatenate.cpython-311.pyc,, +moviepy/video/compositing/__pycache__/on_color.cpython-311.pyc,, +moviepy/video/compositing/__pycache__/positioning.cpython-311.pyc,, +moviepy/video/compositing/__pycache__/transitions.cpython-311.pyc,, +moviepy/video/compositing/concatenate.py,sha256=YVQnt4GlZYIYYtIGFfsF64pDsUu1JHCMpAEueo7QZv4,4604 +moviepy/video/compositing/on_color.py,sha256=DjXkm8y8-vmUo2QfkNFjVr5SP8T7IhYoIDJv1KJzOtM,1053 +moviepy/video/compositing/positioning.py,sha256=6fk_iD_juI-cuReU92JgBE8PImPogfeJ4vUhhW9vx84,87 +moviepy/video/compositing/transitions.py,sha256=bZWjbjVkYhBCVRIeGT8ydha9XES-YUguVZLCik1Uzv4,3926 
+moviepy/video/fx/__init__.py,sha256=QiLpB-Tq7n8ywyj769E67G47_TIF8uW7SICHSb3BZc4,121 +moviepy/video/fx/__pycache__/__init__.cpython-311.pyc,, +moviepy/video/fx/__pycache__/accel_decel.cpython-311.pyc,, +moviepy/video/fx/__pycache__/blackwhite.cpython-311.pyc,, +moviepy/video/fx/__pycache__/blink.cpython-311.pyc,, +moviepy/video/fx/__pycache__/colorx.cpython-311.pyc,, +moviepy/video/fx/__pycache__/crop.cpython-311.pyc,, +moviepy/video/fx/__pycache__/even_size.cpython-311.pyc,, +moviepy/video/fx/__pycache__/fadein.cpython-311.pyc,, +moviepy/video/fx/__pycache__/fadeout.cpython-311.pyc,, +moviepy/video/fx/__pycache__/freeze.cpython-311.pyc,, +moviepy/video/fx/__pycache__/freeze_region.cpython-311.pyc,, +moviepy/video/fx/__pycache__/gamma_corr.cpython-311.pyc,, +moviepy/video/fx/__pycache__/headblur.cpython-311.pyc,, +moviepy/video/fx/__pycache__/invert_colors.cpython-311.pyc,, +moviepy/video/fx/__pycache__/loop.cpython-311.pyc,, +moviepy/video/fx/__pycache__/lum_contrast.cpython-311.pyc,, +moviepy/video/fx/__pycache__/make_loopable.cpython-311.pyc,, +moviepy/video/fx/__pycache__/margin.cpython-311.pyc,, +moviepy/video/fx/__pycache__/mask_and.cpython-311.pyc,, +moviepy/video/fx/__pycache__/mask_color.cpython-311.pyc,, +moviepy/video/fx/__pycache__/mask_or.cpython-311.pyc,, +moviepy/video/fx/__pycache__/mirror_x.cpython-311.pyc,, +moviepy/video/fx/__pycache__/mirror_y.cpython-311.pyc,, +moviepy/video/fx/__pycache__/painting.cpython-311.pyc,, +moviepy/video/fx/__pycache__/resize.cpython-311.pyc,, +moviepy/video/fx/__pycache__/rotate.cpython-311.pyc,, +moviepy/video/fx/__pycache__/scroll.cpython-311.pyc,, +moviepy/video/fx/__pycache__/speedx.cpython-311.pyc,, +moviepy/video/fx/__pycache__/supersample.cpython-311.pyc,, +moviepy/video/fx/__pycache__/time_mirror.cpython-311.pyc,, +moviepy/video/fx/__pycache__/time_symmetrize.cpython-311.pyc,, +moviepy/video/fx/accel_decel.py,sha256=n-jIw5KDWcMcOyC7xJf7VD5ZHKc-lXOjpeLqFUEkRf0,1225 
+moviepy/video/fx/all/__init__.py,sha256=qetnjhZ2DBMOcy5HcnDVWdbQ8imohA1a_vulUSOD4F8,348 +moviepy/video/fx/all/__pycache__/__init__.cpython-311.pyc,, +moviepy/video/fx/blackwhite.py,sha256=eEFRFRKPSTYtxE9BlwZ-VUlXuWkaJZHz-jyHopUgo00,678 +moviepy/video/fx/blink.py,sha256=2KST8bscD_MCoYf8z4Vv1_czrnQmJDuwYk29OPMywtM,424 +moviepy/video/fx/colorx.py,sha256=01d1qya5Rcy8d4yWWFsDWQpMMVq6QdPxWuWYCFQtYjM,356 +moviepy/video/fx/crop.py,sha256=T4UIUbFKzgLX1xdJhAX4dswcrasZBpaiVGHaKeomCa0,1618 +moviepy/video/fx/even_size.py,sha256=deRjldfdEnoCjwCBkq8GBUPkwWmSlSxl7zqoYQMC_1E,486 +moviepy/video/fx/fadein.py,sha256=7sgU5i4z4ixoNsH1bY5CPF6nfTFA1DHQdW6AH-5Qegk,778 +moviepy/video/fx/fadeout.py,sha256=bEcSvtQJW0dR1e51FVfBtphNQH9UEgJbBa2-3apMRSA,859 +moviepy/video/fx/freeze.py,sha256=rMNZ-YHcM5pSsLjLV86mpp74kOfjy0avNkw5aXflwwM,1117 +moviepy/video/fx/freeze_region.py,sha256=AQLGDUfOaqxgvhbneK1hrruITPO9e_ymSPnyBVREquo,1921 +moviepy/video/fx/gamma_corr.py,sha256=R57Qpadustr91V2fuH6-9D-GJkrvGWvrc2K80DUpgos,212 +moviepy/video/fx/headblur.py,sha256=_slnQ6EX2llHtR6yYT9E_F626g647t2hLxixxVxz0MQ,1816 +moviepy/video/fx/invert_colors.py,sha256=zJ8GxaICGxLYhUmHOs0_4HqCpVCstoq3GE26ilbE0-k,289 +moviepy/video/fx/loop.py,sha256=2zahdRTzmQTeFvIQOgEYiwD4__0uwbuGiE3QYQEjEWA,727 +moviepy/video/fx/lum_contrast.py,sha256=ET2S1qMd5kEIh5tFpL-sTWxtU6LHGx1jtGw_NW16FeM,406 +moviepy/video/fx/make_loopable.py,sha256=DrfqEj48DH1yxQwEEtFKLzlWtWR21oGqQjOrCwpt_6w,532 +moviepy/video/fx/margin.py,sha256=az1ZIfz3NE67JSCtnrw5RIrL9Z6ADP11tU2BIpr5TNQ,1726 +moviepy/video/fx/mask_and.py,sha256=Pr-osWikjIQp9Re5MT3ufezOL25Ttz8r-KuAAHLlyTU,662 +moviepy/video/fx/mask_color.py,sha256=3ertiUdcgbna8d6rkY7Vo8ylz2ilkegiIGsK0sNAOu8,903 +moviepy/video/fx/mask_or.py,sha256=72oxhZe1jM04JpR0osezDtPvUWb7PJMdr0Sc7diZrcc,666 +moviepy/video/fx/mirror_x.py,sha256=GlRCyuatW7bCgZC0HtrUxKOf2gilhGhf-jT1W3uzPpA,177 +moviepy/video/fx/mirror_y.py,sha256=wY5njXDVmGx8ewVIbDSJ-TXgG1XvE2k0Et8B4lh412E,173 
+moviepy/video/fx/painting.py,sha256=KEcbIiqcTGlI3TqAGWhLs48YIO_QgHAQH328-GwaN8Q,1412 +moviepy/video/fx/resize.py,sha256=4iPt0l28IUE1HipdS1D8g90YASHxgi73aM5FDa_Xdp4,4942 +moviepy/video/fx/rotate.py,sha256=oRvXl9yMDTuorfjMDZDY5Sr5c4AMkbE-LMkJo9nnBkE,2195 +moviepy/video/fx/scroll.py,sha256=bXUtgi8OlOLOMkW4CWZUvlArbfjqMjfixtIXuQJnj7I,526 +moviepy/video/fx/speedx.py,sha256=hKUhAWXSsmN8uhM4tPz4P-506jNl_j0hy5gW2F6rSIY,723 +moviepy/video/fx/supersample.py,sha256=W31r8T7uwKQrqx6VTSqsRex7EZViTuuKSCf_0o1ePVg,421 +moviepy/video/fx/time_mirror.py,sha256=-cs7r_XevuoDF1TV7dM7jCmmFhnHwjaXeM2_dV0y7eU,425 +moviepy/video/fx/time_symmetrize.py,sha256=KpKB97vnBheYTrWMaoqbTkZEPniBVb_pt40I2UJw2H8,602 +moviepy/video/io/ImageSequenceClip.py,sha256=H4yZBUmzKXdxUeAlN8lF52sRG3SXgNUp-5geyWEZp78,4956 +moviepy/video/io/VideoFileClip.py,sha256=Z2Qq7MPzu8Ng8ZNkkwUmrBvrabjSfv6nIPrXHQuDu_E,4500 +moviepy/video/io/__init__.py,sha256=f_CLHkRt4wh9sMTfcORcoU_XhM5hYwQF6beNXYxbVAA,77 +moviepy/video/io/__pycache__/ImageSequenceClip.cpython-311.pyc,, +moviepy/video/io/__pycache__/VideoFileClip.cpython-311.pyc,, +moviepy/video/io/__pycache__/__init__.cpython-311.pyc,, +moviepy/video/io/__pycache__/bindings.cpython-311.pyc,, +moviepy/video/io/__pycache__/downloader.cpython-311.pyc,, +moviepy/video/io/__pycache__/ffmpeg_reader.cpython-311.pyc,, +moviepy/video/io/__pycache__/ffmpeg_tools.cpython-311.pyc,, +moviepy/video/io/__pycache__/ffmpeg_writer.cpython-311.pyc,, +moviepy/video/io/__pycache__/gif_writers.cpython-311.pyc,, +moviepy/video/io/__pycache__/html_tools.cpython-311.pyc,, +moviepy/video/io/__pycache__/preview.cpython-311.pyc,, +moviepy/video/io/__pycache__/sliders.cpython-311.pyc,, +moviepy/video/io/bindings.py,sha256=ZPmk4nhs3p9a5UOc0vPAzJqz4r_Dm_8gFMt9UxsSasA,1079 +moviepy/video/io/downloader.py,sha256=Ya8-QGdqMrv_RSkqweNDDOPwFuOmX_U4fPIfUw7u7FI,1054 +moviepy/video/io/ffmpeg_reader.py,sha256=MiLqJ13MMKmgQnuW3S9NUCMBzjKW4bwuRgFFw08S6lA,13326 
+moviepy/video/io/ffmpeg_tools.py,sha256=MDoPLL3J7XU-I8_FxFvLG_3DHHQArHAl7Su3TIrO9Y8,2321 +moviepy/video/io/ffmpeg_writer.py,sha256=olpaK8FHdxOf3zIkx72vger9VCtyF80JirtFOm3yz4w,9064 +moviepy/video/io/gif_writers.py,sha256=8MDwrW62IP5inW7hJBkgphWYpxeNa5fPPWwqcGotXhs,8933 +moviepy/video/io/html_tools.py,sha256=8-YCJCClZl0Dfna6elFzXvQrNWWKbv79_XIqvPjHAto,7591 +moviepy/video/io/preview.py,sha256=fcmW6gx4aqFlm6Pdj7GGRlv6-xfP98KnApXi7GRiWuI,4794 +moviepy/video/io/sliders.py,sha256=CcUGXVMnP2kgtdtODs52m9xT7LQdndrhAuHb-X7-q5Y,2116 +moviepy/video/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +moviepy/video/tools/__pycache__/__init__.cpython-311.pyc,, +moviepy/video/tools/__pycache__/credits.cpython-311.pyc,, +moviepy/video/tools/__pycache__/cuts.cpython-311.pyc,, +moviepy/video/tools/__pycache__/drawing.cpython-311.pyc,, +moviepy/video/tools/__pycache__/interpolators.cpython-311.pyc,, +moviepy/video/tools/__pycache__/segmenting.cpython-311.pyc,, +moviepy/video/tools/__pycache__/subtitles.cpython-311.pyc,, +moviepy/video/tools/__pycache__/tracking.cpython-311.pyc,, +moviepy/video/tools/credits.py,sha256=8bBkmGZCVdcuXgaXlHHnpQ7ztwWwEe7zFvojam5vJdk,3414 +moviepy/video/tools/cuts.py,sha256=lYFNvXl0fZeU5GwhDO5Eg8ql_VrAHjDelK9B-TQypgM,10757 +moviepy/video/tools/drawing.py,sha256=7obNphKPUGLwyHzsw-KI-3vpVxsv4ZA9rdp507ce5P4,8760 +moviepy/video/tools/interpolators.py,sha256=0ZJItLPf0AKOvPH1quVjQ0nhESrezqK8G_rjq-72vQs,2122 +moviepy/video/tools/segmenting.py,sha256=ELePrd_Zr0uOnpraYww_P5kjCzE_anG2gfnRdQCIf1E,1821 +moviepy/video/tools/subtitles.py,sha256=QgBRfXJ7_GjRARV_NWewBEXjNP5KoKY9ZWbGWUjHHrA,5215 +moviepy/video/tools/tracking.py,sha256=s5TMgCwun1vuLbqZnWrergSKTY4-rYql3WcFcuoZB_E,6066 diff --git a/.venv/Lib/site-packages/moviepy-1.0.3.dist-info/REQUESTED b/.venv/Lib/site-packages/moviepy-1.0.3.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/moviepy-1.0.3.dist-info/WHEEL 
b/.venv/Lib/site-packages/moviepy-1.0.3.dist-info/WHEEL new file mode 100644 index 00000000..bab98d67 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy-1.0.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.43.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/Lib/site-packages/moviepy-1.0.3.dist-info/top_level.txt b/.venv/Lib/site-packages/moviepy-1.0.3.dist-info/top_level.txt new file mode 100644 index 00000000..a384113e --- /dev/null +++ b/.venv/Lib/site-packages/moviepy-1.0.3.dist-info/top_level.txt @@ -0,0 +1 @@ +moviepy diff --git a/.venv/Lib/site-packages/moviepy/Clip.py b/.venv/Lib/site-packages/moviepy/Clip.py new file mode 100644 index 00000000..24bc3b2c --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/Clip.py @@ -0,0 +1,501 @@ +""" +This module implements the central object of MoviePy, the Clip, and +all the methods that are common to the two subclasses of Clip, VideoClip +and AudioClip. +""" + +from copy import copy + +import numpy as np +import proglog +from tqdm import tqdm + +from moviepy.decorators import (apply_to_audio, apply_to_mask, + convert_to_seconds, outplace, + requires_duration, use_clip_fps_by_default) + + +class Clip: + + """ + + Base class of all clips (VideoClips and AudioClips). + + + Attributes + ----------- + + start: + When the clip is included in a composition, time of the + composition at which the clip starts playing (in seconds). + + end: + When the clip is included in a composition, time of the + composition at which the clip stops playing (in seconds). + + duration: + Duration of the clip (in seconds). Some clips are infinite, in + this case their duration will be ``None``. + + """ + + # prefix for all temporary video and audio files. 
+ # You can overwrite it with + # >>> Clip._TEMP_FILES_PREFIX = "temp_" + + _TEMP_FILES_PREFIX = 'TEMP_MPY_' + + def __init__(self): + + self.start = 0 + self.end = None + self.duration = None + + self.memoize = False + self.memoized_t = None + self.memoize_frame = None + + def copy(self): + """ Shallow copy of the clip. + + Returns a shallow copy of the clip whose mask and audio will + be shallow copies of the clip's mask and audio if they exist. + + This method is intensively used to produce new clips every time + there is an outplace transformation of the clip (clip.resize, + clip.subclip, etc.) + """ + + newclip = copy(self) + if hasattr(self, 'audio'): + newclip.audio = copy(self.audio) + if hasattr(self, 'mask'): + newclip.mask = copy(self.mask) + + return newclip + + @convert_to_seconds(['t']) + def get_frame(self, t): + """ + Gets a numpy array representing the RGB picture of the clip at time t + or (mono or stereo) value for a sound clip + """ + # Coming soon: smart error handling for debugging at this point + if self.memoize: + if t == self.memoized_t: + return self.memoized_frame + else: + frame = self.make_frame(t) + self.memoized_t = t + self.memoized_frame = frame + return frame + else: + return self.make_frame(t) + + def fl(self, fun, apply_to=None, keep_duration=True): + """ General processing of a clip. + + Returns a new Clip whose frames are a transformation + (through function ``fun``) of the frames of the current clip. + + Parameters + ----------- + + fun + A function with signature (gf,t -> frame) where ``gf`` will + represent the current clip's ``get_frame`` method, + i.e. ``gf`` is a function (t->image). Parameter `t` is a time + in seconds, `frame` is a picture (=Numpy array) which will be + returned by the transformed clip (see examples below). + + apply_to + Can be either ``'mask'``, or ``'audio'``, or + ``['mask','audio']``. + Specifies if the filter ``fl`` should also be applied to the + audio or the mask of the clip, if any. 
+ + keep_duration + Set to True if the transformation does not change the + ``duration`` of the clip. + + Examples + -------- + + In the following ``newclip`` a 100 pixels-high clip whose video + content scrolls from the top to the bottom of the frames of + ``clip``. + + >>> fl = lambda gf,t : gf(t)[int(t):int(t)+50, :] + >>> newclip = clip.fl(fl, apply_to='mask') + + """ + if apply_to is None: + apply_to = [] + + #mf = copy(self.make_frame) + newclip = self.set_make_frame(lambda t: fun(self.get_frame, t)) + + if not keep_duration: + newclip.duration = None + newclip.end = None + + if isinstance(apply_to, str): + apply_to = [apply_to] + + for attr in apply_to: + a = getattr(newclip, attr, None) + if a is not None: + new_a = a.fl(fun, keep_duration=keep_duration) + setattr(newclip, attr, new_a) + + return newclip + + def fl_time(self, t_func, apply_to=None, keep_duration=False): + """ + Returns a Clip instance playing the content of the current clip + but with a modified timeline, time ``t`` being replaced by another + time `t_func(t)`. + + Parameters + ----------- + + t_func: + A function ``t-> new_t`` + + apply_to: + Can be either 'mask', or 'audio', or ['mask','audio']. + Specifies if the filter ``fl`` should also be applied to the + audio or the mask of the clip, if any. + + keep_duration: + ``False`` (default) if the transformation modifies the + ``duration`` of the clip. + + Examples + -------- + + >>> # plays the clip (and its mask and sound) twice faster + >>> newclip = clip.fl_time(lambda: 2*t, apply_to=['mask', 'audio']) + >>> + >>> # plays the clip starting at t=3, and backwards: + >>> newclip = clip.fl_time(lambda: 3-t) + + """ + if apply_to is None: + apply_to = [] + + return self.fl(lambda gf, t: gf(t_func(t)), apply_to, + keep_duration=keep_duration) + + def fx(self, func, *args, **kwargs): + """ + + Returns the result of ``func(self, *args, **kwargs)``. 
+ for instance + + >>> newclip = clip.fx(resize, 0.2, method='bilinear') + + is equivalent to + + >>> newclip = resize(clip, 0.2, method='bilinear') + + The motivation of fx is to keep the name of the effect near its + parameters, when the effects are chained: + + >>> from moviepy.video.fx import volumex, resize, mirrorx + >>> clip.fx( volumex, 0.5).fx( resize, 0.3).fx( mirrorx ) + >>> # Is equivalent, but clearer than + >>> resize( volumex( mirrorx( clip ), 0.5), 0.3) + + """ + + return func(self, *args, **kwargs) + + + + @apply_to_mask + @apply_to_audio + @convert_to_seconds(['t']) + @outplace + def set_start(self, t, change_end=True): + """ + Returns a copy of the clip, with the ``start`` attribute set + to ``t``, which can be expressed in seconds (15.35), in (min, sec), + in (hour, min, sec), or as a string: '01:03:05.35'. + + + If ``change_end=True`` and the clip has a ``duration`` attribute, + the ``end`` atrribute of the clip will be updated to + ``start+duration``. + + If ``change_end=False`` and the clip has a ``end`` attribute, + the ``duration`` attribute of the clip will be updated to + ``end-start`` + + These changes are also applied to the ``audio`` and ``mask`` + clips of the current clip, if they exist. + """ + + self.start = t + if (self.duration is not None) and change_end: + self.end = t + self.duration + elif self.end is not None: + self.duration = self.end - self.start + + + + @apply_to_mask + @apply_to_audio + @convert_to_seconds(['t']) + @outplace + def set_end(self, t): + """ + Returns a copy of the clip, with the ``end`` attribute set to + ``t``, which can be expressed in seconds (15.35), in (min, sec), + in (hour, min, sec), or as a string: '01:03:05.35'. + Also sets the duration of the mask and audio, if any, + of the returned clip. 
+ """ + self.end = t + if self.end is None: return + if self.start is None: + if self.duration is not None: + self.start = max(0, t - newclip.duration) + else: + self.duration = self.end - self.start + + + + @apply_to_mask + @apply_to_audio + @convert_to_seconds(['t']) + @outplace + def set_duration(self, t, change_end=True): + """ + Returns a copy of the clip, with the ``duration`` attribute + set to ``t``, which can be expressed in seconds (15.35), in (min, sec), + in (hour, min, sec), or as a string: '01:03:05.35'. + Also sets the duration of the mask and audio, if any, of the + returned clip. + If change_end is False, the start attribute of the clip will + be modified in function of the duration and the preset end + of the clip. + """ + self.duration = t + + if change_end: + self.end = None if (t is None) else (self.start + t) + else: + if self.duration is None: + raise Exception("Cannot change clip start when new" + "duration is None") + self.start = self.end - t + + + @outplace + def set_make_frame(self, make_frame): + """ + Sets a ``make_frame`` attribute for the clip. Useful for setting + arbitrary/complicated videoclips. + """ + self.make_frame = make_frame + + @outplace + def set_fps(self, fps): + """ Returns a copy of the clip with a new default fps for functions like + write_videofile, iterframe, etc. """ + self.fps = fps + + + @outplace + def set_ismask(self, ismask): + """ Says wheter the clip is a mask or not (ismask is a boolean)""" + self.ismask = ismask + + @outplace + def set_memoize(self, memoize): + """ Sets wheter the clip should keep the last frame read in memory """ + self.memoize = memoize + + @convert_to_seconds(['t']) + def is_playing(self, t): + """ + + If t is a time, returns true if t is between the start and + the end of the clip. t can be expressed in seconds (15.35), + in (min, sec), in (hour, min, sec), or as a string: '01:03:05.35'. 
+ If t is a numpy array, returns False if none of the t is in + theclip, else returns a vector [b_1, b_2, b_3...] where b_i + is true iff tti is in the clip. + """ + + if isinstance(t, np.ndarray): + # is the whole list of t outside the clip ? + tmin, tmax = t.min(), t.max() + + if (self.end is not None) and (tmin >= self.end): + return False + + if tmax < self.start: + return False + + # If we arrive here, a part of t falls in the clip + result = 1 * (t >= self.start) + if self.end is not None: + result *= (t <= self.end) + return result + + else: + + return((t >= self.start) and + ((self.end is None) or (t < self.end))) + + + @convert_to_seconds(['t_start', 't_end']) + @apply_to_mask + @apply_to_audio + def subclip(self, t_start=0, t_end=None): + """ + Returns a clip playing the content of the current clip + between times ``t_start`` and ``t_end``, which can be expressed + in seconds (15.35), in (min, sec), in (hour, min, sec), or as a + string: '01:03:05.35'. + If ``t_end`` is not provided, it is assumed to be the duration + of the clip (potentially infinite). + If ``t_end`` is a negative value, it is reset to + ``clip.duration + t_end. ``. For instance: :: + + >>> # cut the last two seconds of the clip: + >>> newclip = clip.subclip(0,-2) + + If ``t_end`` is provided or if the clip has a duration attribute, + the duration of the returned clip is set automatically. + + The ``mask`` and ``audio`` of the resulting subclip will be + subclips of ``mask`` and ``audio`` the original clip, if + they exist. + """ + + if t_start < 0: + # Make this more Python-like, a negative value means to move + # backward from the end of the clip + t_start = self.duration + t_start # Remember t_start is negative + + if (self.duration is not None) and (t_start > self.duration): + raise ValueError("t_start (%.02f) " % t_start + + "should be smaller than the clip's " + + "duration (%.02f)." 
% self.duration) + + newclip = self.fl_time(lambda t: t + t_start, apply_to=[]) + + if (t_end is None) and (self.duration is not None): + + t_end = self.duration + + elif (t_end is not None) and (t_end < 0): + + if self.duration is None: + + print("Error: subclip with negative times (here %s)" % (str((t_start, t_end))) + + " can only be extracted from clips with a ``duration``") + + else: + + t_end = self.duration + t_end + + if t_end is not None: + + newclip.duration = t_end - t_start + newclip.end = newclip.start + newclip.duration + + return newclip + + + @apply_to_mask + @apply_to_audio + @convert_to_seconds(['ta', 'tb']) + def cutout(self, ta, tb): + """ + Returns a clip playing the content of the current clip but + skips the extract between ``ta`` and ``tb``, which can be + expressed in seconds (15.35), in (min, sec), in (hour, min, sec), + or as a string: '01:03:05.35'. + If the original clip has a ``duration`` attribute set, + the duration of the returned clip is automatically computed as + `` duration - (tb - ta)``. + + The resulting clip's ``audio`` and ``mask`` will also be cutout + if they exist. + """ + + fl = lambda t: t + (t >= ta)*(tb - ta) + newclip = self.fl_time(fl) + + if self.duration is not None: + + return newclip.set_duration(self.duration - (tb - ta)) + + else: + + return newclip + + @requires_duration + @use_clip_fps_by_default + def iter_frames(self, fps=None, with_times = False, logger=None, + dtype=None): + """ Iterates over all the frames of the clip. + + Returns each frame of the clip as a HxWxN np.array, + where N=1 for mask clips and N=3 for RGB clips. + + This function is not really meant for video editing. + It provides an easy way to do frame-by-frame treatment of + a video, for fields like science, computer vision... + + The ``fps`` (frames per second) parameter is optional if the + clip already has a ``fps`` attribute. + + Use dtype="uint8" when using the pictures to write video, images... 
+ + Examples + --------- + + >>> # prints the maximum of red that is contained + >>> # on the first line of each frame of the clip. + >>> from moviepy.editor import VideoFileClip + >>> myclip = VideoFileClip('myvideo.mp4') + >>> print ( [frame[0,:,0].max() + for frame in myclip.iter_frames()]) + """ + logger = proglog.default_bar_logger(logger) + for t in logger.iter_bar(t=np.arange(0, self.duration, 1.0/fps)): + frame = self.get_frame(t) + if (dtype is not None) and (frame.dtype != dtype): + frame = frame.astype(dtype) + if with_times: + yield t, frame + else: + yield frame + + def close(self): + """ + Release any resources that are in use. + """ + + # Implementation note for subclasses: + # + # * Memory-based resources can be left to the garbage-collector. + # * However, any open files should be closed, and subprocesses + # should be terminated. + # * Be wary that shallow copies are frequently used. + # Closing a Clip may affect its copies. + # * Therefore, should NOT be called by __del__(). + pass + + # Support the Context Manager protocol, to ensure that resources are cleaned up. 
+ + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() diff --git a/.venv/Lib/site-packages/moviepy/__init__.py b/.venv/Lib/site-packages/moviepy/__init__.py new file mode 100644 index 00000000..58f3ace6 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/__init__.py @@ -0,0 +1 @@ +from .version import __version__ diff --git a/.venv/Lib/site-packages/moviepy/__pycache__/Clip.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/__pycache__/Clip.cpython-311.pyc new file mode 100644 index 00000000..71d89623 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/__pycache__/Clip.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..7a7a8c47 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/__pycache__/compat.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/__pycache__/compat.cpython-311.pyc new file mode 100644 index 00000000..8f656fa3 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/__pycache__/compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/__pycache__/config.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/__pycache__/config.cpython-311.pyc new file mode 100644 index 00000000..020f9237 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/__pycache__/config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/__pycache__/config_defaults.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/__pycache__/config_defaults.cpython-311.pyc new file mode 100644 index 00000000..bd246a9f Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/__pycache__/config_defaults.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/__pycache__/decorators.cpython-311.pyc 
b/.venv/Lib/site-packages/moviepy/__pycache__/decorators.cpython-311.pyc new file mode 100644 index 00000000..e979021a Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/__pycache__/decorators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/__pycache__/editor.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/__pycache__/editor.cpython-311.pyc new file mode 100644 index 00000000..d13282d8 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/__pycache__/editor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/__pycache__/tools.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/__pycache__/tools.cpython-311.pyc new file mode 100644 index 00000000..42c80fa7 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/__pycache__/tools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/__pycache__/utils.cpython-311.pyc new file mode 100644 index 00000000..ae25620c Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/__pycache__/version.cpython-311.pyc new file mode 100644 index 00000000..62d9ac26 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/audio/AudioClip.py b/.venv/Lib/site-packages/moviepy/audio/AudioClip.py new file mode 100644 index 00000000..122d9b2e --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/audio/AudioClip.py @@ -0,0 +1,323 @@ +import os + +import numpy as np +import proglog +from tqdm import tqdm + +from moviepy.audio.io.ffmpeg_audiowriter import ffmpeg_audiowrite +from moviepy.Clip import Clip +from moviepy.decorators import requires_duration +from moviepy.tools import deprecated_version_of, extensions_dict + + +class AudioClip(Clip): + """ Base class 
for audio clips. + + See ``AudioFileClip`` and ``CompositeSoundClip`` for usable classes. + + An AudioClip is a Clip with a ``make_frame`` attribute of + the form `` t -> [ f_t ]`` for mono sound and + ``t-> [ f1_t, f2_t ]`` for stereo sound (the arrays are Numpy arrays). + The `f_t` are floats between -1 and 1. These bounds can be + trespassed wihtout problems (the program will put the + sound back into the bounds at conversion time, without much impact). + + Parameters + ----------- + + make_frame + A function `t-> frame at time t`. The frame does not mean much + for a sound, it is just a float. What 'makes' the sound are + the variations of that float in the time. + + nchannels + Number of channels (one or two for mono or stereo). + + Examples + --------- + + >>> # Plays the note A (a sine wave of frequency 440HZ) + >>> import numpy as np + >>> make_frame = lambda t: 2*[ np.sin(440 * 2 * np.pi * t) ] + >>> clip = AudioClip(make_frame, duration=5) + >>> clip.preview() + + """ + + def __init__(self, make_frame=None, duration=None, fps=None): + Clip.__init__(self) + + if fps is not None: + self.fps = fps + + if make_frame is not None: + self.make_frame = make_frame + frame0 = self.get_frame(0) + if hasattr(frame0, '__iter__'): + self.nchannels = len(list(frame0)) + else: + self.nchannels = 1 + if duration is not None: + self.duration = duration + self.end = duration + + @requires_duration + def iter_chunks(self, chunksize=None, chunk_duration=None, fps=None, + quantize=False, nbytes=2, logger=None): + """ Iterator that returns the whole sound array of the clip by chunks + """ + if fps is None: + fps = self.fps + logger = proglog.default_bar_logger(logger) + if chunk_duration is not None: + chunksize = int(chunk_duration*fps) + + totalsize = int(fps*self.duration) + + nchunks = totalsize // chunksize + 1 + + pospos = np.linspace(0, totalsize, nchunks + 1, endpoint=True, dtype=int) + + for i in logger.iter_bar(chunk=list(range(nchunks))): + size = pospos[i+1] - 
pospos[i] + assert(size <= chunksize) + tt = (1.0/fps)*np.arange(pospos[i], pospos[i+1]) + yield self.to_soundarray(tt, nbytes=nbytes, quantize=quantize, + fps=fps, buffersize=chunksize) + + @requires_duration + def to_soundarray(self, tt=None, fps=None, quantize=False, nbytes=2, buffersize=50000): + """ + Transforms the sound into an array that can be played by pygame + or written in a wav file. See ``AudioClip.preview``. + + Parameters + ------------ + + fps + Frame rate of the sound for the conversion. + 44100 for top quality. + + nbytes + Number of bytes to encode the sound: 1 for 8bit sound, + 2 for 16bit, 4 for 32bit sound. + + """ + if fps is None: + fps = self.fps + + stacker = np.vstack if self.nchannels == 2 else np.hstack + max_duration = 1.0 * buffersize / fps + if tt is None: + if self.duration > max_duration: + return stacker(self.iter_chunks(fps=fps, quantize=quantize, + nbytes=2, chunksize=buffersize)) + else: + tt = np.arange(0, self.duration, 1.0/fps) + """ + elif len(tt)> 1.5*buffersize: + nchunks = int(len(tt)/buffersize+1) + tt_chunks = np.array_split(tt, nchunks) + return stacker([self.to_soundarray(tt=ttc, buffersize=buffersize, fps=fps, + quantize=quantize, nbytes=nbytes) + for ttc in tt_chunks]) + """ + #print tt.max() - tt.min(), tt.min(), tt.max() + + snd_array = self.get_frame(tt) + + if quantize: + snd_array = np.maximum(-0.99, np.minimum(0.99, snd_array)) + inttype = {1: 'int8', 2: 'int16', 4: 'int32'}[nbytes] + snd_array = (2**(8*nbytes-1)*snd_array).astype(inttype) + + return snd_array + + def max_volume(self, stereo=False, chunksize=50000, logger=None): + + stereo = stereo and (self.nchannels == 2) + + maxi = np.array([0, 0]) if stereo else 0 + for chunk in self.iter_chunks(chunksize=chunksize,logger=logger): + maxi = np.maximum(maxi, abs(chunk).max(axis=0)) if stereo else max(maxi, abs(chunk).max()) + return maxi + + @requires_duration + def write_audiofile(self, filename, fps=None, nbytes=2, buffersize=2000, + codec=None, 
bitrate=None, ffmpeg_params=None, + write_logfile=False, verbose=True, logger='bar'): + """ Writes an audio file from the AudioClip. + + + Parameters + ----------- + + filename + Name of the output file + + fps + Frames per second. If not set, it will try default to self.fps if + already set, otherwise it will default to 44100 + + nbytes + Sample width (set to 2 for 16-bit sound, 4 for 32-bit sound) + + codec + Which audio codec should be used. If None provided, the codec is + determined based on the extension of the filename. Choose + 'pcm_s16le' for 16-bit wav and 'pcm_s32le' for 32-bit wav. + + bitrate + Audio bitrate, given as a string like '50k', '500k', '3000k'. + Will determine the size and quality of the output file. + Note that it mainly an indicative goal, the bitrate won't + necessarily be the this in the output file. + + ffmpeg_params + Any additional parameters you would like to pass, as a list + of terms, like ['-option1', 'value1', '-option2', 'value2'] + + write_logfile + If true, produces a detailed logfile named filename + '.log' + when writing the file + + verbose + Boolean indicating whether to print infomation + + logger + Either 'bar' or None or any Proglog logger + + """ + if not fps: + if not self.fps: + fps = 44100 + else: + fps = self.fps + + if codec is None: + name, ext = os.path.splitext(os.path.basename(filename)) + try: + codec = extensions_dict[ext[1:]]['codec'][0] + except KeyError: + raise ValueError("MoviePy couldn't find the codec associated " + "with the filename. Provide the 'codec' " + "parameter in write_audiofile.") + + return ffmpeg_audiowrite(self, filename, fps, nbytes, buffersize, + codec=codec, bitrate=bitrate, + write_logfile=write_logfile, verbose=verbose, + ffmpeg_params=ffmpeg_params, + logger=logger) + + +# The to_audiofile method is replaced by the more explicit write_audiofile. 
+AudioClip.to_audiofile = deprecated_version_of(AudioClip.write_audiofile, + 'to_audiofile') +### + + +class AudioArrayClip(AudioClip): + """ + + An audio clip made from a sound array. + + Parameters + ----------- + + array + A Numpy array representing the sound, of size Nx1 for mono, + Nx2 for stereo. + + fps + Frames per second : speed at which the sound is supposed to be + played. + + """ + + def __init__(self, array, fps): + + Clip.__init__(self) + self.array = array + self.fps = fps + self.duration = 1.0 * len(array) / fps + + def make_frame(t): + """ complicated, but must be able to handle the case where t + is a list of the form sin(t) """ + + if isinstance(t, np.ndarray): + array_inds = (self.fps*t).astype(int) + in_array = (array_inds > 0) & (array_inds < len(self.array)) + result = np.zeros((len(t), 2)) + result[in_array] = self.array[array_inds[in_array]] + return result + else: + i = int(self.fps * t) + if i < 0 or i >= len(self.array): + return 0*self.array[0] + else: + return self.array[i] + + self.make_frame = make_frame + self.nchannels = len(list(self.get_frame(0))) + + +class CompositeAudioClip(AudioClip): + + """ Clip made by composing several AudioClips. + + An audio clip made by putting together several audio clips. + + Parameters + ------------ + + clips + List of audio clips, which may start playing at different times or + together. If all have their ``duration`` attribute set, the + duration of the composite clip is computed automatically. 
+ + """ + + def __init__(self, clips): + + Clip.__init__(self) + self.clips = clips + + ends = [c.end for c in self.clips] + self.nchannels = max([c.nchannels for c in self.clips]) + if not any([(e is None) for e in ends]): + self.duration = max(ends) + self.end = max(ends) + + def make_frame(t): + + played_parts = [c.is_playing(t) for c in self.clips] + + sounds = [c.get_frame(t - c.start)*np.array([part]).T + for c, part in zip(self.clips, played_parts) + if (part is not False)] + + if isinstance(t, np.ndarray): + zero = np.zeros((len(t), self.nchannels)) + + else: + zero = np.zeros(self.nchannels) + + return zero + sum(sounds) + + self.make_frame = make_frame + + +def concatenate_audioclips(clips): + """ + The clip with the highest FPS will be the FPS of the result clip. + """ + durations = [c.duration for c in clips] + tt = np.cumsum([0]+durations) # start times, and end time. + newclips = [c.set_start(t) for c, t in zip(clips, tt)] + + result = CompositeAudioClip(newclips).set_duration(tt[-1]) + + fpss = [c.fps for c in clips if getattr(c, 'fps', None)] + result.fps = max(fpss) if fpss else None + return result diff --git a/.venv/Lib/site-packages/moviepy/audio/__init__.py b/.venv/Lib/site-packages/moviepy/audio/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/moviepy/audio/__pycache__/AudioClip.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/audio/__pycache__/AudioClip.cpython-311.pyc new file mode 100644 index 00000000..391a86c8 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/audio/__pycache__/AudioClip.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/audio/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/audio/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..cb44cf24 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/audio/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/moviepy/audio/fx/__init__.py b/.venv/Lib/site-packages/moviepy/audio/fx/__init__.py new file mode 100644 index 00000000..67fe3b19 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/audio/fx/__init__.py @@ -0,0 +1,4 @@ +""" +This module contains transformation functions (clip->clip) +One file for one fx. The file's name is the fx's name +""" diff --git a/.venv/Lib/site-packages/moviepy/audio/fx/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/audio/fx/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..8b15d89f Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/audio/fx/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/audio/fx/__pycache__/audio_fadein.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/audio/fx/__pycache__/audio_fadein.cpython-311.pyc new file mode 100644 index 00000000..dfbc648f Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/audio/fx/__pycache__/audio_fadein.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/audio/fx/__pycache__/audio_fadeout.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/audio/fx/__pycache__/audio_fadeout.cpython-311.pyc new file mode 100644 index 00000000..64c919d9 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/audio/fx/__pycache__/audio_fadeout.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/audio/fx/__pycache__/audio_left_right.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/audio/fx/__pycache__/audio_left_right.cpython-311.pyc new file mode 100644 index 00000000..28a0f3fc Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/audio/fx/__pycache__/audio_left_right.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/audio/fx/__pycache__/audio_loop.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/audio/fx/__pycache__/audio_loop.cpython-311.pyc new file mode 100644 index 00000000..bd93d393 Binary files /dev/null and 
b/.venv/Lib/site-packages/moviepy/audio/fx/__pycache__/audio_loop.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/audio/fx/__pycache__/audio_normalize.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/audio/fx/__pycache__/audio_normalize.cpython-311.pyc new file mode 100644 index 00000000..7255d79f Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/audio/fx/__pycache__/audio_normalize.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/audio/fx/__pycache__/volumex.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/audio/fx/__pycache__/volumex.cpython-311.pyc new file mode 100644 index 00000000..6fa48449 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/audio/fx/__pycache__/volumex.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/audio/fx/all/__init__.py b/.venv/Lib/site-packages/moviepy/audio/fx/all/__init__.py new file mode 100644 index 00000000..11258d7f --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/audio/fx/all/__init__.py @@ -0,0 +1,16 @@ +""" +Loads all the fx ! 
+Usage: +import moviepy.audio.fx.all as afx +audio_clip = afx.volume_x(some_clip, .5) +""" + +import pkgutil + +import moviepy.audio.fx as fx + +__all__ = [name for _, name, _ in pkgutil.iter_modules( + fx.__path__) if name != "all"] + +for name in __all__: + exec("from ..%s import %s" % (name, name)) diff --git a/.venv/Lib/site-packages/moviepy/audio/fx/all/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/audio/fx/all/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..956fdcbf Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/audio/fx/all/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/audio/fx/audio_fadein.py b/.venv/Lib/site-packages/moviepy/audio/fx/audio_fadein.py new file mode 100644 index 00000000..20b1eed0 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/audio/fx/audio_fadein.py @@ -0,0 +1,21 @@ +import numpy as np + +from moviepy.decorators import audio_video_fx + + +@audio_video_fx +def audio_fadein(clip, duration): + """ Return an audio (or video) clip that is first mute, then the + sound arrives progressively over ``duration`` seconds. 
""" + + def fading(gf,t): + gft = gf(t) + + if np.isscalar(t): + factor = min(1.0 * t / duration, 1) + factor = np.array([factor,factor]) + else: + factor = np.minimum(1.0 * t / duration, 1) + factor = np.vstack([factor,factor]).T + return factor * gft + return clip.fl(fading, keep_duration = True) diff --git a/.venv/Lib/site-packages/moviepy/audio/fx/audio_fadeout.py b/.venv/Lib/site-packages/moviepy/audio/fx/audio_fadeout.py new file mode 100644 index 00000000..ab6f40ee --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/audio/fx/audio_fadeout.py @@ -0,0 +1,23 @@ +import numpy as np + +from moviepy.decorators import audio_video_fx, requires_duration + + +@audio_video_fx +@requires_duration +def audio_fadeout(clip, duration): + """ Return a sound clip where the sound fades out progressively + over ``duration`` seconds at the end of the clip. """ + + def fading(gf,t): + gft = gf(t) + + if np.isscalar(t): + factor = min(1.0 * (clip.duration - t) / duration, 1) + factor = np.array([factor,factor]) + else: + factor = np.minimum( 1.0 * (clip.duration - t) / duration, 1) + factor = np.vstack([factor,factor]).T + return factor * gft + + return clip.fl(fading, keep_duration = True) diff --git a/.venv/Lib/site-packages/moviepy/audio/fx/audio_left_right.py b/.venv/Lib/site-packages/moviepy/audio/fx/audio_left_right.py new file mode 100644 index 00000000..c98abcc0 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/audio/fx/audio_left_right.py @@ -0,0 +1,15 @@ +import numpy as np + + +def audio_left_right(audioclip, left=1, right=1, merge=False): + """ + NOT YET FINISHED + + For a stereo audioclip, this function enables to change the volume + of the left and right channel separately (with the factors `left` + and `right`) + Makes a stereo audio clip in which the volume of left and right + is controllable + """ + funleft = (lambda t: left) if np.isscalar(left) else left + funright = (lambda t: right) if np.isscalar(right) else right diff --git 
a/.venv/Lib/site-packages/moviepy/audio/fx/audio_loop.py b/.venv/Lib/site-packages/moviepy/audio/fx/audio_loop.py new file mode 100644 index 00000000..7e309b7f --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/audio/fx/audio_loop.py @@ -0,0 +1,28 @@ +from ..AudioClip import concatenate_audioclips + + +def audio_loop(audioclip, nloops=None, duration=None): + """ Loops over an audio clip. + + Returns an audio clip that plays the given clip either + `nloops` times, or during `duration` seconds. + + Examples + ======== + + >>> from moviepy.editor import * + >>> videoclip = VideoFileClip('myvideo.mp4') + >>> music = AudioFileClip('music.ogg') + >>> audio = afx.audio_loop( music, duration=videoclip.duration) + >>> videoclip.set_audio(audio) + + """ + + if duration is not None: + + nloops = int( duration/ audioclip.duration)+1 + return concatenate_audioclips(nloops*[audioclip]).set_duration(duration) + + else: + + return concatenate_audioclips(nloops*[audioclip]) diff --git a/.venv/Lib/site-packages/moviepy/audio/fx/audio_normalize.py b/.venv/Lib/site-packages/moviepy/audio/fx/audio_normalize.py new file mode 100644 index 00000000..0c4daff7 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/audio/fx/audio_normalize.py @@ -0,0 +1,23 @@ +from moviepy.decorators import audio_video_fx + +from .volumex import volumex + + +@audio_video_fx +def audio_normalize(clip): + """ Return a clip whose volume is normalized to 0db. + + Return an audio (or video) clip whose audio volume is normalized + so that the maximum volume is at 0db, the maximum achievable volume. 
+ + Examples + ======== + + >>> from moviepy.editor import * + >>> videoclip = VideoFileClip('myvideo.mp4').fx(afx.audio_normalize) + + """ + + + mv = clip.max_volume() + return volumex(clip, 1 / mv) diff --git a/.venv/Lib/site-packages/moviepy/audio/fx/volumex.py b/.venv/Lib/site-packages/moviepy/audio/fx/volumex.py new file mode 100644 index 00000000..6ec84ad5 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/audio/fx/volumex.py @@ -0,0 +1,20 @@ +from moviepy.decorators import audio_video_fx + + +@audio_video_fx +def volumex(clip, factor): + """ Returns a clip with audio volume multiplied by the + value `factor`. Can be applied to both audio and video clips. + + This effect is loaded as a clip method when you use moviepy.editor, + so you can just write ``clip.volumex(2)`` + + Examples + --------- + + >>> newclip = volumex(clip, 2.0) # doubles audio volume + >>> newclip = clip.fx( volumex, 0.5) # half audio, use with fx + >>> newclip = clip.volumex(2) # only if you used "moviepy.editor" + """ + return clip.fl(lambda gf, t: factor * gf(t), + keep_duration=True) diff --git a/.venv/Lib/site-packages/moviepy/audio/io/AudioFileClip.py b/.venv/Lib/site-packages/moviepy/audio/io/AudioFileClip.py new file mode 100644 index 00000000..b79ab23d --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/audio/io/AudioFileClip.py @@ -0,0 +1,90 @@ +from __future__ import division + +from moviepy.audio.AudioClip import AudioClip +from moviepy.audio.io.readers import FFMPEG_AudioReader + + +class AudioFileClip(AudioClip): + + """ + An audio clip read from a sound file, or an array. + The whole file is not loaded in memory. Instead, only a portion is + read and stored in memory. this portion includes frames before + and after the last frames read, so that it is fast to read the sound + backward and forward. + + Parameters + ------------ + + filename + Either a soundfile name (of any extension supported by ffmpeg) + or an array representing a sound. 
If the soundfile is not a .wav, + it will be converted to .wav first, using the ``fps`` and + ``bitrate`` arguments. + + buffersize: + Size to load in memory (in number of frames) + + + Attributes + ------------ + + nbytes + Number of bits per frame of the original audio file. + + fps + Number of frames per second in the audio file + + buffersize + See Parameters. + + Lifetime + -------- + + Note that this creates subprocesses and locks files. If you construct one of these instances, you must call + close() afterwards, or the subresources will not be cleaned up until the process ends. + + If copies are made, and close() is called on one, it may cause methods on the other copies to fail. + + However, coreaders must be closed separately. + + Examples + ---------- + + >>> snd = AudioFileClip("song.wav") + >>> snd.close() + >>> snd = AudioFileClip("song.mp3", fps = 44100) + >>> second_reader = snd.coreader() + >>> second_reader.close() + >>> snd.close() + >>> with AudioFileClip(mySoundArray, fps=44100) as snd: # from a numeric array + >>> pass # Close is implicitly performed by context manager. + + """ + + def __init__(self, filename, buffersize=200000, nbytes=2, fps=44100): + + AudioClip.__init__(self) + + self.filename = filename + self.reader = FFMPEG_AudioReader(filename, fps=fps, nbytes=nbytes, + buffersize=buffersize) + self.fps = fps + self.duration = self.reader.duration + self.end = self.reader.duration + self.buffersize = self.reader.buffersize + + self.make_frame = lambda t: self.reader.get_frame(t) + self.nchannels = self.reader.nchannels + + def coreader(self): + """ Returns a copy of the AudioFileClip, i.e. a new entrance point + to the audio file. Use copy when you have different clips + watching the audio file at different times. """ + return AudioFileClip(self.filename, self.buffersize) + + def close(self): + """ Close the internal reader. 
""" + if self.reader: + self.reader.close_proc() + self.reader = None diff --git a/.venv/Lib/site-packages/moviepy/audio/io/__init__.py b/.venv/Lib/site-packages/moviepy/audio/io/__init__.py new file mode 100644 index 00000000..21b9da62 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/audio/io/__init__.py @@ -0,0 +1,3 @@ +""" +Class and methods to read, write, preview audiofiles. +""" diff --git a/.venv/Lib/site-packages/moviepy/audio/io/__pycache__/AudioFileClip.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/audio/io/__pycache__/AudioFileClip.cpython-311.pyc new file mode 100644 index 00000000..e8fc05cf Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/audio/io/__pycache__/AudioFileClip.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/audio/io/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/audio/io/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..a515541c Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/audio/io/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/audio/io/__pycache__/ffmpeg_audiowriter.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/audio/io/__pycache__/ffmpeg_audiowriter.cpython-311.pyc new file mode 100644 index 00000000..0ec1fa7d Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/audio/io/__pycache__/ffmpeg_audiowriter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/audio/io/__pycache__/preview.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/audio/io/__pycache__/preview.cpython-311.pyc new file mode 100644 index 00000000..4bd86b6e Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/audio/io/__pycache__/preview.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/audio/io/__pycache__/readers.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/audio/io/__pycache__/readers.cpython-311.pyc new file mode 100644 index 00000000..116c5dc3 Binary files /dev/null and 
b/.venv/Lib/site-packages/moviepy/audio/io/__pycache__/readers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/audio/io/ffmpeg_audiowriter.py b/.venv/Lib/site-packages/moviepy/audio/io/ffmpeg_audiowriter.py new file mode 100644 index 00000000..d02649f5 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/audio/io/ffmpeg_audiowriter.py @@ -0,0 +1,176 @@ +import os +import subprocess as sp + +import proglog + +from moviepy.compat import DEVNULL +from moviepy.config import get_setting +from moviepy.decorators import requires_duration + + +class FFMPEG_AudioWriter: + """ + A class to write an AudioClip into an audio file. + + Parameters + ------------ + + filename + Name of any video or audio file, like ``video.mp4`` or ``sound.wav`` etc. + + size + Size (width,height) in pixels of the output video. + + fps_input + Frames per second of the input audio (given by the AUdioClip being + written down). + + codec + Name of the ffmpeg codec to use for the output. + + bitrate: + A string indicating the bitrate of the final video. Only + relevant for codecs which accept a bitrate. 
+ + """ + + def __init__(self, filename, fps_input, nbytes=2, + nchannels=2, codec='libfdk_aac', bitrate=None, + input_video=None, logfile=None, ffmpeg_params=None): + + self.filename = filename + self.codec = codec + + if logfile is None: + logfile = sp.PIPE + + cmd = ([get_setting("FFMPEG_BINARY"), '-y', + "-loglevel", "error" if logfile == sp.PIPE else "info", + "-f", 's%dle' % (8*nbytes), + "-acodec",'pcm_s%dle' % (8*nbytes), + '-ar', "%d" % fps_input, + '-ac', "%d" % nchannels, + '-i', '-'] + + (['-vn'] if input_video is None else ["-i", input_video, '-vcodec', 'copy']) + + ['-acodec', codec] + + ['-ar', "%d" % fps_input] + + ['-strict', '-2'] # needed to support codec 'aac' + + (['-ab', bitrate] if (bitrate is not None) else []) + + (ffmpeg_params if ffmpeg_params else []) + + [filename]) + + popen_params = {"stdout": DEVNULL, + "stderr": logfile, + "stdin": sp.PIPE} + + if os.name == "nt": + popen_params["creationflags"] = 0x08000000 + + self.proc = sp.Popen(cmd, **popen_params) + + def write_frames(self, frames_array): + try: + try: + self.proc.stdin.write(frames_array.tobytes()) + except NameError: + self.proc.stdin.write(frames_array.tostring()) + except IOError as err: + ffmpeg_error = self.proc.stderr.read() + error = (str(err) + ("\n\nMoviePy error: FFMPEG encountered " + "the following error while writing file %s:" % self.filename + + "\n\n" + str(ffmpeg_error))) + + if b"Unknown encoder" in ffmpeg_error: + + error = (error + + ("\n\nThe audio export failed because FFMPEG didn't " + "find the specified codec for audio encoding (%s). " + "Please install this codec or change the codec when " + "calling to_videofile or to_audiofile. For instance " + "for mp3:\n" + " >>> to_videofile('myvid.mp4', audio_codec='libmp3lame')" + ) % (self.codec)) + + elif b"incorrect codec parameters ?" 
in ffmpeg_error: + + error = (error + + ("\n\nThe audio export failed, possibly because the " + "codec specified for the video (%s) is not compatible" + " with the given extension (%s). Please specify a " + "valid 'codec' argument in to_videofile. This would " + "be 'libmp3lame' for mp3, 'libvorbis' for ogg...") + % (self.codec, self.ext)) + + elif b"encoder setup failed" in ffmpeg_error: + + error = (error + + ("\n\nThe audio export failed, possily because the " + "bitrate you specified was two high or too low for " + "the video codec.")) + + else: + error = (error + + ("\n\nIn case it helps, make sure you are using a " + "recent version of FFMPEG (the versions in the " + "Ubuntu/Debian repos are deprecated).")) + + raise IOError(error) + + def close(self): + if hasattr(self, 'proc') and self.proc: + self.proc.stdin.close() + self.proc.stdin = None + if self.proc.stderr is not None: + self.proc.stderr.close() + self.proc.stdee = None + # If this causes deadlocks, consider terminating instead. + self.proc.wait() + self.proc = None + + def __del__(self): + # If the garbage collector comes, make sure the subprocess is terminated. + self.close() + + # Support the Context Manager protocol, to ensure that resources are cleaned up. + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + + +@requires_duration +def ffmpeg_audiowrite(clip, filename, fps, nbytes, buffersize, + codec='libvorbis', bitrate=None, + write_logfile=False, verbose=True, + ffmpeg_params=None, logger='bar'): + """ + A function that wraps the FFMPEG_AudioWriter to write an AudioClip + to a file. + + NOTE: verbose is deprecated. 
+ """ + + if write_logfile: + logfile = open(filename + ".log", 'w+') + else: + logfile = None + logger = proglog.default_bar_logger(logger) + logger(message="MoviePy - Writing audio in %s" % filename) + writer = FFMPEG_AudioWriter(filename, fps, nbytes, clip.nchannels, + codec=codec, bitrate=bitrate, + logfile=logfile, + ffmpeg_params=ffmpeg_params) + + for chunk in clip.iter_chunks(chunksize=buffersize, + quantize=True, + nbytes=nbytes, fps=fps, + logger=logger): + writer.write_frames(chunk) + + writer.close() + + if write_logfile: + logfile.close() + logger(message="MoviePy - Done.") diff --git a/.venv/Lib/site-packages/moviepy/audio/io/preview.py b/.venv/Lib/site-packages/moviepy/audio/io/preview.py new file mode 100644 index 00000000..e321b297 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/audio/io/preview.py @@ -0,0 +1,68 @@ +import time + +import numpy as np + +import pygame as pg +from moviepy.decorators import requires_duration + +pg.init() +pg.display.set_caption('MoviePy') + + +@requires_duration +def preview(clip, fps=22050, buffersize=4000, nbytes=2, audioFlag=None, + videoFlag=None): + """ + Plays the sound clip with pygame. + + Parameters + ----------- + + fps + Frame rate of the sound. 44100 gives top quality, but may cause + problems if your computer is not fast enough and your clip is + complicated. If the sound jumps during the preview, lower it + (11025 is still fine, 5000 is tolerable). + + buffersize + The sound is not generated all at once, but rather made by bunches + of frames (chunks). ``buffersize`` is the size of such a chunk. + Try varying it if you meet audio problems (but you shouldn't + have to). + + nbytes: + Number of bytes to encode the sound: 1 for 8bit sound, 2 for + 16bit, 4 for 32bit sound. 2 bytes is fine. + + audioFlag, videoFlag: + Instances of class threading events that are used to synchronize + video and audio during ``VideoClip.preview()``. 
+ + """ + + pg.mixer.quit() + + pg.mixer.init(fps, -8 * nbytes, clip.nchannels, 1024) + totalsize = int(fps*clip.duration) + pospos = np.array(list(range(0, totalsize, buffersize))+[totalsize]) + tt = (1.0/fps)*np.arange(pospos[0], pospos[1]) + sndarray = clip.to_soundarray(tt, nbytes=nbytes, quantize=True) + chunk = pg.sndarray.make_sound(sndarray) + + if (audioFlag is not None) and (videoFlag is not None): + audioFlag.set() + videoFlag.wait() + + channel = chunk.play() + for i in range(1, len(pospos)-1): + tt = (1.0/fps)*np.arange(pospos[i], pospos[i+1]) + sndarray = clip.to_soundarray(tt, nbytes=nbytes, quantize=True) + chunk = pg.sndarray.make_sound(sndarray) + while channel.get_queue(): + time.sleep(0.003) + if videoFlag is not None: + if not videoFlag.is_set(): + channel.stop() + del channel + return + channel.queue(chunk) diff --git a/.venv/Lib/site-packages/moviepy/audio/io/readers.py b/.venv/Lib/site-packages/moviepy/audio/io/readers.py new file mode 100644 index 00000000..991e60bb --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/audio/io/readers.py @@ -0,0 +1,254 @@ +import os +import subprocess as sp +import warnings + +import numpy as np + +from moviepy.compat import DEVNULL, PY3 +from moviepy.config import get_setting +from moviepy.video.io.ffmpeg_reader import ffmpeg_parse_infos + + +class FFMPEG_AudioReader: + """ + A class to read the audio in either video files or audio files + using ffmpeg. ffmpeg will read any audio and transform them into + raw data. + + Parameters + ------------ + + filename + Name of any video or audio file, like ``video.mp4`` or + ``sound.wav`` etc. + + buffersize + The size of the buffer to use. 
Should be bigger than the buffer + used by ``to_audiofile`` + + print_infos + Print the ffmpeg infos on the file being read (for debugging) + + fps + Desired frames per second in the decoded signal that will be + received from ffmpeg + + nbytes + Desired number of bytes (1,2,4) in the signal that will be + received from ffmpeg + + """ + + def __init__(self, filename, buffersize, print_infos=False, + fps=44100, nbytes=2, nchannels=2): + + self.filename = filename + self.nbytes = nbytes + self.fps = fps + self.f = 's%dle'%(8*nbytes) + self.acodec = 'pcm_s%dle'%(8*nbytes) + self.nchannels = nchannels + infos = ffmpeg_parse_infos(filename) + self.duration = infos['duration'] + if 'video_duration' in infos: + self.duration = infos['video_duration'] + else: + self.duration = infos['duration'] + self.infos = infos + self.proc = None + + self.nframes = int(self.fps * self.duration) + self.buffersize= min( self.nframes+1, buffersize ) + self.buffer= None + self.buffer_startframe = 1 + self.initialize() + self.buffer_around(1) + + def initialize(self, starttime = 0): + """ Opens the file, creates the pipe. 
""" + + self.close_proc() # if any + + if starttime !=0 : + offset = min(1,starttime) + i_arg = ["-ss", "%.05f"%(starttime-offset), + '-i', self.filename, '-vn', + "-ss", "%.05f"%offset] + else: + i_arg = [ '-i', self.filename, '-vn'] + + + cmd = ([get_setting("FFMPEG_BINARY")] + i_arg + + [ '-loglevel', 'error', + '-f', self.f, + '-acodec', self.acodec, + '-ar', "%d"%self.fps, + '-ac', '%d'%self.nchannels, '-']) + + popen_params = {"bufsize": self.buffersize, + "stdout": sp.PIPE, + "stderr": sp.PIPE, + "stdin": DEVNULL} + + if os.name == "nt": + popen_params["creationflags"] = 0x08000000 + + self.proc = sp.Popen( cmd, **popen_params) + + self.pos = np.round(self.fps*starttime) + + + + def skip_chunk(self,chunksize): + s = self.proc.stdout.read(self.nchannels*chunksize*self.nbytes) + self.proc.stdout.flush() + self.pos = self.pos+chunksize + + + + def read_chunk(self,chunksize): + # chunksize is not being autoconverted from float to int + chunksize = int(round(chunksize)) + L = self.nchannels*chunksize*self.nbytes + s = self.proc.stdout.read(L) + dt = {1: 'int8',2:'int16',4:'int32'}[self.nbytes] + if hasattr(np, 'frombuffer'): + result = np.frombuffer(s, dtype=dt) + else: + result = np.fromstring(s, dtype=dt) + result = (1.0*result / 2**(8*self.nbytes-1)).\ + reshape((int(len(result)/self.nchannels), + self.nchannels)) + #self.proc.stdout.flush() + self.pos = self.pos+chunksize + return result + + + + def seek(self,pos): + """ + Reads a frame at time t. Note for coders: getting an arbitrary + frame in the video with ffmpeg can be painfully slow if some + decoding has to be done. This function tries to avoid fectching + arbitrary frames whenever possible, by moving between adjacent + frames. 
+ """ + if (pos < self.pos) or (pos> (self.pos+1000000)): + t = 1.0*pos/self.fps + self.initialize(t) + elif pos > self.pos: + #print pos + self.skip_chunk(pos-self.pos) + # last case standing: pos = current pos + self.pos = pos + + + + def close_proc(self): + if hasattr(self, 'proc') and self.proc is not None: + self.proc.terminate() + for std in [ self.proc.stdout, + self.proc.stderr]: + std.close() + self.proc.wait() + self.proc = None + + def get_frame(self, tt): + + buffersize = self.buffersize + if isinstance(tt,np.ndarray): + # lazy implementation, but should not cause problems in + # 99.99 % of the cases + + + # elements of t that are actually in the range of the + # audio file. + in_time = (tt>=0) & (tt < self.duration) + + # Check that the requested time is in the valid range + if not in_time.any(): + raise IOError("Error in file %s, "%(self.filename)+ + "Accessing time t=%.02f-%.02f seconds, "%(tt[0], tt[-1])+ + "with clip duration=%d seconds, "%self.duration) + + # The np.round in the next line is super-important. + # Removing it results in artifacts in the noise. 
+ frames = np.round((self.fps*tt)).astype(int)[in_time] + fr_min, fr_max = frames.min(), frames.max() + + if not (0 <= + (fr_min - self.buffer_startframe) + < len(self.buffer)): + self.buffer_around(fr_min) + elif not (0 <= + (fr_max - self.buffer_startframe) + < len(self.buffer)): + self.buffer_around(fr_max) + + try: + result = np.zeros((len(tt),self.nchannels)) + indices = frames - self.buffer_startframe + if len(self.buffer) < self.buffersize // 2: + indices = indices - (self.buffersize // 2 - len(self.buffer) + 1) + result[in_time] = self.buffer[indices] + return result + + except IndexError as error: + warnings.warn("Error in file %s, "%(self.filename)+ + "At time t=%.02f-%.02f seconds, "%(tt[0], tt[-1])+ + "indices wanted: %d-%d, "%(indices.min(), indices.max())+ + "but len(buffer)=%d\n"%(len(self.buffer))+ str(error), + UserWarning) + + # repeat the last frame instead + indices[indices>=len(self.buffer)] = len(self.buffer) -1 + result[in_time] = self.buffer[indices] + return result + + else: + + ind = int(self.fps*tt) + if ind<0 or ind> self.nframes: # out of time: return 0 + return np.zeros(self.nchannels) + + if not (0 <= (ind - self.buffer_startframe) = 3 + +try: + string_types = (str, unicode) # Python 2 +except NameError: + string_types = (str) # Python 3 + +try: + from subprocess import DEVNULL # Python 3 +except ImportError: + DEVNULL = open(os.devnull, 'wb') # Python 2 diff --git a/.venv/Lib/site-packages/moviepy/config.py b/.venv/Lib/site-packages/moviepy/config.py new file mode 100644 index 00000000..b1ce2f85 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/config.py @@ -0,0 +1,118 @@ +import os +import subprocess as sp + +from .compat import DEVNULL +from .config_defaults import FFMPEG_BINARY, IMAGEMAGICK_BINARY + +if os.name == 'nt': + try: + import winreg as wr # py3k + except ImportError: + import _winreg as wr # py2k + + +def try_cmd(cmd): + try: + popen_params = { + "stdout": sp.PIPE, + "stderr": sp.PIPE, + "stdin": DEVNULL + } + + # This 
was added so that no extra unwanted window opens on windows + # when the child process is created + if os.name == "nt": + popen_params["creationflags"] = 0x08000000 + + proc = sp.Popen(cmd, **popen_params) + proc.communicate() + except Exception as err: + return False, err + else: + return True, None + +if FFMPEG_BINARY=='ffmpeg-imageio': + from imageio.plugins.ffmpeg import get_exe + FFMPEG_BINARY = get_exe() + +elif FFMPEG_BINARY=='auto-detect': + + if try_cmd(['ffmpeg'])[0]: + FFMPEG_BINARY = 'ffmpeg' + elif try_cmd(['ffmpeg.exe'])[0]: + FFMPEG_BINARY = 'ffmpeg.exe' + else: + FFMPEG_BINARY = 'unset' +else: + success, err = try_cmd([FFMPEG_BINARY]) + if not success: + raise IOError( + str(err) + + " - The path specified for the ffmpeg binary might be wrong") + +if IMAGEMAGICK_BINARY=='auto-detect': + if os.name == 'nt': + try: + key = wr.OpenKey(wr.HKEY_LOCAL_MACHINE, 'SOFTWARE\\ImageMagick\\Current') + IMAGEMAGICK_BINARY = wr.QueryValueEx(key, 'BinPath')[0] + r"\convert.exe" + key.Close() + except: + IMAGEMAGICK_BINARY = 'unset' + elif try_cmd(['convert'])[0]: + IMAGEMAGICK_BINARY = 'convert' + else: + IMAGEMAGICK_BINARY = 'unset' +else: + if not os.path.exists(IMAGEMAGICK_BINARY): + raise IOError( + "ImageMagick binary cannot be found at {}".format( + IMAGEMAGICK_BINARY + ) + ) + + if not os.path.isfile(IMAGEMAGICK_BINARY): + raise IOError( + "ImageMagick binary found at {} is not a file".format( + IMAGEMAGICK_BINARY + ) + ) + + success, err = try_cmd([IMAGEMAGICK_BINARY]) + if not success: + raise IOError("%s - The path specified for the ImageMagick binary might " + "be wrong: %s" % (err, IMAGEMAGICK_BINARY)) + + +def get_setting(varname): + """ Returns the value of a configuration variable. """ + gl = globals() + if varname not in gl.keys(): + raise ValueError("Unknown setting %s"%varname) + # Here, possibly add some code to raise exceptions if some + # parameter isn't set set properly, explaining on how to set it. 
+ return gl[varname] + + +def change_settings(new_settings=None, filename=None): + """ Changes the value of configuration variables.""" + new_settings = new_settings or {} + gl = globals() + if filename: + with open(filename) as in_file: + exec(in_file) + gl.update(locals()) + gl.update(new_settings) + # Here you can add some code to check that the new configuration + # values are valid. + + +if __name__ == "__main__": + if try_cmd([FFMPEG_BINARY])[0]: + print( "MoviePy : ffmpeg successfully found." ) + else: + print( "MoviePy : can't find or access ffmpeg." ) + + if try_cmd([IMAGEMAGICK_BINARY])[0]: + print( "MoviePy : ImageMagick successfully found." ) + else: + print( "MoviePy : can't find or access ImageMagick." ) diff --git a/.venv/Lib/site-packages/moviepy/config_defaults.py b/.venv/Lib/site-packages/moviepy/config_defaults.py new file mode 100644 index 00000000..d88e1f7c --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/config_defaults.py @@ -0,0 +1,54 @@ +""" +Configuration of MoviePy + + +This file enables you to specify a configuration for MoviePy. In +particular you can enter the path to the FFMPEG and ImageMagick +binaries. + +Defaults must be done BEFORE installing MoviePy: first make the changes, +then install MoviePy with + + [sudo] python setup.py install + +Note that you can also change the path by setting environment variables. +e.g. + +Linux/Mac: + export FFMPEG_BINARY=path/to/ffmpeg + +Windows: + set FFMPEG_BINARY=path\to\ffmpeg + +Instructions +-------------- + +FFMPEG_BINARY + Normally you can leave this one to its default ('ffmpeg-imageio') at which + case image-io will download the right ffmpeg binary (at first use) and then + always use that binary. + The second option is 'auto-detect', in this case ffmpeg will be whatever + binary is found on the computer generally 'ffmpeg' (on linux) or 'ffmpeg.exe' + (on windows). 
+ Third option: If you want to use a binary at a special location on you disk, + enter it like that: + + FFMPEG_BINARY = r"path/to/ffmpeg" # on linux + FFMPEG_BINARY = r"path\to\ffmpeg.exe" # on windows + + Warning: the 'r' before the path is important, especially on Windows. + + +IMAGEMAGICK_BINARY + For linux users, 'convert' should be fine. + For Windows users, you must specify the path to the ImageMagick + 'magick' binary. For instance: + + IMAGEMAGICK_BINARY = r"C:\Program Files\ImageMagick-6.8.8-Q16\magick.exe" + +""" + +import os + +FFMPEG_BINARY = os.getenv('FFMPEG_BINARY', 'ffmpeg-imageio') +IMAGEMAGICK_BINARY = os.getenv('IMAGEMAGICK_BINARY', 'auto-detect') diff --git a/.venv/Lib/site-packages/moviepy/decorators.py b/.venv/Lib/site-packages/moviepy/decorators.py new file mode 100644 index 00000000..277276ca --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/decorators.py @@ -0,0 +1,135 @@ +""" +all decorators used in moviepy go there +""" + +import decorator + +from moviepy.tools import cvsecs + + +@decorator.decorator +def outplace(f, clip, *a, **k): + """ Applies f(clip.copy(), *a, **k) and returns clip.copy()""" + newclip = clip.copy() + f(newclip, *a, **k) + return newclip + +@decorator.decorator +def convert_masks_to_RGB(f, clip, *a, **k): + """ If the clip is a mask, convert it to RGB before running the function """ + if clip.ismask: + clip = clip.to_RGB() + return f(clip, *a, **k) + +@decorator.decorator +def apply_to_mask(f, clip, *a, **k): + """ This decorator will apply the same function f to the mask of + the clip created with f """ + + newclip = f(clip, *a, **k) + if getattr(newclip, 'mask', None): + newclip.mask = f(newclip.mask, *a, **k) + return newclip + + + +@decorator.decorator +def apply_to_audio(f, clip, *a, **k): + """ This decorator will apply the function f to the audio of + the clip created with f """ + + newclip = f(clip, *a, **k) + if getattr(newclip, 'audio', None): + newclip.audio = f(newclip.audio, *a, **k) + return newclip + 
+ +@decorator.decorator +def requires_duration(f, clip, *a, **k): + """ Raise an error if the clip has no duration.""" + + if clip.duration is None: + raise ValueError("Attribute 'duration' not set") + else: + return f(clip, *a, **k) + + + +@decorator.decorator +def audio_video_fx(f, clip, *a, **k): + """ Use an audio function on a video/audio clip + + This decorator tells that the function f (audioclip -> audioclip) + can be also used on a video clip, at which case it returns a + videoclip with unmodified video and modified audio. + """ + + if hasattr(clip, "audio"): + newclip = clip.copy() + if clip.audio is not None: + newclip.audio = f(clip.audio, *a, **k) + return newclip + else: + return f(clip, *a, **k) + +def preprocess_args(fun,varnames): + """ Applies fun to variables in varnames before launching the function """ + + def wrapper(f, *a, **kw): + if hasattr(f, "func_code"): + func_code = f.func_code # Python 2 + else: + func_code = f.__code__ # Python 3 + + names = func_code.co_varnames + new_a = [fun(arg) if (name in varnames) else arg + for (arg, name) in zip(a, names)] + new_kw = {k: fun(v) if k in varnames else v + for (k,v) in kw.items()} + return f(*new_a, **new_kw) + return decorator.decorator(wrapper) + + +def convert_to_seconds(varnames): + "Converts the specified variables to seconds" + return preprocess_args(cvsecs, varnames) + + + +@decorator.decorator +def add_mask_if_none(f, clip, *a, **k): + """ Add a mask to the clip if there is none. """ + if clip.mask is None: + clip = clip.add_mask() + return f(clip, *a, **k) + + + +@decorator.decorator +def use_clip_fps_by_default(f, clip, *a, **k): + """ Will use clip.fps if no fps=... is provided in **k """ + + def fun(fps): + if fps is not None: + return fps + elif getattr(clip, 'fps', None): + return clip.fps + raise AttributeError("No 'fps' (frames per second) attribute specified" + " for function %s and the clip has no 'fps' attribute. Either" + " provide e.g. 
fps=24 in the arguments of the function, or define" + " the clip's fps with `clip.fps=24`" % f.__name__) + + + if hasattr(f, "func_code"): + func_code = f.func_code # Python 2 + else: + func_code = f.__code__ # Python 3 + + names = func_code.co_varnames[1:] + + new_a = [fun(arg) if (name=='fps') else arg + for (arg, name) in zip(a, names)] + new_kw = {k: fun(v) if k=='fps' else v + for (k,v) in k.items()} + + return f(clip, *new_a, **new_kw) diff --git a/.venv/Lib/site-packages/moviepy/editor.py b/.venv/Lib/site-packages/moviepy/editor.py new file mode 100644 index 00000000..c5dff72b --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/editor.py @@ -0,0 +1,131 @@ +""" +This file is meant to make it easy to load the main features of +MoviePy by simply typing: + +>>> from moviepy.editor import * + +In particular it will load many effects from the video.fx and audio.fx +folders and turn them into VideoClip methods, so that instead of +>>> clip.fx( vfx.resize, 2 ) # or equivalently vfx.resize(clip, 2) +we can write +>>> clip.resize(2) + +It also starts a PyGame session (if PyGame is installed) and enables +clip.preview(). +""" + +# Note that these imports could have been performed in the __init__.py +# file, but this would make the loading of moviepy slower. + +import os +import sys + +# Downloads ffmpeg if it isn't already installed +import imageio +# Checks to see if the user has set a place for their own version of ffmpeg + +if os.getenv('FFMPEG_BINARY') is None: + if sys.version_info < (3, 4): + #uses an old version of imageio with ffmpeg.download. 
+ imageio.plugins.ffmpeg.download() + +# Hide the welcome message from pygame: https://github.com/pygame/pygame/issues/542 +os.environ['PYGAME_HIDE_SUPPORT_PROMPT'] = "1" + +# Clips +from .video.io.VideoFileClip import VideoFileClip +from .video.io.ImageSequenceClip import ImageSequenceClip +from .video.io.downloader import download_webfile +from .video.VideoClip import VideoClip, ImageClip, ColorClip, TextClip +from .video.compositing.CompositeVideoClip import CompositeVideoClip, clips_array +from .video.compositing.concatenate import concatenate_videoclips, concatenate # concatenate=deprecated + +from .audio.AudioClip import AudioClip, CompositeAudioClip, concatenate_audioclips +from .audio.io.AudioFileClip import AudioFileClip + +# FX + +import moviepy.video.fx.all as vfx +import moviepy.audio.fx.all as afx +import moviepy.video.compositing.transitions as transfx + +# Tools + +import moviepy.video.tools as videotools +import moviepy.video.io.ffmpeg_tools as ffmpeg_tools +from .video.io.html_tools import ipython_display +from .tools import cvsecs + +try: + from .video.io.sliders import sliders +except ImportError: + pass + +# The next loop transforms many effects into VideoClip methods so that +# they can be walled with myclip.resize(width=500) instead of +# myclip.fx( vfx.resize, width= 500) +for method in [ + "afx.audio_fadein", + "afx.audio_fadeout", + "afx.audio_normalize", + "afx.volumex", + "transfx.crossfadein", + "transfx.crossfadeout", + "vfx.crop", + "vfx.fadein", + "vfx.fadeout", + "vfx.invert_colors", + "vfx.loop", + "vfx.margin", + "vfx.mask_and", + "vfx.mask_or", + "vfx.resize", + "vfx.rotate", + "vfx.speedx" + ]: + + exec("VideoClip.%s = %s" % (method.split('.')[1], method)) + + +for method in ["afx.audio_fadein", + "afx.audio_fadeout", + "afx.audio_loop", + "afx.audio_normalize", + "afx.volumex" + ]: + + exec("AudioClip.%s = %s" % (method.split('.')[1], method)) + + +# adds easy ipython integration +VideoClip.ipython_display = ipython_display 
+AudioClip.ipython_display = ipython_display +#----------------------------------------------------------------- +# Previews: try to import pygame, else make methods which raise +# exceptions saying to install PyGame + + +# Add methods preview and show (only if pygame installed) +try: + from moviepy.video.io.preview import show, preview +except ImportError: + def preview(self, *args, **kwargs): + """NOT AVAILABLE : clip.preview requires Pygame installed.""" + raise ImportError("clip.preview requires Pygame installed") + + def show(self, *args, **kwargs): + """NOT AVAILABLE : clip.show requires Pygame installed.""" + raise ImportError("clip.show requires Pygame installed") + + +VideoClip.preview = preview +VideoClip.show = show + +try: + from moviepy.audio.io.preview import preview +except ImportError: + def preview(self, *args, **kwargs): + """ NOT AVAILABLE : clip.preview requires Pygame installed.""" + raise ImportError("clip.preview requires Pygame installed") + +AudioClip.preview = preview diff --git a/.venv/Lib/site-packages/moviepy/tools.py b/.venv/Lib/site-packages/moviepy/tools.py new file mode 100644 index 00000000..5357c3a6 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/tools.py @@ -0,0 +1,177 @@ +""" +Misc. useful functions that can be used at many places in the program. +""" +import os +import subprocess as sp +import sys +import warnings + +import proglog + +from .compat import DEVNULL + + + +def sys_write_flush(s): + """ Writes and flushes without delay a text in the console """ + # Reason for not using `print` is that in some consoles "print" + # commands get delayed, while stdout.flush are instantaneous, + # so this method is better at providing feedback. 
+ # See https://github.com/Zulko/moviepy/pull/485 + sys.stdout.write(s) + sys.stdout.flush() + + +def verbose_print(verbose, s): + """ Only prints s (with sys_write_flush) if verbose is True.""" + if verbose: + sys_write_flush(s) + + +def subprocess_call(cmd, logger='bar', errorprint=True): + """ Executes the given subprocess command. + + Set logger to None or a custom Proglog logger to avoid printings. + """ + logger = proglog.default_bar_logger(logger) + logger(message='Moviepy - Running:\n>>> "+ " ".join(cmd)') + + popen_params = {"stdout": DEVNULL, + "stderr": sp.PIPE, + "stdin": DEVNULL} + + if os.name == "nt": + popen_params["creationflags"] = 0x08000000 + + proc = sp.Popen(cmd, **popen_params) + + out, err = proc.communicate() # proc.wait() + proc.stderr.close() + + if proc.returncode: + if errorprint: + logger(message='Moviepy - Command returned an error') + raise IOError(err.decode('utf8')) + else: + logger(message='Moviepy - Command successful') + + del proc + +def is_string(obj): + """ Returns true if s is string or string-like object, + compatible with Python 2 and Python 3.""" + try: + return isinstance(obj, basestring) + except NameError: + return isinstance(obj, str) + + +def cvsecs(time): + """ Will convert any time into seconds. + + If the type of `time` is not valid, + it's returned as is. 
+ + Here are the accepted formats:: + + >>> cvsecs(15.4) # seconds + 15.4 + >>> cvsecs((1, 21.5)) # (min,sec) + 81.5 + >>> cvsecs((1, 1, 2)) # (hr, min, sec) + 3662 + >>> cvsecs('01:01:33.045') + 3693.045 + >>> cvsecs('01:01:33,5') # coma works too + 3693.5 + >>> cvsecs('1:33,5') # only minutes and secs + 99.5 + >>> cvsecs('33.5') # only secs + 33.5 + """ + factors = (1, 60, 3600) + + if is_string(time): + time = [float(f.replace(',', '.')) for f in time.split(':')] + + if not isinstance(time, (tuple, list)): + return time + + return sum(mult * part for mult, part in zip(factors, reversed(time))) + + +def deprecated_version_of(f, oldname, newname=None): + """ Indicates that a function is deprecated and has a new name. + + `f` is the new function, `oldname` the name of the deprecated + function, `newname` the name of `f`, which can be automatically + found. + + Returns + ======== + + f_deprecated + A function that does the same thing as f, but with a docstring + and a printed message on call which say that the function is + deprecated and that you should use f instead. + + Examples + ========= + + >>> # The badly named method 'to_file' is replaced by 'write_file' + >>> class Clip: + >>> def write_file(self, some args): + >>> # blablabla + >>> + >>> Clip.to_file = deprecated_version_of(Clip.write_file, 'to_file') + """ + + if newname is None: newname = f.__name__ + + warning= ("The function ``%s`` is deprecated and is kept temporarily " + "for backwards compatibility.\nPlease use the new name, " + "``%s``, instead.")%(oldname, newname) + + def fdepr(*a, **kw): + warnings.warn("MoviePy: " + warning, PendingDeprecationWarning) + return f(*a, **kw) + fdepr.__doc__ = warning + + return fdepr + + +# non-exhaustive dictionnary to store default informations. +# any addition is most welcome. +# Note that 'gif' is complicated to place. From a VideoFileClip point of view, +# it is a video, but from a HTML5 point of view, it is an image. 
+ +extensions_dict = { "mp4": {'type':'video', 'codec':['libx264','libmpeg4', 'aac']}, + 'ogv': {'type':'video', 'codec':['libtheora']}, + 'webm': {'type':'video', 'codec':['libvpx']}, + 'avi': {'type':'video'}, + 'mov': {'type':'video'}, + + 'ogg': {'type':'audio', 'codec':['libvorbis']}, + 'mp3': {'type':'audio', 'codec':['libmp3lame']}, + 'wav': {'type':'audio', 'codec':['pcm_s16le', 'pcm_s24le', 'pcm_s32le']}, + 'm4a': {'type':'audio', 'codec':['libfdk_aac']} + } + +for ext in ["jpg", "jpeg", "png", "bmp", "tiff"]: + extensions_dict[ext] = {'type':'image'} + + +def find_extension(codec): + if codec in extensions_dict: + # codec is already the extension + return codec + + for ext,infos in extensions_dict.items(): + if codec in infos.get('codec', []): + return ext + raise ValueError( + "The audio_codec you chose is unknown by MoviePy. " + "You should report this. In the meantime, you can " + "specify a temp_audiofile with the right extension " + "in write_videofile." + ) diff --git a/.venv/Lib/site-packages/moviepy/utils.py b/.venv/Lib/site-packages/moviepy/utils.py new file mode 100644 index 00000000..705cb495 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/utils.py @@ -0,0 +1,20 @@ +from moviepy.audio.io.AudioFileClip import AudioFileClip +from moviepy.video.io.VideoFileClip import VideoFileClip +from moviepy.video.VideoClip import ImageClip + + +CLIP_TYPES = { + 'audio': AudioFileClip, + 'video': VideoFileClip, + 'image': ImageClip, +} + +def close_all_clips(objects='globals', types=('audio', 'video', 'image')): + if objects == 'globals': + objects = globals() + if hasattr(objects, 'values'): + objects = objects.values() + types_tuple = tuple(CLIP_TYPES[key] for key in types) + for obj in objects: + if isinstance(obj, types_tuple): + obj.close() diff --git a/.venv/Lib/site-packages/moviepy/version.py b/.venv/Lib/site-packages/moviepy/version.py new file mode 100644 index 00000000..976498ab --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/version.py @@ 
-0,0 +1 @@ +__version__ = "1.0.3" diff --git a/.venv/Lib/site-packages/moviepy/video/VideoClip.py b/.venv/Lib/site-packages/moviepy/video/VideoClip.py new file mode 100644 index 00000000..d19d2535 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/VideoClip.py @@ -0,0 +1,1195 @@ +""" +This module implements VideoClip (base class for video clips) and its +main subclasses: +- Animated clips: VideofileClip, ImageSequenceClip +- Static image clips: ImageClip, ColorClip, TextClip, +""" +import os +import subprocess as sp +import tempfile +import warnings + +import numpy as np +import proglog +from imageio import imread, imsave + +from ..Clip import Clip +from ..compat import DEVNULL, string_types +from ..config import get_setting +from ..decorators import (add_mask_if_none, apply_to_mask, + convert_masks_to_RGB, convert_to_seconds, outplace, + requires_duration, use_clip_fps_by_default) +from ..tools import (deprecated_version_of, extensions_dict, find_extension, + is_string, subprocess_call) +from .io.ffmpeg_writer import ffmpeg_write_video +from .io.gif_writers import (write_gif, write_gif_with_image_io, + write_gif_with_tempfiles) +from .tools.drawing import blit + + +class VideoClip(Clip): + """Base class for video clips. + + See ``VideoFileClip``, ``ImageClip`` etc. for more user-friendly + classes. + + + Parameters + ----------- + + ismask + `True` if the clip is going to be used as a mask. + + + Attributes + ---------- + + size + The size of the clip, (width,heigth), in pixels. + + w, h + The width and height of the clip, in pixels. + + ismask + Boolean set to `True` if the clip is a mask. + + make_frame + A function ``t-> frame at time t`` where ``frame`` is a + w*h*3 RGB array. + + mask (default None) + VideoClip mask attached to this clip. If mask is ``None``, + The video clip is fully opaque. + + audio (default None) + An AudioClip instance containing the audio of the video clip. 
+ + pos + A function ``t->(x,y)`` where ``x,y`` is the position + of the clip when it is composed with other clips. + See ``VideoClip.set_pos`` for more details + + relative_pos + See variable ``pos``. + + """ + + def __init__(self, make_frame=None, ismask=False, duration=None, + has_constant_size=True): + Clip.__init__(self) + self.mask = None + self.audio = None + self.pos = lambda t: (0, 0) + self.relative_pos = False + if make_frame: + self.make_frame = make_frame + self.size = self.get_frame(0).shape[:2][::-1] + self.ismask = ismask + self.has_constant_size=has_constant_size + if duration is not None: + self.duration = duration + self.end = duration + + @property + def w(self): + return self.size[0] + + @property + def h(self): + return self.size[1] + + @property + def aspect_ratio(self): + return self.w / float(self.h) + + # =============================================================== + # EXPORT OPERATIONS + + @convert_to_seconds(['t']) + @convert_masks_to_RGB + def save_frame(self, filename, t=0, withmask=True): + """ Save a clip's frame to an image file. + + Saves the frame of clip corresponding to time ``t`` in + 'filename'. ``t`` can be expressed in seconds (15.35), in + (min, sec), in (hour, min, sec), or as a string: '01:03:05.35'. + + If ``withmask`` is ``True`` the mask is saved in + the alpha layer of the picture (only works with PNGs). 
+ + """ + + im = self.get_frame(t) + if withmask and self.mask is not None: + mask = 255 * self.mask.get_frame(t) + im = np.dstack([im, mask]).astype('uint8') + else: + im = im.astype("uint8") + + imsave(filename, im) + + @requires_duration + @use_clip_fps_by_default + @convert_masks_to_RGB + def write_videofile(self, filename, fps=None, codec=None, + bitrate=None, audio=True, audio_fps=44100, + preset="medium", + audio_nbytes=4, audio_codec=None, + audio_bitrate=None, audio_bufsize=2000, + temp_audiofile=None, + rewrite_audio=True, remove_temp=True, + write_logfile=False, verbose=True, + threads=None, ffmpeg_params=None, + logger='bar'): + """Write the clip to a videofile. + + Parameters + ----------- + + filename + Name of the video file to write in. + The extension must correspond to the "codec" used (see below), + or simply be '.avi' (which will work with any codec). + + fps + Number of frames per second in the resulting video file. If None is + provided, and the clip has an fps attribute, this fps will be used. + + codec + Codec to use for image encoding. Can be any codec supported + by ffmpeg. If the filename is has extension '.mp4', '.ogv', '.webm', + the codec will be set accordingly, but you can still set it if you + don't like the default. For other extensions, the output filename + must be set accordingly. + + Some examples of codecs are: + + ``'libx264'`` (default codec for file extension ``.mp4``) + makes well-compressed videos (quality tunable using 'bitrate'). + + + ``'mpeg4'`` (other codec for extension ``.mp4``) can be an alternative + to ``'libx264'``, and produces higher quality videos by default. + + + ``'rawvideo'`` (use file extension ``.avi``) will produce + a video of perfect quality, of possibly very huge size. + + + ``png`` (use file extension ``.avi``) will produce a video + of perfect quality, of smaller size than with ``rawvideo``. 
+ + + ``'libvorbis'`` (use file extension ``.ogv``) is a nice video + format, which is completely free/ open source. However not + everyone has the codecs installed by default on their machine. + + + ``'libvpx'`` (use file extension ``.webm``) is tiny a video + format well indicated for web videos (with HTML5). Open source. + + + audio + Either ``True``, ``False``, or a file name. + If ``True`` and the clip has an audio clip attached, this + audio clip will be incorporated as a soundtrack in the movie. + If ``audio`` is the name of an audio file, this audio file + will be incorporated as a soundtrack in the movie. + + audiofps + frame rate to use when generating the sound. + + temp_audiofile + the name of the temporary audiofile to be generated and + incorporated in the the movie, if any. + + audio_codec + Which audio codec should be used. Examples are 'libmp3lame' + for '.mp3', 'libvorbis' for 'ogg', 'libfdk_aac':'m4a', + 'pcm_s16le' for 16-bit wav and 'pcm_s32le' for 32-bit wav. + Default is 'libmp3lame', unless the video extension is 'ogv' + or 'webm', at which case the default is 'libvorbis'. + + audio_bitrate + Audio bitrate, given as a string like '50k', '500k', '3000k'. + Will determine the size/quality of audio in the output file. + Note that it mainly an indicative goal, the bitrate won't + necessarily be the this in the final file. + + preset + Sets the time that FFMPEG will spend optimizing the compression. + Choices are: ultrafast, superfast, veryfast, faster, fast, medium, + slow, slower, veryslow, placebo. Note that this does not impact + the quality of the video, only the size of the video file. So + choose ultrafast when you are in a hurry and file size does not + matter. + + threads + Number of threads to use for ffmpeg. Can speed up the writing of + the video on multicore computers. + + ffmpeg_params + Any additional ffmpeg parameters you would like to pass, as a list + of terms, like ['-option1', 'value1', '-option2', 'value2']. 
+ + write_logfile + If true, will write log files for the audio and the video. + These will be files ending with '.log' with the name of the + output file in them. + + logger + Either "bar" for progress bar or None or any Proglog logger. + + verbose (deprecated, kept for compatibility) + Formerly used for toggling messages on/off. Use logger=None now. + + Examples + ======== + + >>> from moviepy.editor import VideoFileClip + >>> clip = VideoFileClip("myvideo.mp4").subclip(100,120) + >>> clip.write_videofile("my_new_video.mp4") + >>> clip.close() + + """ + name, ext = os.path.splitext(os.path.basename(filename)) + ext = ext[1:].lower() + logger = proglog.default_bar_logger(logger) + + if codec is None: + + try: + codec = extensions_dict[ext]['codec'][0] + except KeyError: + raise ValueError("MoviePy couldn't find the codec associated " + "with the filename. Provide the 'codec' " + "parameter in write_videofile.") + + if audio_codec is None: + if ext in ['ogv', 'webm']: + audio_codec = 'libvorbis' + else: + audio_codec = 'libmp3lame' + elif audio_codec == 'raw16': + audio_codec = 'pcm_s16le' + elif audio_codec == 'raw32': + audio_codec = 'pcm_s32le' + + audiofile = audio if is_string(audio) else None + make_audio = ((audiofile is None) and (audio == True) and + (self.audio is not None)) + + if make_audio and temp_audiofile: + # The audio will be the clip's audio + audiofile = temp_audiofile + elif make_audio: + audio_ext = find_extension(audio_codec) + audiofile = (name + Clip._TEMP_FILES_PREFIX + "wvf_snd.%s" % audio_ext) + + # enough cpu for multiprocessing ? USELESS RIGHT NOW, WILL COME AGAIN + # enough_cpu = (multiprocessing.cpu_count() > 1) + logger(message="Moviepy - Building video %s." 
% filename) + if make_audio: + self.audio.write_audiofile(audiofile, audio_fps, + audio_nbytes, audio_bufsize, + audio_codec, bitrate=audio_bitrate, + write_logfile=write_logfile, + verbose=verbose, + logger=logger) + + ffmpeg_write_video(self, filename, fps, codec, + bitrate=bitrate, + preset=preset, + write_logfile=write_logfile, + audiofile=audiofile, + verbose=verbose, threads=threads, + ffmpeg_params=ffmpeg_params, + logger=logger) + + if remove_temp and make_audio: + if os.path.exists(audiofile): + os.remove(audiofile) + logger(message="Moviepy - video ready %s" % filename) + + @requires_duration + @use_clip_fps_by_default + @convert_masks_to_RGB + def write_images_sequence(self, nameformat, fps=None, verbose=True, + withmask=True, logger='bar'): + """ Writes the videoclip to a sequence of image files. + + Parameters + ----------- + + nameformat + A filename specifying the numerotation format and extension + of the pictures. For instance "frame%03d.png" for filenames + indexed with 3 digits and PNG format. Also possible: + "some_folder/frame%04d.jpeg", etc. + + fps + Number of frames per second to consider when writing the + clip. If not specified, the clip's ``fps`` attribute will + be used if it has one. + + withmask + will save the clip's mask (if any) as an alpha canal (PNGs only). + + verbose + Boolean indicating whether to print information. + + logger + Either 'bar' (progress bar) or None or any Proglog logger. + + + Returns + -------- + + names_list + A list of all the files generated. + + Notes + ------ + + The resulting image sequence can be read using e.g. the class + ``ImageSequenceClip``. + + """ + logger = proglog.default_bar_logger(logger) + logger(message='Moviepy - Writing frames %s.' 
% nameformat) + + tt = np.arange(0, self.duration, 1.0 / fps) + + filenames = [] + for i, t in logger.iter_bar(t=list(enumerate(tt))): + name = nameformat % i + filenames.append(name) + self.save_frame(name, t, withmask=withmask) + logger(message='Moviepy - Done writing frames %s.' % nameformat) + + return filenames + + @requires_duration + @convert_masks_to_RGB + def write_gif(self, filename, fps=None, program='imageio', + opt='nq', fuzz=1, verbose=True, + loop=0, dispose=False, colors=None, tempfiles=False, + logger='bar'): + """ Write the VideoClip to a GIF file. + + Converts a VideoClip into an animated GIF using ImageMagick + or ffmpeg. + + Parameters + ----------- + + filename + Name of the resulting gif file. + + fps + Number of frames per second (see note below). If it + isn't provided, then the function will look for the clip's + ``fps`` attribute (VideoFileClip, for instance, have one). + + program + Software to use for the conversion, either 'imageio' (this will use + the library FreeImage through ImageIO), or 'ImageMagick', or 'ffmpeg'. + + opt + Optimalization to apply. If program='imageio', opt must be either 'wu' + (Wu) or 'nq' (Neuquant). If program='ImageMagick', + either 'optimizeplus' or 'OptimizeTransparency'. + + fuzz + (ImageMagick only) Compresses the GIF by considering that + the colors that are less than fuzz% different are in fact + the same. + + tempfiles + Writes every frame to a file instead of passing them in the RAM. + Useful on computers with little RAM. Can only be used with + ImageMagick' or 'ffmpeg'. + + progress_bar + If True, displays a progress bar + + + Notes + ----- + + The gif will be playing the clip in real time (you can + only change the frame rate). If you want the gif to be played + slower than the clip you will use :: + + >>> # slow down clip 50% and make it a gif + >>> myClip.speedx(0.5).to_gif('myClip.gif') + + """ + # A little sketchy at the moment, maybe move all that in write_gif, + # refactor a little... 
we will see. + + if program == 'imageio': + write_gif_with_image_io(self, filename, fps=fps, opt=opt, loop=loop, + verbose=verbose, colors=colors, + logger=logger) + elif tempfiles: + # convert imageio opt variable to something that can be used with + # ImageMagick + opt = 'optimizeplus' if opt == 'nq' else 'OptimizeTransparency' + write_gif_with_tempfiles(self, filename, fps=fps, + program=program, opt=opt, fuzz=fuzz, + verbose=verbose, loop=loop, + dispose=dispose, colors=colors, + logger=logger) + else: + # convert imageio opt variable to something that can be used with + # ImageMagick + opt = 'optimizeplus' if opt == 'nq' else 'OptimizeTransparency' + write_gif(self, filename, fps=fps, program=program, + opt=opt, fuzz=fuzz, verbose=verbose, loop=loop, + dispose=dispose, colors=colors, + logger=logger) + + # ----------------------------------------------------------------- + # F I L T E R I N G + + def subfx(self, fx, ta=0, tb=None, **kwargs): + """Apply a transformation to a part of the clip. + + Returns a new clip in which the function ``fun`` (clip->clip) + has been applied to the subclip between times `ta` and `tb` + (in seconds). 
+ + Examples + --------- + + >>> # The scene between times t=3s and t=6s in ``clip`` will be + >>> # be played twice slower in ``newclip`` + >>> newclip = clip.subapply(lambda c:c.speedx(0.5) , 3,6) + + """ + left = self.subclip(0, ta) if ta else None + center = self.subclip(ta, tb).fx(fx, **kwargs) + right = self.subclip(t_start=tb) if tb else None + + clips = [c for c in (left, center, right) if c] + + # beurk, have to find other solution + from moviepy.video.compositing.concatenate import concatenate_videoclips + + return concatenate_videoclips(clips).set_start(self.start) + + # IMAGE FILTERS + + def fl_image(self, image_func, apply_to=None): + """ + Modifies the images of a clip by replacing the frame + `get_frame(t)` by another frame, `image_func(get_frame(t))` + """ + apply_to = apply_to or [] + return self.fl(lambda gf, t: image_func(gf(t)), apply_to) + + # -------------------------------------------------------------- + # C O M P O S I T I N G + + def fill_array(self, pre_array, shape=(0, 0)): + pre_shape = pre_array.shape + dx = shape[0] - pre_shape[0] + dy = shape[1] - pre_shape[1] + post_array = pre_array + if dx < 0: + post_array = pre_array[:shape[0]] + elif dx > 0: + x_1 = [[[1, 1, 1]] * pre_shape[1]] * dx + post_array = np.vstack((pre_array, x_1)) + if dy < 0: + post_array = post_array[:, :shape[1]] + elif dy > 0: + x_1 = [[[1, 1, 1]] * dy] * post_array.shape[0] + post_array = np.hstack((post_array, x_1)) + return post_array + + def blit_on(self, picture, t): + """ + Returns the result of the blit of the clip's frame at time `t` + on the given `picture`, the position of the clip being given + by the clip's ``pos`` attribute. Meant for compositing. 
+ """ + hf, wf = framesize = picture.shape[:2] + + if self.ismask and picture.max(): + return np.minimum(1, picture + self.blit_on(np.zeros(framesize), t)) + + ct = t - self.start # clip time + + # GET IMAGE AND MASK IF ANY + + img = self.get_frame(ct) + mask = self.mask.get_frame(ct) if self.mask else None + + if mask is not None and ((img.shape[0] != mask.shape[0]) or (img.shape[1] != mask.shape[1])): + img = self.fill_array(img, mask.shape) + + hi, wi = img.shape[:2] + + # SET POSITION + pos = self.pos(ct) + + # preprocess short writings of the position + if isinstance(pos, str): + pos = {'center': ['center', 'center'], + 'left': ['left', 'center'], + 'right': ['right', 'center'], + 'top': ['center', 'top'], + 'bottom': ['center', 'bottom']}[pos] + else: + pos = list(pos) + + # is the position relative (given in % of the clip's size) ? + if self.relative_pos: + for i, dim in enumerate([wf, hf]): + if not isinstance(pos[i], str): + pos[i] = dim * pos[i] + + if isinstance(pos[0], str): + D = {'left': 0, 'center': (wf - wi) / 2, 'right': wf - wi} + pos[0] = D[pos[0]] + + if isinstance(pos[1], str): + D = {'top': 0, 'center': (hf - hi) / 2, 'bottom': hf - hi} + pos[1] = D[pos[1]] + + pos = map(int, pos) + + return blit(img, picture, pos, mask=mask, ismask=self.ismask) + + def add_mask(self): + """Add a mask VideoClip to the VideoClip. + + Returns a copy of the clip with a completely opaque mask + (made of ones). This makes computations slower compared to + having a None mask but can be useful in many cases. Choose + + Set ``constant_size`` to `False` for clips with moving + image size. 
+ """ + if self.has_constant_size: + mask = ColorClip(self.size, 1.0, ismask=True) + return self.set_mask(mask.set_duration(self.duration)) + else: + make_frame = lambda t: np.ones(self.get_frame(t).shape[:2], dtype=float) + mask = VideoClip(ismask=True, make_frame=make_frame) + return self.set_mask(mask.set_duration(self.duration)) + + def on_color(self, size=None, color=(0, 0, 0), pos=None, + col_opacity=None): + """Place the clip on a colored background. + + Returns a clip made of the current clip overlaid on a color + clip of a possibly bigger size. Can serve to flatten transparent + clips. + + Parameters + ----------- + + size + Size (width, height) in pixels of the final clip. + By default it will be the size of the current clip. + + color + Background color of the final clip ([R,G,B]). + + pos + Position of the clip in the final clip. 'center' is the default + + col_opacity + Parameter in 0..1 indicating the opacity of the colored + background. + + """ + from .compositing.CompositeVideoClip import CompositeVideoClip + + if size is None: + size = self.size + if pos is None: + pos = 'center' + colorclip = ColorClip(size, color=color) + + if col_opacity is not None: + colorclip = (ColorClip(size, color=color, duration=self.duration) + .set_opacity(col_opacity)) + result = CompositeVideoClip([colorclip, self.set_position(pos)]) + else: + result = CompositeVideoClip([self.set_position(pos)], + size=size, + bg_color=color) + + if (isinstance(self, ImageClip) and (not hasattr(pos, "__call__")) + and ((self.mask is None) or isinstance(self.mask, ImageClip))): + new_result = result.to_ImageClip() + if result.mask is not None: + new_result.mask = result.mask.to_ImageClip() + return new_result.set_duration(result.duration) + + return result + + @outplace + def set_make_frame(self, mf): + """Change the clip's ``get_frame``. + + Returns a copy of the VideoClip instance, with the make_frame + attribute set to `mf`. 
+ """ + self.make_frame = mf + self.size = self.get_frame(0).shape[:2][::-1] + + @outplace + def set_audio(self, audioclip): + """Attach an AudioClip to the VideoClip. + + Returns a copy of the VideoClip instance, with the `audio` + attribute set to ``audio``, which must be an AudioClip instance. + """ + self.audio = audioclip + + @outplace + def set_mask(self, mask): + """Set the clip's mask. + + Returns a copy of the VideoClip with the mask attribute set to + ``mask``, which must be a greyscale (values in 0-1) VideoClip""" + assert mask is None or mask.ismask + self.mask = mask + + @add_mask_if_none + @outplace + def set_opacity(self, op): + """Set the opacity/transparency level of the clip. + + Returns a semi-transparent copy of the clip where the mask is + multiplied by ``op`` (any float, normally between 0 and 1). + """ + self.mask = self.mask.fl_image(lambda pic: op * pic) + + @apply_to_mask + @outplace + def set_position(self, pos, relative=False): + """Set the clip's position in compositions. + + Sets the position that the clip will have when included + in compositions. The argument ``pos`` can be either a couple + ``(x,y)`` or a function ``t-> (x,y)``. `x` and `y` mark the + location of the top left corner of the clip, and can be + of several types. + + Examples + ---------- + + >>> clip.set_position((45,150)) # x=45, y=150 + >>> + >>> # clip horizontally centered, at the top of the picture + >>> clip.set_position(("center","top")) + >>> + >>> # clip is at 40% of the width, 70% of the height: + >>> clip.set_position((0.4,0.7), relative=True) + >>> + >>> # clip's position is horizontally centered, and moving up ! 
+ >>> clip.set_position(lambda t: ('center', 50+t) ) + + """ + self.relative_pos = relative + if hasattr(pos, '__call__'): + self.pos = pos + else: + self.pos = lambda t: pos + + # -------------------------------------------------------------- + # CONVERSIONS TO OTHER TYPES + + @convert_to_seconds(['t']) + def to_ImageClip(self, t=0, with_mask=True, duration=None): + """ + Returns an ImageClip made out of the clip's frame at time ``t``, + which can be expressed in seconds (15.35), in (min, sec), + in (hour, min, sec), or as a string: '01:03:05.35'. + """ + newclip = ImageClip(self.get_frame(t), ismask=self.ismask, + duration=duration) + if with_mask and self.mask is not None: + newclip.mask = self.mask.to_ImageClip(t) + return newclip + + def to_mask(self, canal=0): + """Return a mask a video clip made from the clip.""" + if self.ismask: + return self + else: + newclip = self.fl_image(lambda pic: + 1.0 * pic[:, :, canal] / 255) + newclip.ismask = True + return newclip + + def to_RGB(self): + """Return a non-mask video clip made from the mask video clip.""" + if self.ismask: + f = lambda pic: np.dstack(3 * [255 * pic]).astype('uint8') + newclip = self.fl_image(f) + newclip.ismask = False + return newclip + else: + return self + + # ---------------------------------------------------------------- + # Audio + + @outplace + def without_audio(self): + """Remove the clip's audio. + + Return a copy of the clip with audio set to None. + + """ + self.audio = None + + @outplace + def afx(self, fun, *a, **k): + """Transform the clip's audio. + + Return a new clip whose audio has been transformed by ``fun``. 
+ + """ + self.audio = self.audio.fx(fun, *a, **k) + + +class DataVideoClip(VideoClip): + """ + Class of video clips whose successive frames are functions + of successive datasets + + Parameters + ----------- + data + A liste of datasets, each dataset being used for one frame of the clip + + data_to_frame + A function d -> video frame, where d is one element of the list `data` + + fps + Number of frames per second in the animation + + Examples + --------- + """ + + def __init__(self, data, data_to_frame, fps, ismask=False, + has_constant_size=True): + self.data = data + self.data_to_frame = data_to_frame + self.fps = fps + make_frame = lambda t: self.data_to_frame(self.data[int(self.fps*t)]) + VideoClip.__init__(self, make_frame, ismask=ismask, + duration=1.0*len(data)/fps, + has_constant_size=has_constant_size) + + +class UpdatedVideoClip(VideoClip): + """ + Class of clips whose make_frame requires some objects to + be updated. Particularly practical in science where some + algorithm needs to make some steps before a new frame can + be generated. 
+ + UpdatedVideoClips have the following make_frame: + + >>> def make_frame(t): + >>> while self.world.clip_t < t: + >>> world.update() # updates, and increases world.clip_t + >>> return world.to_frame() + + Parameters + ----------- + + world + An object with the following attributes: + - world.clip_t : the clip's time corresponding to the + world's state + - world.update() : update the world's state, (including + increasing world.clip_t of one time step) + - world.to_frame() : renders a frame depending on the world's state + + ismask + True if the clip is a WxH mask with values in 0-1 + + duration + Duration of the clip, in seconds + + """ + + def __init__(self, world, ismask=False, duration=None): + self.world = world + + def make_frame(t): + while self.world.clip_t < t: + world.update() + return world.to_frame() + + VideoClip.__init__(self, make_frame=make_frame, + ismask=ismask, duration=duration) + + +"""--------------------------------------------------------------------- + + ImageClip (base class for all 'static clips') and its subclasses + ColorClip and TextClip. + I would have liked to put these in a separate file but Python is bad + at cyclic imports. + +---------------------------------------------------------------------""" + + +class ImageClip(VideoClip): + """Class for non-moving VideoClips. + + A video clip originating from a picture. This clip will simply + display the given picture at all times. + + Examples + --------- + + >>> clip = ImageClip("myHouse.jpeg") + >>> clip = ImageClip( someArray ) # a Numpy array represent + + Parameters + ----------- + + img + Any picture file (png, tiff, jpeg, etc.) or any array representing + an RGB image (for instance a frame from a VideoClip). + + ismask + Set this parameter to `True` if the clip is a mask. + + transparent + Set this parameter to `True` (default) if you want the alpha layer + of the picture (if it exists) to be used as a mask. 
+ + Attributes + ----------- + + img + Array representing the image of the clip. + + """ + + def __init__(self, img, ismask=False, transparent=True, + fromalpha=False, duration=None): + VideoClip.__init__(self, ismask=ismask, duration=duration) + + if isinstance(img, string_types): + img = imread(img) + + if len(img.shape) == 3: # img is (now) a RGB(a) numpy array + + if img.shape[2] == 4: + if fromalpha: + img = 1.0 * img[:, :, 3] / 255 + elif ismask: + img = 1.0 * img[:, :, 0] / 255 + elif transparent: + self.mask = ImageClip( + 1.0 * img[:, :, 3] / 255, ismask=True) + img = img[:, :, :3] + elif ismask: + img = 1.0 * img[:, :, 0] / 255 + + # if the image was just a 2D mask, it should arrive here + # unchanged + self.make_frame = lambda t: img + self.size = img.shape[:2][::-1] + self.img = img + + def fl(self, fl, apply_to=None, keep_duration=True): + """General transformation filter. + + Equivalent to VideoClip.fl . The result is no more an + ImageClip, it has the class VideoClip (since it may be animated) + """ + if apply_to is None: + apply_to = [] + # When we use fl on an image clip it may become animated. + # Therefore the result is not an ImageClip, just a VideoClip. + newclip = VideoClip.fl(self, fl, apply_to=apply_to, + keep_duration=keep_duration) + newclip.__class__ = VideoClip + return newclip + + @outplace + def fl_image(self, image_func, apply_to=None): + """Image-transformation filter. + + Does the same as VideoClip.fl_image, but for ImageClip the + tranformed clip is computed once and for all at the beginning, + and not for each 'frame'. 
+ """ + if apply_to is None: + apply_to = [] + arr = image_func(self.get_frame(0)) + self.size = arr.shape[:2][::-1] + self.make_frame = lambda t: arr + self.img = arr + + for attr in apply_to: + a = getattr(self, attr, None) + if a is not None: + new_a = a.fl_image(image_func) + setattr(self, attr, new_a) + + @outplace + def fl_time(self, time_func, apply_to=None, + keep_duration=False): + """Time-transformation filter. + + Applies a transformation to the clip's timeline + (see Clip.fl_time). + + This method does nothing for ImageClips (but it may affect their + masks or their audios). The result is still an ImageClip. + """ + if apply_to is None: + apply_to = ['mask', 'audio'] + for attr in apply_to: + a = getattr(self, attr, None) + if a is not None: + new_a = a.fl_time(time_func) + setattr(self, attr, new_a) + + +# ## +# +# The old functions to_videofile, to_gif, to_images sequences have been +# replaced by the more explicite write_videofile, write_gif, etc. + +VideoClip.set_pos = deprecated_version_of(VideoClip.set_position, + 'set_pos') +VideoClip.to_videofile = deprecated_version_of(VideoClip.write_videofile, + 'to_videofile') +VideoClip.to_gif = deprecated_version_of(VideoClip.write_gif, 'to_gif') +VideoClip.to_images_sequence = deprecated_version_of(VideoClip.write_images_sequence, + 'to_images_sequence') + + +class ColorClip(ImageClip): + """An ImageClip showing just one color. + + Parameters + ----------- + + size + Size (width, height) in pixels of the clip. + + color + If argument ``ismask`` is False, ``color`` indicates + the color in RGB of the clip (default is black). If `ismask`` + is True, ``color`` must be a float between 0 and 1 (default is 1) + + ismask + Set to true if the clip will be used as a mask. + + col + Has been deprecated. Do not use. + """ + + def __init__(self, size, color=None, ismask=False, duration=None, col=None): + if col is not None: + warnings.warn("The `ColorClip` parameter `col` has been deprecated." 
+ " Please use `color` instead.", DeprecationWarning) + if color is not None: + warnings.warn("The arguments `color` and `col` have both been " + "passed to `ColorClip` so `col` has been ignored.", + UserWarning) + else: + color = col + w, h = size + shape = (h, w) if np.isscalar(color) else (h, w, len(color)) + ImageClip.__init__(self, np.tile(color, w * h).reshape(shape), + ismask=ismask, duration=duration) + + +class TextClip(ImageClip): + """Class for autogenerated text clips. + + Creates an ImageClip originating from a script-generated text image. + Requires ImageMagick. + + Parameters + ----------- + + txt + A string of the text to write. Can be replaced by argument + ``filename``. + + filename + The name of a file in which there is the text to write. + Can be provided instead of argument ``txt`` + + size + Size of the picture in pixels. Can be auto-set if + method='label', but mandatory if method='caption'. + the height can be None, it will then be auto-determined. + + bg_color + Color of the background. See ``TextClip.list('color')`` + for a list of acceptable names. + + color + Color of the text. See ``TextClip.list('color')`` for a + list of acceptable names. + + font + Name of the font to use. See ``TextClip.list('font')`` for + the list of fonts you can use on your computer. + + stroke_color + Color of the stroke (=contour line) of the text. If ``None``, + there will be no stroke. + + stroke_width + Width of the stroke, in pixels. Can be a float, like 1.5. + + method + Either 'label' (default, the picture will be autosized so as to fit + exactly the size) or 'caption' (the text will be drawn in a picture + with fixed size provided with the ``size`` argument). If `caption`, + the text will be wrapped automagically (sometimes it is buggy, not + my fault, complain to the ImageMagick crew) and can be aligned or + centered (see parameter ``align``). + + kerning + Changes the default spacing between letters. 
For + instance ``kerning=-1`` will make the letters 1 pixel nearer from + each other compared to the default spacing. + + align + center | East | West | South | North . Will only work if ``method`` + is set to ``caption`` + + transparent + ``True`` (default) if you want to take into account the + transparency in the image. + + """ + + def __init__(self, txt=None, filename=None, size=None, color='black', + bg_color='transparent', fontsize=None, font='Courier', + stroke_color=None, stroke_width=1, method='label', + kerning=None, align='center', interline=None, + tempfilename=None, temptxt=None, + transparent=True, remove_temp=True, + print_cmd=False): + + if txt is not None: + if temptxt is None: + temptxt_fd, temptxt = tempfile.mkstemp(suffix='.txt') + try: # only in Python3 will this work + os.write(temptxt_fd, bytes(txt, 'UTF8')) + except TypeError: # oops, fall back to Python2 + os.write(temptxt_fd, txt) + os.close(temptxt_fd) + txt = '@' + temptxt + else: + # use a file instead of a text. 
+ txt = "@%" + filename + + if size is not None: + size = ('' if size[0] is None else str(size[0]), + '' if size[1] is None else str(size[1])) + + cmd = ([get_setting("IMAGEMAGICK_BINARY"), + "-background", bg_color, + "-fill", color, + "-font", font]) + + if fontsize is not None: + cmd += ["-pointsize", "%d" % fontsize] + if kerning is not None: + cmd += ["-kerning", "%0.1f" % kerning] + if stroke_color is not None: + cmd += ["-stroke", stroke_color, "-strokewidth", + "%.01f" % stroke_width] + if size is not None: + cmd += ["-size", "%sx%s" % (size[0], size[1])] + if align is not None: + cmd += ["-gravity", align] + if interline is not None: + cmd += ["-interline-spacing", "%d" % interline] + + if tempfilename is None: + tempfile_fd, tempfilename = tempfile.mkstemp(suffix='.png') + os.close(tempfile_fd) + + cmd += ["%s:%s" % (method, txt), + "-type", "truecolormatte", "PNG32:%s" % tempfilename] + + if print_cmd: + print(" ".join(cmd)) + + try: + subprocess_call(cmd, logger=None) + except (IOError, OSError) as err: + error = ("MoviePy Error: creation of %s failed because of the " + "following error:\n\n%s.\n\n." % (filename, str(err)) + + ("This error can be due to the fact that ImageMagick " + "is not installed on your computer, or (for Windows " + "users) that you didn't specify the path to the " + "ImageMagick binary in file conf.py, or that the path " + "you specified is incorrect")) + raise IOError(error) + + ImageClip.__init__(self, tempfilename, transparent=transparent) + self.txt = txt + self.color = color + self.stroke_color = stroke_color + + if remove_temp: + if os.path.exists(tempfilename): + os.remove(tempfilename) + if os.path.exists(temptxt): + os.remove(temptxt) + + @staticmethod + def list(arg): + """Returns the list of all valid entries for the argument of + ``TextClip`` given (can be ``font``, ``color``, etc...) 
""" + + popen_params = {"stdout": sp.PIPE, + "stderr": DEVNULL, + "stdin": DEVNULL} + + if os.name == "nt": + popen_params["creationflags"] = 0x08000000 + + process = sp.Popen([get_setting("IMAGEMAGICK_BINARY"), + '-list', arg], **popen_params) + result = process.communicate()[0] + lines = result.splitlines() + + if arg == 'font': + return [l.decode('UTF-8')[8:] for l in lines if l.startswith(b" Font:")] + elif arg == 'color': + return [l.split(b" ")[0] for l in lines[2:]] + else: + raise Exception("Moviepy:Error! Argument must equal " + "'font' or 'color'") + + @staticmethod + def search(string, arg): + """Returns the of all valid entries which contain ``string`` for the + argument ``arg`` of ``TextClip``, for instance + + >>> # Find all the available fonts which contain "Courier" + >>> print ( TextClip.search('Courier', 'font') ) + + """ + string = string.lower() + names_list = TextClip.list(arg) + return [name for name in names_list if string in name.lower()] diff --git a/.venv/Lib/site-packages/moviepy/video/__init__.py b/.venv/Lib/site-packages/moviepy/video/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/moviepy/video/__pycache__/VideoClip.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/__pycache__/VideoClip.cpython-311.pyc new file mode 100644 index 00000000..edd0dade Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/__pycache__/VideoClip.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..b09eb3eb Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/compositing/CompositeVideoClip.py b/.venv/Lib/site-packages/moviepy/video/compositing/CompositeVideoClip.py new file mode 100644 index 00000000..080cd377 --- 
/dev/null +++ b/.venv/Lib/site-packages/moviepy/video/compositing/CompositeVideoClip.py @@ -0,0 +1,176 @@ +import numpy as np + +from moviepy.audio.AudioClip import CompositeAudioClip +from moviepy.video.VideoClip import ColorClip, VideoClip + +# CompositeVideoClip + +class CompositeVideoClip(VideoClip): + + """ + + A VideoClip made of other videoclips displayed together. This is the + base class for most compositions. + + Parameters + ---------- + + size + The size (height x width) of the final clip. + + clips + A list of videoclips. Each clip of the list will + be displayed below the clips appearing after it in the list. + For each clip: + + - The attribute ``pos`` determines where the clip is placed. + See ``VideoClip.set_pos`` + - The mask of the clip determines which parts are visible. + + Finally, if all the clips in the list have their ``duration`` + attribute set, then the duration of the composite video clip + is computed automatically + + bg_color + Color for the unmasked and unfilled regions. Set to None for these + regions to be transparent (will be slower). + + use_bgclip + Set to True if the first clip in the list should be used as the + 'background' on which all other clips are blitted. That first clip must + have the same size as the final clip. If it has no transparency, the final + clip will have no mask. + + The clip with the highest FPS will be the FPS of the composite clip. 
+ + """ + + def __init__(self, clips, size=None, bg_color=None, use_bgclip=False, + ismask=False): + + if size is None: + size = clips[0].size + + + if use_bgclip and (clips[0].mask is None): + transparent = False + else: + transparent = (bg_color is None) + + if bg_color is None: + bg_color = 0.0 if ismask else (0, 0, 0) + + fpss = [c.fps for c in clips if getattr(c, 'fps', None)] + self.fps = max(fpss) if fpss else None + + VideoClip.__init__(self) + + self.size = size + self.ismask = ismask + self.clips = clips + self.bg_color = bg_color + + if use_bgclip: + self.bg = clips[0] + self.clips = clips[1:] + self.created_bg = False + else: + self.clips = clips + self.bg = ColorClip(size, color=self.bg_color) + self.created_bg = True + + + # compute duration + ends = [c.end for c in self.clips] + if None not in ends: + duration = max(ends) + self.duration = duration + self.end = duration + + # compute audio + audioclips = [v.audio for v in self.clips if v.audio is not None] + if audioclips: + self.audio = CompositeAudioClip(audioclips) + + # compute mask if necessary + if transparent: + maskclips = [(c.mask if (c.mask is not None) else + c.add_mask().mask).set_position(c.pos) + .set_end(c.end).set_start(c.start, change_end=False) + for c in self.clips] + + self.mask = CompositeVideoClip(maskclips,self.size, ismask=True, + bg_color=0.0) + + def make_frame(t): + """ The clips playing at time `t` are blitted over one + another. """ + + f = self.bg.get_frame(t) + for c in self.playing_clips(t): + f = c.blit_on(f, t) + return f + + self.make_frame = make_frame + + def playing_clips(self, t=0): + """ Returns a list of the clips in the composite clips that are + actually playing at the given time `t`. """ + return [c for c in self.clips if c.is_playing(t)] + + def close(self): + if self.created_bg and self.bg: + # Only close the background clip if it was locally created. + # Otherwise, it remains the job of whoever created it. 
+ self.bg.close() + self.bg = None + if hasattr(self, "audio") and self.audio: + self.audio.close() + self.audio = None + + + +def clips_array(array, rows_widths=None, cols_widths=None, + bg_color = None): + + """ + + rows_widths + widths of the different rows in pixels. If None, is set automatically. + + cols_widths + widths of the different colums in pixels. If None, is set automatically. + + cols_widths + + bg_color + Fill color for the masked and unfilled regions. Set to None for these + regions to be transparent (will be slower). + + """ + + array = np.array(array) + sizes_array = np.array([[c.size for c in line] for line in array]) + + # find row width and col_widths automatically if not provided + if rows_widths is None: + rows_widths = sizes_array[:,:,1].max(axis=1) + if cols_widths is None: + cols_widths = sizes_array[:,:,0].max(axis=0) + + xx = np.cumsum([0]+list(cols_widths)) + yy = np.cumsum([0]+list(rows_widths)) + + for j, (x, cw) in enumerate(zip(xx[:-1], cols_widths)): + for i, (y, rw) in enumerate(zip(yy[:-1], rows_widths)): + clip = array[i, j] + w, h = clip.size + if (w < cw) or (h < rw): + clip = (CompositeVideoClip([clip.set_position('center')], + size = (cw,rw), + bg_color = bg_color). 
+ set_duration(clip.duration)) + + array[i, j] = clip.set_position((x, y)) + + return CompositeVideoClip(array.flatten(), size=(xx[-1], yy[-1]), bg_color=bg_color) diff --git a/.venv/Lib/site-packages/moviepy/video/compositing/__init__.py b/.venv/Lib/site-packages/moviepy/video/compositing/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/moviepy/video/compositing/__pycache__/CompositeVideoClip.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/compositing/__pycache__/CompositeVideoClip.cpython-311.pyc new file mode 100644 index 00000000..04eb2948 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/compositing/__pycache__/CompositeVideoClip.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/compositing/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/compositing/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..1bc26574 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/compositing/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/compositing/__pycache__/concatenate.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/compositing/__pycache__/concatenate.cpython-311.pyc new file mode 100644 index 00000000..eeacbba4 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/compositing/__pycache__/concatenate.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/compositing/__pycache__/on_color.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/compositing/__pycache__/on_color.cpython-311.pyc new file mode 100644 index 00000000..aeeec619 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/compositing/__pycache__/on_color.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/compositing/__pycache__/positioning.cpython-311.pyc 
b/.venv/Lib/site-packages/moviepy/video/compositing/__pycache__/positioning.cpython-311.pyc new file mode 100644 index 00000000..875f4fc3 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/compositing/__pycache__/positioning.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/compositing/__pycache__/transitions.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/compositing/__pycache__/transitions.cpython-311.pyc new file mode 100644 index 00000000..e624ec89 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/compositing/__pycache__/transitions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/compositing/concatenate.py b/.venv/Lib/site-packages/moviepy/video/compositing/concatenate.py new file mode 100644 index 00000000..53526087 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/compositing/concatenate.py @@ -0,0 +1,121 @@ +import numpy as np + +from moviepy.audio.AudioClip import CompositeAudioClip +from moviepy.tools import deprecated_version_of +from moviepy.video.compositing.CompositeVideoClip import CompositeVideoClip +from moviepy.video.compositing.on_color import on_color +from moviepy.video.VideoClip import ColorClip, VideoClip + +try: # Python 2 + reduce +except NameError: # Python 3 + from functools import reduce + + +def concatenate_videoclips(clips, method="chain", transition=None, + bg_color=None, ismask=False, padding = 0): + """ Concatenates several video clips + + Returns a video clip made by clip by concatenating several video clips. + (Concatenated means that they will be played one after another). + + There are two methods: + + - method="chain": will produce a clip that simply outputs + the frames of the succesive clips, without any correction if they are + not of the same size of anything. 
If none of the clips have masks the + resulting clip has no mask, else the mask is a concatenation of masks + (using completely opaque for clips that don't have masks, obviously). + If you have clips of different size and you want to write directly the + result of the concatenation to a file, use the method "compose" instead. + + - method="compose", if the clips do not have the same + resolution, the final resolution will be such that no clip has + to be resized. + As a consequence the final clip has the height of the highest + clip and the width of the widest clip of the list. All the + clips with smaller dimensions will appear centered. The border + will be transparent if mask=True, else it will be of the + color specified by ``bg_color``. + + The clip with the highest FPS will be the FPS of the result clip. + + Parameters + ----------- + clips + A list of video clips which must all have their ``duration`` + attributes set. + method + "chain" or "compose": see above. + transition + A clip that will be played between each two clips of the list. + + bg_color + Only for method='compose'. Color of the background. + Set to None for a transparent clip + + padding + Only for method='compose'. Duration during two consecutive clips. + Note that for negative padding, a clip will partly play at the same + time as the clip it follows (negative padding is cool for clips who fade + in on one another). A non-null padding automatically sets the method to + `compose`. 
+ + """ + + if transition is not None: + l = [[v, transition] for v in clips[:-1]] + clips = reduce(lambda x, y: x + y, l) + [clips[-1]] + transition = None + + tt = np.cumsum([0] + [c.duration for c in clips]) + + sizes = [v.size for v in clips] + + w = max(r[0] for r in sizes) + h = max(r[1] for r in sizes) + + tt = np.maximum(0, tt + padding * np.arange(len(tt))) + + if method == "chain": + def make_frame(t): + i = max([i for i, e in enumerate(tt) if e <= t]) + return clips[i].get_frame(t - tt[i]) + + def get_mask(c): + mask = c.mask or ColorClip([1, 1], color=1, ismask=True) + if mask.duration is None: + mask.duration = c.duration + return mask + + result = VideoClip(ismask = ismask, make_frame = make_frame) + if any([c.mask is not None for c in clips]): + masks = [get_mask(c) for c in clips] + result.mask = concatenate_videoclips(masks, method="chain", + ismask=True) + result.clips = clips + elif method == "compose": + result = CompositeVideoClip( [c.set_start(t).set_position('center') + for (c, t) in zip(clips, tt)], + size = (w, h), bg_color=bg_color, ismask=ismask) + else: + raise Exception("Moviepy Error: The 'method' argument of " + "concatenate_videoclips must be 'chain' or 'compose'") + + result.tt = tt + + result.start_times = tt[:-1] + result.start, result.duration, result.end = 0, tt[-1] , tt[-1] + + audio_t = [(c.audio, t) for c, t in zip(clips,tt) if c.audio is not None] + if audio_t: + result.audio = CompositeAudioClip([a.set_start(t) + for a,t in audio_t]) + + fpss = [c.fps for c in clips if getattr(c, 'fps', None) is not None] + result.fps = max(fpss) if fpss else None + return result + + +concatenate = deprecated_version_of(concatenate_videoclips, + oldname="concatenate") diff --git a/.venv/Lib/site-packages/moviepy/video/compositing/on_color.py b/.venv/Lib/site-packages/moviepy/video/compositing/on_color.py new file mode 100644 index 00000000..c5fcb147 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/compositing/on_color.py @@ -0,0 
+1,27 @@ +from moviepy.video.compositing.CompositeVideoClip import CompositeVideoClip +from moviepy.video.VideoClip import ColorClip + + +def on_color(clip, size=None, color=(0, 0, 0), pos=None, col_opacity=None): + """ + Returns a clip made of the current clip overlaid on a color + clip of a possibly bigger size. Can serve to flatten transparent + clips (ideal for previewing clips with masks). + + :param size: size of the final clip. By default it will be the + size of the current clip. + :param bg_color: the background color of the final clip + :param pos: the position of the clip in the final clip. + :param col_opacity: should the added zones be transparent ? + """ + + if size is None: + size = clip.size + if pos is None: + pos = 'center' + colorclip = ColorClip(size, color=color) + if col_opacity: + colorclip = colorclip.with_mask().set_opacity(col_opacity) + + return CompositeVideoClip([colorclip, clip.set_position(pos)], + transparent=(col_opacity is not None)) diff --git a/.venv/Lib/site-packages/moviepy/video/compositing/positioning.py b/.venv/Lib/site-packages/moviepy/video/compositing/positioning.py new file mode 100644 index 00000000..910e48b5 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/compositing/positioning.py @@ -0,0 +1,5 @@ +""" +This module provides classes that make positioning easy +""" + +# class ClipPosition: diff --git a/.venv/Lib/site-packages/moviepy/video/compositing/transitions.py b/.venv/Lib/site-packages/moviepy/video/compositing/transitions.py new file mode 100644 index 00000000..db381512 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/compositing/transitions.py @@ -0,0 +1,126 @@ +""" +Here is the current catalogue. These are meant +to be used with clip.fx. There are available as transfx.crossfadein etc. 
+if you load them with ``from moviepy.editor import *`` +""" + +from moviepy.decorators import add_mask_if_none, requires_duration +from moviepy.video.fx.fadein import fadein +from moviepy.video.fx.fadeout import fadeout + +from .CompositeVideoClip import CompositeVideoClip + + +@requires_duration +@add_mask_if_none +def crossfadein(clip, duration): + """ Makes the clip appear progressively, over ``duration`` seconds. + Only works when the clip is included in a CompositeVideoClip. + """ + clip.mask.duration = clip.duration + newclip = clip.copy() + newclip.mask = clip.mask.fx(fadein, duration) + return newclip + + +@requires_duration +@add_mask_if_none +def crossfadeout(clip, duration): + """ Makes the clip disappear progressively, over ``duration`` seconds. + Only works when the clip is included in a CompositeVideoClip. + """ + clip.mask.duration = clip.duration + newclip = clip.copy() + newclip.mask = clip.mask.fx(fadeout, duration) + return newclip + + +def slide_in(clip, duration, side): + """ Makes the clip arrive from one side of the screen. + + Only works when the clip is included in a CompositeVideoClip, + and if the clip has the same size as the whole composition. + + Parameters + =========== + + clip + A video clip. + + duration + Time taken for the clip to be fully visible + + side + Side of the screen where the clip comes from. One of + 'top' | 'bottom' | 'left' | 'right' + + Examples + ========= + + >>> from moviepy.editor import * + >>> clips = [... 
make a list of clips] + >>> slided_clips = [CompositeVideoClip([ + clip.fx(transfx.slide_in, duration=1, side='left')]) + for clip in clips] + >>> final_clip = concatenate( slided_clips, padding=-1) + + """ + w, h = clip.size + pos_dict = {'left': lambda t: (min(0, w*(t/duration-1)), 'center'), + 'right': lambda t: (max(0, w*(1-t/duration)), 'center'), + 'top': lambda t: ('center', min(0, h*(t/duration-1))), + 'bottom': lambda t: ('center', max(0, h*(1-t/duration)))} + + return clip.set_position(pos_dict[side]) + + +@requires_duration +def slide_out(clip, duration, side): + """ Makes the clip go away by one side of the screen. + + Only works when the clip is included in a CompositeVideoClip, + and if the clip has the same size as the whole composition. + + Parameters + =========== + + clip + A video clip. + + duration + Time taken for the clip to fully disappear. + + side + Side of the screen where the clip goes. One of + 'top' | 'bottom' | 'left' | 'right' + + Examples + ========= + + >>> from moviepy.editor import * + >>> clips = [... make a list of clips] + >>> slided_clips = [CompositeVideoClip([ + clip.fx(transfx.slide_out, duration=1, side='left')]) + for clip in clips] + >>> final_clip = concatenate( slided_clips, padding=-1) + + """ + + w,h = clip.size + ts = clip.duration - duration # start time of the effect. + pos_dict = {'left' : lambda t: (min(0,w*(-(t-ts)/duration)),'center'), + 'right' : lambda t: (max(0,w*((t-ts)/duration)),'center'), + 'top' : lambda t: ('center',min(0,h*(-(t-ts)/duration))), + 'bottom': lambda t: ('center',max(0,h*((t-ts)/duration))) } + + return clip.set_position(pos_dict[side]) + + +@requires_duration +def make_loopable(clip, cross_duration): + """ Makes the clip fade in progressively at its own end, this way + it can be looped indefinitely. ``cross`` is the duration in seconds + of the fade-in. 
""" + d = clip.duration + clip2 = clip.fx(crossfadein, cross_duration).set_start(d - cross_duration) + return CompositeVideoClip([clip, clip2]).subclip(cross_duration, d) diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__init__.py b/.venv/Lib/site-packages/moviepy/video/fx/__init__.py new file mode 100644 index 00000000..67fe3b19 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/fx/__init__.py @@ -0,0 +1,4 @@ +""" +This module contains transformation functions (clip->clip) +One file for one fx. The file's name is the fx's name +""" diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..5d757b92 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/accel_decel.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/accel_decel.cpython-311.pyc new file mode 100644 index 00000000..3a159d52 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/accel_decel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/blackwhite.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/blackwhite.cpython-311.pyc new file mode 100644 index 00000000..a93fc43c Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/blackwhite.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/blink.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/blink.cpython-311.pyc new file mode 100644 index 00000000..5a832b1c Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/blink.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/colorx.cpython-311.pyc 
b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/colorx.cpython-311.pyc new file mode 100644 index 00000000..ca67819e Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/colorx.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/crop.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/crop.cpython-311.pyc new file mode 100644 index 00000000..f3a89cf0 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/crop.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/even_size.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/even_size.cpython-311.pyc new file mode 100644 index 00000000..9f9432d4 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/even_size.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/fadein.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/fadein.cpython-311.pyc new file mode 100644 index 00000000..fa24871c Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/fadein.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/fadeout.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/fadeout.cpython-311.pyc new file mode 100644 index 00000000..e83dff7f Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/fadeout.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/freeze.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/freeze.cpython-311.pyc new file mode 100644 index 00000000..ef963f9d Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/freeze.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/freeze_region.cpython-311.pyc 
b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/freeze_region.cpython-311.pyc new file mode 100644 index 00000000..30f28a9e Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/freeze_region.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/gamma_corr.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/gamma_corr.cpython-311.pyc new file mode 100644 index 00000000..ef3781a3 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/gamma_corr.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/headblur.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/headblur.cpython-311.pyc new file mode 100644 index 00000000..8a8b261e Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/headblur.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/invert_colors.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/invert_colors.cpython-311.pyc new file mode 100644 index 00000000..c93f7059 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/invert_colors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/loop.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/loop.cpython-311.pyc new file mode 100644 index 00000000..dd5beb93 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/loop.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/lum_contrast.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/lum_contrast.cpython-311.pyc new file mode 100644 index 00000000..77f76346 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/lum_contrast.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/make_loopable.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/make_loopable.cpython-311.pyc new file mode 100644 index 00000000..423a02a0 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/make_loopable.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/margin.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/margin.cpython-311.pyc new file mode 100644 index 00000000..83fec236 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/margin.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/mask_and.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/mask_and.cpython-311.pyc new file mode 100644 index 00000000..83f35e9c Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/mask_and.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/mask_color.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/mask_color.cpython-311.pyc new file mode 100644 index 00000000..894014dd Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/mask_color.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/mask_or.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/mask_or.cpython-311.pyc new file mode 100644 index 00000000..b3805a0c Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/mask_or.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/mirror_x.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/mirror_x.cpython-311.pyc new file mode 100644 index 00000000..0b0c96dd Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/mirror_x.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/mirror_y.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/mirror_y.cpython-311.pyc new file mode 100644 index 00000000..c359901a Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/mirror_y.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/painting.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/painting.cpython-311.pyc new file mode 100644 index 00000000..fc8ba203 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/painting.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/resize.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/resize.cpython-311.pyc new file mode 100644 index 00000000..89aa6c13 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/resize.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/rotate.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/rotate.cpython-311.pyc new file mode 100644 index 00000000..4a3d9c99 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/rotate.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/scroll.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/scroll.cpython-311.pyc new file mode 100644 index 00000000..7db99db9 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/scroll.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/speedx.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/speedx.cpython-311.pyc new file mode 100644 index 00000000..776db3a9 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/speedx.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/supersample.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/supersample.cpython-311.pyc new file mode 100644 index 00000000..3e9a06d9 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/supersample.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/time_mirror.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/time_mirror.cpython-311.pyc new file mode 100644 index 00000000..084a99c0 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/time_mirror.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/time_symmetrize.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/time_symmetrize.cpython-311.pyc new file mode 100644 index 00000000..75d4bc15 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/fx/__pycache__/time_symmetrize.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/fx/accel_decel.py b/.venv/Lib/site-packages/moviepy/video/fx/accel_decel.py new file mode 100644 index 00000000..6fbb4cca --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/fx/accel_decel.py @@ -0,0 +1,44 @@ +def f_accel_decel(t, old_d, new_d, abruptness=1, soonness=1.0): + """ + abruptness + negative abruptness (>-1): speed up down up + zero abruptness : no effect + positive abruptness: speed down up down + + soonness + for positive abruptness, determines how soon the + speedup occurs (0=.5)*f2(t) + + return old_d*_f((t/new_d)**soonness) + + +def accel_decel(clip, new_duration=None, abruptness=1.0, soonness=1.0): + """ + + new_duration + If None, will be that of the current clip. 
+ + abruptness + negative abruptness (>-1): speed up down up + zero abruptness : no effect + positive abruptness: speed down up down + + soonness + for positive abruptness, determines how soon the + speedup occurs (0>> crop(clip, x1=50, y1=60, x2=460, y2=275) + + Only remove the part above y=30: + + >>> crop(clip, y1=30) + + Crop a rectangle that starts 10 pixels left and is 200px wide + + >>> crop(clip, x1=10, width=200) + + Crop a rectangle centered in x,y=(300,400), width=50, height=150 : + + >>> crop(clip, x_center=300 , y_center=400, + width=50, height=150) + + Any combination of the above should work, like for this rectangle + centered in x=300, with explicit y-boundaries: + + >>> crop(x_center=300, width=400, y1=100, y2=600) + + """ + + if width and x1 is not None: + x2 = x1 + width + elif width and x2 is not None: + x1 = x2 - width + + if height and y1 is not None: + y2 = y1 + height + elif height and y2 is not None: + y1 = y2 - height + + if x_center: + x1, x2 = x_center - width / 2, x_center + width / 2 + + if y_center: + y1, y2 = y_center - height / 2, y_center + height / 2 + + x1 = x1 or 0 + y1 = y1 or 0 + x2 = x2 or clip.size[0] + y2 = y2 or clip.size[1] + + return clip.fl_image(lambda pic: pic[int(y1) : int(y2), int(x1) : int(x2)], apply_to=["mask"]) diff --git a/.venv/Lib/site-packages/moviepy/video/fx/even_size.py b/.venv/Lib/site-packages/moviepy/video/fx/even_size.py new file mode 100644 index 00000000..c7290b8e --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/fx/even_size.py @@ -0,0 +1,22 @@ +from moviepy.decorators import apply_to_mask + + +@apply_to_mask +def even_size(clip): + """ + Crops the clip to make dimensions even. 
+ """ + w, h = clip.size + w_even = w % 2 == 0 + h_even = h % 2 == 0 + if w_even and h_even: + return clip + + if not w_even and not h_even: + fl_image = lambda a : a[:-1,:-1,:] + elif w_even: + fl_image = lambda a : a[:,:-1,:] + else: + fl_image = lambda a : a[:-1,:,:] + + return clip.fl_image(fl_image) diff --git a/.venv/Lib/site-packages/moviepy/video/fx/fadein.py b/.venv/Lib/site-packages/moviepy/video/fx/fadein.py new file mode 100644 index 00000000..c9fda925 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/fx/fadein.py @@ -0,0 +1,25 @@ +import numpy as np + + +def fadein(clip, duration, initial_color=None): + """ + Makes the clip progressively appear from some color (black by default), + over ``duration`` seconds at the beginning of the clip. Can be used for + masks too, where the initial color must be a number between 0 and 1. + For cross-fading (progressive appearance or disappearance of a clip + over another clip, see ``composition.crossfade`` + """ + + if initial_color is None: + initial_color = 0 if clip.ismask else [0,0,0] + + initial_color = np.array(initial_color) + + def fl(gf, t): + if t>=duration: + return gf(t) + else: + fading = (1.0*t/duration) + return fading*gf(t) + (1-fading)*initial_color + + return clip.fl(fl) diff --git a/.venv/Lib/site-packages/moviepy/video/fx/fadeout.py b/.venv/Lib/site-packages/moviepy/video/fx/fadeout.py new file mode 100644 index 00000000..66c7230a --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/fx/fadeout.py @@ -0,0 +1,28 @@ +import numpy as np + +from moviepy.decorators import requires_duration + + +@requires_duration +def fadeout(clip, duration, final_color=None): + """ + Makes the clip progressively fade to some color (black by default), + over ``duration`` seconds at the end of the clip. Can be used for + masks too, where the final color must be a number between 0 and 1. 
+ For cross-fading (progressive appearance or disappearance of a clip + over another clip, see ``composition.crossfade`` + """ + + if final_color is None: + final_color = 0 if clip.ismask else [0,0,0] + + final_color = np.array(final_color) + + def fl(gf, t): + if (clip.duration-t)>=duration: + return gf(t) + else: + fading = 1.0 * (clip.duration - t) / duration + return fading*gf(t) + (1-fading)*final_color + + return clip.fl(fl) diff --git a/.venv/Lib/site-packages/moviepy/video/fx/freeze.py b/.venv/Lib/site-packages/moviepy/video/fx/freeze.py new file mode 100644 index 00000000..ff4d4aa8 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/fx/freeze.py @@ -0,0 +1,29 @@ +from moviepy.decorators import requires_duration +from moviepy.video.compositing.concatenate import concatenate_videoclips +from moviepy.video.VideoClip import ImageClip + + +@requires_duration +def freeze(clip, t=0, freeze_duration=None, total_duration=None, + padding_end=0): + """ Momentarily freeze the clip at time t. + + Set `t='end'` to freeze the clip at the end (actually it will freeze on the + frame at time clip.duration - padding_end seconds). + With ``duration``you can specify the duration of the freeze. + With ``total_duration`` you can specify the total duration of + the clip and the freeze (i.e. the duration of the freeze is + automatically calculated). One of them must be provided. 
+ """ + + if t=='end': + t = clip.duration - padding_end + + if freeze_duration is None: + freeze_duration = total_duration - clip.duration + + before = [clip.subclip(0,t)] if (t!=0) else [] + freeze = [clip.to_ImageClip(t).set_duration(freeze_duration)] + after = [clip.subclip(t)] if (t !=clip.duration) else [] + return concatenate_videoclips(before + freeze + after) + \ No newline at end of file diff --git a/.venv/Lib/site-packages/moviepy/video/fx/freeze_region.py b/.venv/Lib/site-packages/moviepy/video/fx/freeze_region.py new file mode 100644 index 00000000..a8ec7cf0 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/fx/freeze_region.py @@ -0,0 +1,57 @@ +from moviepy.decorators import apply_to_mask +from moviepy.video.compositing.CompositeVideoClip import CompositeVideoClip + +from .crop import crop + + +#@apply_to_mask +def freeze_region(clip, t=0, region=None, outside_region=None, mask=None): + """ Freezes one region of the clip while the rest remains animated. + + You can choose one of three methods by providing either `region`, + `outside_region`, or `mask`. + + Parameters + ----------- + + t + Time at which to freeze the freezed region. + + region + A tuple (x1, y1, x2, y2) defining the region of the screen (in pixels) + which will be freezed. You can provide outside_region or mask instead. + + outside_region + A tuple (x1, y1, x2, y2) defining the region of the screen (in pixels) + which will be the only non-freezed region. + + mask + If not None, will overlay a freezed version of the clip on the current clip, + with the provided mask. In other words, the "visible" pixels in the mask + indicate the freezed region in the final picture. 
+ + """ + + if region is not None: + + x1, y1, x2, y2 = region + freeze = (clip.fx(crop, *region) + .to_ImageClip(t=t) + .set_duration(clip.duration) + .set_position((x1,y1))) + return CompositeVideoClip([clip, freeze]) + + elif outside_region is not None: + + x1, y1, x2, y2 = outside_region + animated_region = (clip.fx(crop, *outside_region) + .set_position((x1,y1))) + freeze = (clip.to_ImageClip(t=t) + .set_duration(clip.duration)) + return CompositeVideoClip([freeze, animated_region]) + + elif mask is not None: + freeze = (clip.to_ImageClip(t=t) + .set_duration(clip.duration) + .set_mask(mask)) + return CompositeVideoClip([clip, freeze]) diff --git a/.venv/Lib/site-packages/moviepy/video/fx/gamma_corr.py b/.venv/Lib/site-packages/moviepy/video/fx/gamma_corr.py new file mode 100644 index 00000000..5668698a --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/fx/gamma_corr.py @@ -0,0 +1,8 @@ + +def gamma_corr(clip, gamma): + """ Gamma-correction of a video clip """ + def fl(im): + corrected = (255*(1.0*im/255)**gamma) + return corrected.astype('uint8') + + return clip.fl_image(fl) diff --git a/.venv/Lib/site-packages/moviepy/video/fx/headblur.py b/.venv/Lib/site-packages/moviepy/video/fx/headblur.py new file mode 100644 index 00000000..2d46d5a7 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/fx/headblur.py @@ -0,0 +1,58 @@ +import numpy as np + +#------- CHECKING DEPENDENCIES ----------------------------------------- +try: + import cv2 + headblur_possible = True + if cv2.__version__ >= '3.0.0': + cv2.CV_AA=cv2.LINE_AA +except: + headblur_possible = False +#----------------------------------------------------------------------- + + +def headblur(clip,fx,fy,r_zone,r_blur=None): + """ + Returns a filter that will blurr a moving part (a head ?) of + the frames. The position of the blur at time t is + defined by (fx(t), fy(t)), the radius of the blurring + by ``r_zone`` and the intensity of the blurring by ``r_blur``. 
+ Requires OpenCV for the circling and the blurring. + Automatically deals with the case where part of the image goes + offscreen. + """ + + if r_blur is None: r_blur = 2*r_zone/3 + + def fl(gf,t): + + im = gf(t) + h,w,d = im.shape + x,y = int(fx(t)),int(fy(t)) + x1,x2 = max(0,x-r_zone),min(x+r_zone,w) + y1,y2 = max(0,y-r_zone),min(y+r_zone,h) + region_size = y2-y1,x2-x1 + + mask = np.zeros(region_size).astype('uint8') + cv2.circle(mask, (r_zone,r_zone), r_zone, 255, -1, + lineType=cv2.CV_AA) + + mask = np.dstack(3*[(1.0/255)*mask]) + + orig = im[y1:y2, x1:x2] + blurred = cv2.blur(orig,(r_blur, r_blur)) + im[y1:y2, x1:x2] = mask*blurred + (1-mask)*orig + return im + + return clip.fl(fl) + + + +#------- OVERWRITE IF REQUIREMENTS NOT MET ----------------------------- +if not headblur_possible: + doc = headblur.__doc__ + def headblur(clip,fx,fy,r_zone,r_blur=None): + raise IOError("fx painting needs opencv") + + headblur.__doc__ = doc +#----------------------------------------------------------------------- diff --git a/.venv/Lib/site-packages/moviepy/video/fx/invert_colors.py b/.venv/Lib/site-packages/moviepy/video/fx/invert_colors.py new file mode 100644 index 00000000..0fb948eb --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/fx/invert_colors.py @@ -0,0 +1,8 @@ +def invert_colors(clip): + """ Returns the color-inversed clip. + + The values of all pixels are replaced with (255-v) or (1-v) for masks + Black becomes white, green becomes purple, etc. 
+ """ + maxi = (1.0 if clip.ismask else 255) + return clip.fl_image(lambda f : maxi - f) diff --git a/.venv/Lib/site-packages/moviepy/video/fx/loop.py b/.venv/Lib/site-packages/moviepy/video/fx/loop.py new file mode 100644 index 00000000..b0c04beb --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/fx/loop.py @@ -0,0 +1,26 @@ +from moviepy.decorators import apply_to_audio, apply_to_mask, requires_duration + + +@requires_duration +@apply_to_mask +@apply_to_audio +def loop(self, n=None, duration=None): + """ + Returns a clip that plays the current clip in an infinite loop. + Ideal for clips coming from gifs. + + Parameters + ------------ + n + Number of times the clip should be played. If `None` the + the clip will loop indefinitely (i.e. with no set duration). + + duration + Total duration of the clip. Can be specified instead of n. + """ + result = self.fl_time(lambda t: t % self.duration) + if n: + duration = n*self.duration + if duration: + result = result.set_duration(duration) + return result diff --git a/.venv/Lib/site-packages/moviepy/video/fx/lum_contrast.py b/.venv/Lib/site-packages/moviepy/video/fx/lum_contrast.py new file mode 100644 index 00000000..a1ae55cf --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/fx/lum_contrast.py @@ -0,0 +1,11 @@ +def lum_contrast(clip, lum = 0, contrast=0, contrast_thr=127): + """ luminosity-contrast correction of a clip """ + + def fl_image(im): + im = 1.0*im # float conversion + corrected = im + lum + contrast*(im-float(contrast_thr)) + corrected[corrected < 0] = 0 + corrected[corrected > 255] = 255 + return corrected.astype('uint8') + + return clip.fl_image(fl_image) diff --git a/.venv/Lib/site-packages/moviepy/video/fx/make_loopable.py b/.venv/Lib/site-packages/moviepy/video/fx/make_loopable.py new file mode 100644 index 00000000..485887c7 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/fx/make_loopable.py @@ -0,0 +1,14 @@ +import moviepy.video.compositing.transitions as transfx +from 
moviepy.video.compositing.CompositeVideoClip import CompositeVideoClip + + +def make_loopable(clip, cross): + """ + Makes the clip fade in progressively at its own end, this way + it can be looped indefinitely. ``cross`` is the duration in seconds + of the fade-in. """ + d = clip.duration + clip2 = clip.fx(transfx.crossfadein, cross).\ + set_start(d - cross) + return CompositeVideoClip([ clip, clip2 ]).\ + subclip(cross,d) diff --git a/.venv/Lib/site-packages/moviepy/video/fx/margin.py b/.venv/Lib/site-packages/moviepy/video/fx/margin.py new file mode 100644 index 00000000..20dd1930 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/fx/margin.py @@ -0,0 +1,58 @@ +import numpy as np + +from moviepy.decorators import apply_to_mask +from moviepy.video.VideoClip import ImageClip + + +@apply_to_mask +def margin(clip, mar=None, left=0, right=0, top=0, + bottom=0, color=(0, 0, 0), opacity = 1.0): + """ + Draws an external margin all around the frame. + + :param mar: if not ``None``, then the new clip has a margin of + size ``mar`` in pixels on the left, right, top, and bottom. + + :param left, right, top, bottom: width of the margin in pixel + in these directions. + + :param color: color of the margin. + + :param mask_margin: value of the mask on the margin. Setting + this value to 0 yields transparent margins. 
+ + """ + + if (opacity != 1.0) and (clip.mask is None) and not (clip.ismask): + clip = clip.add_mask() + + if mar is not None: + left = right = top = bottom = mar + + def make_bg(w,h): + new_w, new_h = w + left + right, h + top + bottom + if clip.ismask: + shape = (new_h, new_w) + bg = ( np.tile(opacity, (new_h, new_w)) + .astype(float) + .reshape(shape)) + else: + shape = (new_h, new_w, 3) + bg = np.tile(color, (new_h, new_w)).reshape(shape) + return bg + + if isinstance(clip, ImageClip): + + im = make_bg(clip.w,clip.h) + im[top:top + clip.h, left:left + clip.w] = clip.img + return clip.fl_image(lambda pic:im) + + else: + + def fl(gf, t): + pic = gf(t) + h,w = pic.shape[:2] + im = make_bg(w,h) + im[top:top + h, left:left + w] = pic + return im + return clip.fl(fl) diff --git a/.venv/Lib/site-packages/moviepy/video/fx/mask_and.py b/.venv/Lib/site-packages/moviepy/video/fx/mask_and.py new file mode 100644 index 00000000..ccec602a --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/fx/mask_and.py @@ -0,0 +1,20 @@ +import numpy as np + +from ..VideoClip import ImageClip + + +def mask_and(clip, other_clip): + """ Returns the logical 'and' (min) between two masks. + other_clip can be a mask clip or a picture (np.array). + The result has the duration of 'clip' (if it has any) + """ + + # To ensure that 'or' of two ImageClips will be an ImageClip. 
+ if isinstance(other_clip, ImageClip): + other_clip = other_clip.img + + if isinstance(other_clip, np.ndarray): + return clip.fl_image(lambda f : np.minimum(f, other_clip)) + else: + return clip.fl(lambda gf, t : np.minimum(gf(t), + other_clip.get_frame(t))) diff --git a/.venv/Lib/site-packages/moviepy/video/fx/mask_color.py b/.venv/Lib/site-packages/moviepy/video/fx/mask_color.py new file mode 100644 index 00000000..fba87dda --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/fx/mask_color.py @@ -0,0 +1,34 @@ +import numpy as np + + +def mask_color(clip, color=None, thr=0, s=1): + """ Returns a new clip with a mask for transparency where the original + clip is of the given color. + + You can also have a "progressive" mask by specifying a non-nul distance + threshold thr. In this case, if the distance between a pixel and the given + color is d, the transparency will be + + d**s / (thr**s + d**s) + + which is 1 when d>>thr and 0 for d< pic.shape[1] or ly > pic.shape[0]: + # For upsizing use linear for good quality & decent speed + interpolation = cv2.INTER_LINEAR + else: + # For dowsizing use area to prevent aliasing + interpolation = cv2.INTER_AREA + return cv2.resize(+pic.astype('uint8'), (lx, ly), + interpolation=interpolation) + + resizer.origin = "cv2" + +except ImportError: + + + try: + # TRY USING PIL/PILLOW AS RESIZER + from PIL import Image + import numpy as np + def resizer(pic, newsize): + newsize = list(map(int, newsize))[::-1] + shape = pic.shape + if len(shape)==3: + newshape = (newsize[0],newsize[1], shape[2] ) + else: + newshape = (newsize[0],newsize[1]) + + pilim = Image.fromarray(pic) + resized_pil = pilim.resize(newsize[::-1], Image.ANTIALIAS) + #arr = np.fromstring(resized_pil.tostring(), dtype='uint8') + #arr.reshape(newshape) + return np.array(resized_pil) + + resizer.origin = "PIL" + + except ImportError: + # TRY USING SCIPY AS RESIZER + try: + from scipy.misc import imresize + resizer = lambda pic, newsize : imresize(pic, + map(int, 
newsize[::-1])) + resizer.origin = "Scipy" + + except ImportError: + resize_possible = False + + + + +from moviepy.decorators import apply_to_mask + + +def resize(clip, newsize=None, height=None, width=None, apply_to_mask=True): + """ + Returns a video clip that is a resized version of the clip. + + Parameters + ------------ + + newsize: + Can be either + - ``(width,height)`` in pixels or a float representing + - A scaling factor, like 0.5 + - A function of time returning one of these. + + width: + width of the new clip in pixel. The height is then computed so + that the width/height ratio is conserved. + + height: + height of the new clip in pixel. The width is then computed so + that the width/height ratio is conserved. + + Examples + ---------- + + >>> myClip.resize( (460,720) ) # New resolution: (460,720) + >>> myClip.resize(0.6) # width and heigth multiplied by 0.6 + >>> myClip.resize(width=800) # height computed automatically. + >>> myClip.resize(lambda t : 1+0.02*t) # slow swelling of the clip + + """ + + w, h = clip.size + + if newsize is not None: + + def trans_newsize(ns): + + if isinstance(ns, (int, float)): + return [ns * w, ns * h] + else: + return ns + + if hasattr(newsize, "__call__"): + + newsize2 = lambda t : trans_newsize(newsize(t)) + + if clip.ismask: + + fun = lambda gf,t: (1.0*resizer((255 * gf(t)).astype('uint8'), + newsize2(t))/255) + else: + + fun = lambda gf,t: resizer(gf(t).astype('uint8'), + newsize2(t)) + + return clip.fl(fun, keep_duration=True, + apply_to= (["mask"] if apply_to_mask else [])) + + else: + + newsize = trans_newsize(newsize) + + + elif height is not None: + + if hasattr(height, "__call__"): + fun = lambda t : 1.0*int(height(t))/h + return resize(clip, fun) + + + else: + + newsize = [w * height / h, height] + + elif width is not None: + + if hasattr(width, "__call__"): + fun = lambda t : 1.0*width(t)/w + return resize(clip, fun) + + newsize = [width, h * width / w] + + + # From here, the resizing is constant (not a 
function of time), size=newsize + + if clip.ismask: + fl = lambda pic: 1.0*resizer((255 * pic).astype('uint8'), newsize)/255.0 + + else: + fl = lambda pic: resizer(pic.astype('uint8'), newsize) + + newclip = clip.fl_image(fl) + + if apply_to_mask and clip.mask is not None: + newclip.mask = resize(clip.mask, newsize, apply_to_mask=False) + + return newclip + + +if not resize_possible: + + doc = resize.__doc__ + def resize(clip, newsize=None, height=None, width=None): + raise ImportError("fx resize needs OpenCV or Scipy or PIL") + resize.__doc__ = doc diff --git a/.venv/Lib/site-packages/moviepy/video/fx/rotate.py b/.venv/Lib/site-packages/moviepy/video/fx/rotate.py new file mode 100644 index 00000000..da22268d --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/fx/rotate.py @@ -0,0 +1,73 @@ +import numpy as np + +from moviepy.decorators import apply_to_mask + +try: + from PIL import Image + PIL_FOUND = True + def pil_rotater(pic, angle, resample, expand): + return np.array( Image.fromarray(pic).rotate(angle, expand=expand, + resample=resample)) +except ImportError: + PIL_FOUND = False + +def rotate(clip, angle, unit='deg', resample="bicubic", expand=True): + """ + Change unit to 'rad' to define angles as radians. + If the angle is not one of 90, 180, -90, -180 (degrees) there will be + black borders. You can make them transparent with + + >>> newclip = clip.add_mask().rotate(72) + + Parameters + =========== + + clip + A video clip + + angle + Either a value or a function angle(t) representing the angle of rotation + + unit + Unit of parameter `angle` (either `deg` for degrees or `rad` for radians) + + resample + One of "nearest", "bilinear", or "bicubic". 
+ + expand + Only applIf False, the clip will maintain the same True, the clip will be resized so that the whole + """ + + resample = {"bilinear": Image.BILINEAR, + "nearest": Image.NEAREST, + "bicubic": Image.BICUBIC}[resample] + + if not hasattr(angle, '__call__'): + # if angle is a constant, convert to a constant function + a = +angle + angle = lambda t: a + + transpo = [1,0] if clip.ismask else [1,0,2] + + def fl(gf, t): + + a = angle(t) + im = gf(t) + + if unit == 'rad': + a = 360.0*a/(2*np.pi) + + if (a==90) and expand: + return np.transpose(im, axes=transpo)[::-1] + elif (a==-90) and expand: + return np.transpose(im, axes=transpo)[:,::-1] + elif (a in [180, -180]) and expand: + return im[::-1,::-1] + elif not PIL_FOUND: + raise ValueError('Without "Pillow" installed, only angles 90, -90,' + '180 are supported, please install "Pillow" with' + "pip install pillow") + else: + return pil_rotater(im, a, resample=resample, expand=expand) + + return clip.fl(fl, apply_to=["mask"]) diff --git a/.venv/Lib/site-packages/moviepy/video/fx/scroll.py b/.venv/Lib/site-packages/moviepy/video/fx/scroll.py new file mode 100644 index 00000000..34f4f401 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/fx/scroll.py @@ -0,0 +1,16 @@ +def scroll(clip, h=None, w=None, x_speed=0, y_speed=0, + x_start=0, y_start=0, apply_to="mask"): + """ Scrolls horizontally or vertically a clip, e.g. 
to make end + credits """ + if h is None: h = clip.h + if w is None: w = clip.w + + xmax = clip.w-w-1 + ymax = clip.h-h-1 + + def f(gf,t): + x = int(max(0, min(xmax, x_start+ round(x_speed*t)))) + y = int(max(0, min(ymax, y_start+ round(y_speed*t)))) + return gf(t)[y:y+h, x:x+w] + + return clip.fl(f, apply_to = apply_to) diff --git a/.venv/Lib/site-packages/moviepy/video/fx/speedx.py b/.venv/Lib/site-packages/moviepy/video/fx/speedx.py new file mode 100644 index 00000000..1c6b82cf --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/fx/speedx.py @@ -0,0 +1,21 @@ +from moviepy.decorators import apply_to_audio, apply_to_mask + + +def speedx(clip, factor = None, final_duration=None): + """ + Returns a clip playing the current clip but at a speed multiplied + by ``factor``. Instead of factor one can indicate the desired + ``final_duration`` of the clip, and the factor will be automatically + computed. + The same effect is applied to the clip's audio and mask if any. + """ + + if final_duration: + factor = 1.0* clip.duration / final_duration + + newclip = clip.fl_time(lambda t: factor * t, apply_to=['mask', 'audio']) + + if clip.duration is not None: + newclip = newclip.set_duration(1.0 * clip.duration / factor) + + return newclip diff --git a/.venv/Lib/site-packages/moviepy/video/fx/supersample.py b/.venv/Lib/site-packages/moviepy/video/fx/supersample.py new file mode 100644 index 00000000..93b9d7a4 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/fx/supersample.py @@ -0,0 +1,13 @@ +import numpy as np + + +def supersample(clip, d, nframes): + """ Replaces each frame at time t by the mean of `nframes` equally spaced frames + taken in the interval [t-d, t+d]. 
This results in motion blur.""" + + def fl(gf, t): + tt = np.linspace(t-d, t+d, nframes) + avg = np.mean(1.0*np.array([gf(t_) for t_ in tt], dtype='uint16'), axis=0) + return avg.astype("uint8") + + return clip.fl(fl) diff --git a/.venv/Lib/site-packages/moviepy/video/fx/time_mirror.py b/.venv/Lib/site-packages/moviepy/video/fx/time_mirror.py new file mode 100644 index 00000000..1c044e3e --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/fx/time_mirror.py @@ -0,0 +1,13 @@ +from moviepy.decorators import apply_to_audio, apply_to_mask, requires_duration + + +@requires_duration +@apply_to_mask +@apply_to_audio +def time_mirror(self): + """ + Returns a clip that plays the current clip backwards. + The clip must have its ``duration`` attribute set. + The same effect is applied to the clip's audio and mask if any. + """ + return self.fl_time(lambda t: self.duration - t, keep_duration=True) diff --git a/.venv/Lib/site-packages/moviepy/video/fx/time_symmetrize.py b/.venv/Lib/site-packages/moviepy/video/fx/time_symmetrize.py new file mode 100644 index 00000000..de2f3181 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/fx/time_symmetrize.py @@ -0,0 +1,17 @@ +from moviepy.decorators import apply_to_audio, apply_to_mask, requires_duration +from moviepy.video.compositing.concatenate import concatenate_videoclips + +from .time_mirror import time_mirror + + +@requires_duration +@apply_to_mask +def time_symmetrize(clip): + """ + Returns a clip that plays the current clip once forwards and + then once backwards. This is very practival to make video that + loop well, e.g. to create animated GIFs. + This effect is automatically applied to the clip's mask and audio + if they exist. 
+ """ + return concatenate_videoclips([clip, clip.fx( time_mirror )]) diff --git a/.venv/Lib/site-packages/moviepy/video/io/ImageSequenceClip.py b/.venv/Lib/site-packages/moviepy/video/io/ImageSequenceClip.py new file mode 100644 index 00000000..662138c0 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/io/ImageSequenceClip.py @@ -0,0 +1,164 @@ +import os + +import numpy as np +from imageio import imread + +from ..VideoClip import VideoClip + + +class ImageSequenceClip(VideoClip): + """ + + A VideoClip made from a series of images. + + + Parameters + ----------- + + sequence + Can be one of these: + - The name of a folder (containing only pictures). The pictures + will be considered in alphanumerical order. + - A list of names of image files. In this case you can choose to + load the pictures in memory pictures + - A list of Numpy arrays representing images. In this last case, + masks are not supported currently. + + fps + Number of picture frames to read per second. Instead, you can provide + the duration of each image with durations (see below) + + durations + List of the duration of each picture. + + with_mask + Should the alpha layer of PNG images be considered as a mask ? + + ismask + Will this sequence of pictures be used as an animated mask. 
+ + Notes + ------ + + If your sequence is made of image files, the only image kept in + + + + """ + + + def __init__(self, sequence, fps=None, durations=None, with_mask=True, + ismask=False, load_images=False): + + # CODE WRITTEN AS IT CAME, MAY BE IMPROVED IN THE FUTURE + + if (fps is None) and (durations is None): + raise ValueError("Please provide either 'fps' or 'durations'.") + VideoClip.__init__(self, ismask=ismask) + + # Parse the data + + fromfiles = True + + if isinstance(sequence, list): + if isinstance(sequence[0], str): + if load_images: + sequence = [imread(f) for f in sequence] + fromfiles = False + else: + fromfiles= True + else: + # sequence is already a list of numpy arrays + fromfiles = False + else: + # sequence is a folder name, make it a list of files: + fromfiles = True + sequence = sorted([os.path.join(sequence, f) + for f in os.listdir(sequence)]) + + + #check that all the images are of the same size + if isinstance(sequence[0], str): + size = imread(sequence[0]).shape + else: + size = sequence[0].shape + + for image in sequence: + image1=image + if isinstance(image, str): + image1=imread(image) + if size != image1.shape: + raise Exception("Moviepy: ImageSequenceClip requires all images to be the same size") + + + self.fps = fps + if fps is not None: + durations = [1.0/fps for image in sequence] + self.images_starts = [1.0*i/fps-np.finfo(np.float32).eps for i in range(len(sequence))] + else: + self.images_starts = [0]+list(np.cumsum(durations)) + self.durations = durations + self.duration = sum(durations) + self.end = self.duration + self.sequence = sequence + + def find_image_index(t): + return max([i for i in range(len(self.sequence)) + if self.images_starts[i]<=t]) + + if fromfiles: + + self.lastindex = None + self.lastimage = None + + def make_frame(t): + + index = find_image_index(t) + + if index != self.lastindex: + self.lastimage = imread(self.sequence[index])[:,:,:3] + self.lastindex = index + + return self.lastimage + + if 
with_mask and (imread(self.sequence[0]).shape[2]==4): + + self.mask = VideoClip(ismask=True) + self.mask.lastindex = None + self.mask.lastimage = None + + def mask_make_frame(t): + + index = find_image_index(t) + if index != self.mask.lastindex: + frame = imread(self.sequence[index])[:,:,3] + self.mask.lastimage = frame.astype(float)/255 + self.mask.lastindex = index + + return self.mask.lastimage + + self.mask.make_frame = mask_make_frame + self.mask.size = mask_make_frame(0).shape[:2][::-1] + + + else: + + def make_frame(t): + + index = find_image_index(t) + return self.sequence[index][:,:,:3] + + if with_mask and (self.sequence[0].shape[2]==4): + + self.mask = VideoClip(ismask=True) + + def mask_make_frame(t): + index = find_image_index(t) + return 1.0*self.sequence[index][:,:,3]/255 + + self.mask.make_frame = mask_make_frame + self.mask.size = mask_make_frame(0).shape[:2][::-1] + + + self.make_frame = make_frame + self.size = make_frame(0).shape[:2][::-1] diff --git a/.venv/Lib/site-packages/moviepy/video/io/VideoFileClip.py b/.venv/Lib/site-packages/moviepy/video/io/VideoFileClip.py new file mode 100644 index 00000000..1d5c8b63 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/io/VideoFileClip.py @@ -0,0 +1,134 @@ +import os + +from moviepy.audio.io.AudioFileClip import AudioFileClip +from moviepy.Clip import Clip +from moviepy.video.io.ffmpeg_reader import FFMPEG_VideoReader +from moviepy.video.VideoClip import VideoClip + + +class VideoFileClip(VideoClip): + + """ + + A video clip originating from a movie file. For instance: :: + + >>> clip = VideoFileClip("myHolidays.mp4") + >>> clip.close() + >>> with VideoFileClip("myMaskVideo.avi") as clip2: + >>> pass # Implicit close called by context manager. + + + Parameters + ------------ + + filename: + The name of the video file. It can have any extension supported + by ffmpeg: .ogv, .mp4, .mpeg, .avi, .mov etc. + + has_mask: + Set this to 'True' if there is a mask included in the videofile. 
+ Video files rarely contain masks, but some video codecs enable + that. For istance if you have a MoviePy VideoClip with a mask you + can save it to a videofile with a mask. (see also + ``VideoClip.write_videofile`` for more details). + + audio: + Set to `False` if the clip doesn't have any audio or if you do not + wish to read the audio. + + target_resolution: + Set to (desired_height, desired_width) to have ffmpeg resize the frames + before returning them. This is much faster than streaming in high-res + and then resizing. If either dimension is None, the frames are resized + by keeping the existing aspect ratio. + + resize_algorithm: + The algorithm used for resizing. Default: "bicubic", other popular + options include "bilinear" and "fast_bilinear". For more information, see + https://ffmpeg.org/ffmpeg-scaler.html + + fps_source: + The fps value to collect from the metadata. Set by default to 'tbr', but + can be set to 'fps', which may be helpful if importing slow-motion videos + that get messed up otherwise. + + + Attributes + ----------- + + filename: + Name of the original video file. + + fps: + Frames per second in the original file. + + + Read docs for Clip() and VideoClip() for other, more generic, attributes. + + Lifetime + -------- + + Note that this creates subprocesses and locks files. If you construct one of these instances, you must call + close() afterwards, or the subresources will not be cleaned up until the process ends. + + If copies are made, and close() is called on one, it may cause methods on the other copies to fail. 
+ + """ + + def __init__(self, filename, has_mask=False, + audio=True, audio_buffersize=200000, + target_resolution=None, resize_algorithm='bicubic', + audio_fps=44100, audio_nbytes=2, verbose=False, + fps_source='tbr'): + + VideoClip.__init__(self) + + # Make a reader + pix_fmt = "rgba" if has_mask else "rgb24" + self.reader = FFMPEG_VideoReader(filename, pix_fmt=pix_fmt, + target_resolution=target_resolution, + resize_algo=resize_algorithm, + fps_source=fps_source) + + # Make some of the reader's attributes accessible from the clip + self.duration = self.reader.duration + self.end = self.reader.duration + + self.fps = self.reader.fps + self.size = self.reader.size + self.rotation = self.reader.rotation + + self.filename = self.reader.filename + + if has_mask: + + self.make_frame = lambda t: self.reader.get_frame(t)[:,:,:3] + mask_mf = lambda t: self.reader.get_frame(t)[:,:,3]/255.0 + self.mask = (VideoClip(ismask=True, make_frame=mask_mf) + .set_duration(self.duration)) + self.mask.fps = self.fps + + else: + + self.make_frame = lambda t: self.reader.get_frame(t) + + # Make a reader for the audio, if any. + if audio and self.reader.infos['audio_found']: + + self.audio = AudioFileClip(filename, + buffersize=audio_buffersize, + fps=audio_fps, + nbytes=audio_nbytes) + + def close(self): + """ Close the internal reader. """ + if self.reader: + self.reader.close() + self.reader = None + + try: + if self.audio: + self.audio.close() + self.audio = None + except AttributeError: + pass diff --git a/.venv/Lib/site-packages/moviepy/video/io/__init__.py b/.venv/Lib/site-packages/moviepy/video/io/__init__.py new file mode 100644 index 00000000..7ad81e6f --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/io/__init__.py @@ -0,0 +1,3 @@ +""" +Classes and methods for reading, writing and previewing video files. 
+""" diff --git a/.venv/Lib/site-packages/moviepy/video/io/__pycache__/ImageSequenceClip.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/ImageSequenceClip.cpython-311.pyc new file mode 100644 index 00000000..32b55b06 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/ImageSequenceClip.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/io/__pycache__/VideoFileClip.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/VideoFileClip.cpython-311.pyc new file mode 100644 index 00000000..0c27bfae Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/VideoFileClip.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/io/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..530ba2f9 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/io/__pycache__/bindings.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/bindings.cpython-311.pyc new file mode 100644 index 00000000..f045133f Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/bindings.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/io/__pycache__/downloader.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/downloader.cpython-311.pyc new file mode 100644 index 00000000..4ef1436f Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/downloader.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/io/__pycache__/ffmpeg_reader.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/ffmpeg_reader.cpython-311.pyc new file mode 100644 index 00000000..4a6e3846 Binary files /dev/null and 
b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/ffmpeg_reader.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/io/__pycache__/ffmpeg_tools.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/ffmpeg_tools.cpython-311.pyc new file mode 100644 index 00000000..6a1b13c7 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/ffmpeg_tools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/io/__pycache__/ffmpeg_writer.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/ffmpeg_writer.cpython-311.pyc new file mode 100644 index 00000000..2f19f50f Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/ffmpeg_writer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/io/__pycache__/gif_writers.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/gif_writers.cpython-311.pyc new file mode 100644 index 00000000..f0f31d81 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/gif_writers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/io/__pycache__/html_tools.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/html_tools.cpython-311.pyc new file mode 100644 index 00000000..fce904a5 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/html_tools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/io/__pycache__/preview.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/preview.cpython-311.pyc new file mode 100644 index 00000000..af5b07c0 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/preview.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/io/__pycache__/sliders.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/sliders.cpython-311.pyc new file mode 100644 index 
00000000..c9336c69 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/io/__pycache__/sliders.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/io/bindings.py b/.venv/Lib/site-packages/moviepy/video/io/bindings.py new file mode 100644 index 00000000..4b206f75 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/io/bindings.py @@ -0,0 +1,32 @@ +""" +This module implements all the functions to communicate with other Python +modules (PIL, matplotlib, mayavi, etc.) +""" + +import numpy as np + + +def PIL_to_npimage(im): + """ Transforms a PIL/Pillow image into a numpy RGB(A) image. + Actually all this do is returning numpy.array(im).""" + return np.array(im) + #w,h = im.size + #d = (4 if im.mode=="RGBA" else 3) + #return +np.frombuffer(im.tobytes(), dtype='uint8').reshape((h,w,d)) + + +def mplfig_to_npimage(fig): + """ Converts a matplotlib figure to a RGB frame after updating the canvas""" + # only the Agg backend now supports the tostring_rgb function + from matplotlib.backends.backend_agg import FigureCanvasAgg + canvas = FigureCanvasAgg(fig) + canvas.draw() # update/draw the elements + + # get the width and the height to resize the matrix + l,b,w,h = canvas.figure.bbox.bounds + w, h = int(w), int(h) + + # exports the canvas to a string buffer and then to a numpy nd.array + buf = canvas.tostring_rgb() + image= np.frombuffer(buf, dtype=np.uint8) + return image.reshape(h,w,3) diff --git a/.venv/Lib/site-packages/moviepy/video/io/downloader.py b/.venv/Lib/site-packages/moviepy/video/io/downloader.py new file mode 100644 index 00000000..5b579de0 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/io/downloader.py @@ -0,0 +1,34 @@ +""" +Utilities to get a file from the internet +""" + +import os + +import requests + +from moviepy.tools import subprocess_call + + +def download_webfile(url, filename, overwrite=False): + """ Small utility to download the file at 'url' under name 'filename'. 
+ If url is a youtube video ID like z410eauCnH it will download the video + using youtube-dl (install youtube-dl first !). + If the filename already exists and overwrite=False, nothing will happen. + """ + if os.path.exists(filename) and not overwrite: + return + + if '.' in url: + r = requests.get(url, stream=True) + with open(filename, 'wb') as fd: + for chunk in r.iter_content(chunk_size=128): + fd.write(chunk) + + else: + try: + subprocess_call(['youtube-dl', url, '-o', filename]) + except OSError as e: + raise OSError( + e.message + '\n A possible reason is that youtube-dl' + ' is not installed on your computer. Install it with ' + ' "pip install youtube_dl"') diff --git a/.venv/Lib/site-packages/moviepy/video/io/ffmpeg_reader.py b/.venv/Lib/site-packages/moviepy/video/io/ffmpeg_reader.py new file mode 100644 index 00000000..7ef5b2d1 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/io/ffmpeg_reader.py @@ -0,0 +1,393 @@ +""" +This module implements all the functions to read a video or a picture +using ffmpeg. It is quite ugly, as there are many pitfalls to avoid +""" + +from __future__ import division + +import logging +import os +import re +import subprocess as sp +import warnings + +import numpy as np + +from moviepy.compat import DEVNULL, PY3 +from moviepy.config import get_setting # ffmpeg, ffmpeg.exe, etc... 
+from moviepy.tools import cvsecs + +logging.captureWarnings(True) + + + + + +class FFMPEG_VideoReader: + + def __init__(self, filename, print_infos=False, bufsize = None, + pix_fmt="rgb24", check_duration=True, + target_resolution=None, resize_algo='bicubic', + fps_source='tbr'): + + self.filename = filename + self.proc = None + infos = ffmpeg_parse_infos(filename, print_infos, check_duration, + fps_source) + self.fps = infos['video_fps'] + self.size = infos['video_size'] + self.rotation = infos['video_rotation'] + + if target_resolution: + # revert the order, as ffmpeg used (width, height) + target_resolution = target_resolution[1], target_resolution[0] + + if None in target_resolution: + ratio = 1 + for idx, target in enumerate(target_resolution): + if target: + ratio = target / self.size[idx] + self.size = (int(self.size[0] * ratio), int(self.size[1] * ratio)) + else: + self.size = target_resolution + self.resize_algo = resize_algo + + self.duration = infos['video_duration'] + self.ffmpeg_duration = infos['duration'] + self.nframes = infos['video_nframes'] + + self.infos = infos + + self.pix_fmt = pix_fmt + self.depth = 4 if pix_fmt == 'rgba' else 3 + + if bufsize is None: + w, h = self.size + bufsize = self.depth * w * h + 100 + + self.bufsize= bufsize + self.initialize() + + + self.pos = 1 + self.lastread = self.read_frame() + + + def initialize(self, starttime=0): + """Opens the file, creates the pipe. 
""" + + self.close() # if any + + if starttime != 0 : + offset = min(1, starttime) + i_arg = ['-ss', "%.06f" % (starttime - offset), + '-i', self.filename, + '-ss', "%.06f" % offset] + else: + i_arg = [ '-i', self.filename] + + cmd = ([get_setting("FFMPEG_BINARY")] + i_arg + + ['-loglevel', 'error', + '-f', 'image2pipe', + '-vf', 'scale=%d:%d' % tuple(self.size), + '-sws_flags', self.resize_algo, + "-pix_fmt", self.pix_fmt, + '-vcodec', 'rawvideo', '-']) + popen_params = {"bufsize": self.bufsize, + "stdout": sp.PIPE, + "stderr": sp.PIPE, + "stdin": DEVNULL} + + if os.name == "nt": + popen_params["creationflags"] = 0x08000000 + + self.proc = sp.Popen(cmd, **popen_params) + + + def skip_frames(self, n=1): + """Reads and throws away n frames """ + w, h = self.size + for i in range(n): + self.proc.stdout.read(self.depth*w*h) + #self.proc.stdout.flush() + self.pos += n + + + def read_frame(self): + w, h = self.size + nbytes= self.depth*w*h + + s = self.proc.stdout.read(nbytes) + if len(s) != nbytes: + + warnings.warn("Warning: in file %s, "%(self.filename)+ + "%d bytes wanted but %d bytes read,"%(nbytes, len(s))+ + "at frame %d/%d, at time %.02f/%.02f sec. "%( + self.pos,self.nframes, + 1.0*self.pos/self.fps, + self.duration)+ + "Using the last valid frame instead.", + UserWarning) + + if not hasattr(self, 'lastread'): + raise IOError(("MoviePy error: failed to read the first frame of " + "video file %s. That might mean that the file is " + "corrupted. That may also mean that you are using " + "a deprecated version of FFMPEG. On Ubuntu/Debian " + "for instance the version in the repos is deprecated. 
" + "Please update to a recent version from the website.")%( + self.filename)) + + result = self.lastread + + else: + if hasattr(np, 'frombuffer'): + result = np.frombuffer(s, dtype='uint8') + else: + result = np.fromstring(s, dtype='uint8') + result.shape =(h, w, len(s)//(w*h)) # reshape((h, w, len(s)//(w*h))) + self.lastread = result + + return result + + def get_frame(self, t): + """ Read a file video frame at time t. + + Note for coders: getting an arbitrary frame in the video with + ffmpeg can be painfully slow if some decoding has to be done. + This function tries to avoid fetching arbitrary frames + whenever possible, by moving between adjacent frames. + """ + + # these definitely need to be rechecked sometime. Seems to work. + + # I use that horrible '+0.00001' hack because sometimes due to numerical + # imprecisions a 3.0 can become a 2.99999999... which makes the int() + # go to the previous integer. This makes the fetching more robust in the + # case where you get the nth frame by writing get_frame(n/fps). + + pos = int(self.fps*t + 0.00001)+1 + + # Initialize proc if it is not open + if not self.proc: + self.initialize(t) + self.pos = pos + self.lastread = self.read_frame() + + if pos == self.pos: + return self.lastread + elif (pos < self.pos) or (pos > self.pos + 100): + self.initialize(t) + self.pos = pos + else: + self.skip_frames(pos-self.pos-1) + result = self.read_frame() + self.pos = pos + return result + + def close(self): + if self.proc: + self.proc.terminate() + self.proc.stdout.close() + self.proc.stderr.close() + self.proc.wait() + self.proc = None + if hasattr(self, 'lastread'): + del self.lastread + + def __del__(self): + self.close() + + +def ffmpeg_read_image(filename, with_mask=True): + """ Read an image file (PNG, BMP, JPEG...). + + Wraps FFMPEG_Videoreader to read just one image. + Returns an ImageClip. + + This function is not meant to be used directly in MoviePy, + use ImageClip instead to make clips out of image files. 
+ + Parameters + ----------- + + filename + Name of the image file. Can be of any format supported by ffmpeg. + + with_mask + If the image has a transparency layer, ``with_mask=true`` will save + this layer as the mask of the returned ImageClip + + """ + pix_fmt = 'rgba' if with_mask else "rgb24" + reader = FFMPEG_VideoReader(filename, pix_fmt=pix_fmt, check_duration=False) + im = reader.lastread + del reader + return im + + +def ffmpeg_parse_infos(filename, print_infos=False, check_duration=True, + fps_source='tbr'): + """Get file infos using ffmpeg. + + Returns a dictionnary with the fields: + "video_found", "video_fps", "duration", "video_nframes", + "video_duration", "audio_found", "audio_fps" + + "video_duration" is slightly smaller than "duration" to avoid + fetching the uncomplete frames at the end, which raises an error. + + """ + + + # open the file in a pipe, provoke an error, read output + is_GIF = filename.endswith('.gif') + cmd = [get_setting("FFMPEG_BINARY"), "-i", filename] + if is_GIF: + cmd += ["-f", "null", "/dev/null"] + + popen_params = {"bufsize": 10**5, + "stdout": sp.PIPE, + "stderr": sp.PIPE, + "stdin": DEVNULL} + + if os.name == "nt": + popen_params["creationflags"] = 0x08000000 + + proc = sp.Popen(cmd, **popen_params) + (output, error) = proc.communicate() + infos = error.decode('utf8') + + del proc + + if print_infos: + # print the whole info text returned by FFMPEG + print(infos) + + + lines = infos.splitlines() + if "No such file or directory" in lines[-1]: + raise IOError(("MoviePy error: the file %s could not be found!\n" + "Please check that you entered the correct " + "path.")%filename) + + result = dict() + + + # get duration (in seconds) + result['duration'] = None + + if check_duration: + try: + keyword = ('frame=' if is_GIF else 'Duration: ') + # for large GIFS the "full" duration is presented as the last element in the list. 
+ index = -1 if is_GIF else 0 + line = [l for l in lines if keyword in l][index] + match = re.findall("([0-9][0-9]:[0-9][0-9]:[0-9][0-9].[0-9][0-9])", line)[0] + result['duration'] = cvsecs(match) + except: + raise IOError(("MoviePy error: failed to read the duration of file %s.\n" + "Here are the file infos returned by ffmpeg:\n\n%s")%( + filename, infos)) + + # get the output line that speaks about video + lines_video = [l for l in lines if ' Video: ' in l and re.search('\d+x\d+', l)] + + result['video_found'] = ( lines_video != [] ) + + if result['video_found']: + try: + line = lines_video[0] + + # get the size, of the form 460x320 (w x h) + match = re.search(" [0-9]*x[0-9]*(,| )", line) + s = list(map(int, line[match.start():match.end()-1].split('x'))) + result['video_size'] = s + except: + raise IOError(("MoviePy error: failed to read video dimensions in file %s.\n" + "Here are the file infos returned by ffmpeg:\n\n%s")%( + filename, infos)) + + # Get the frame rate. Sometimes it's 'tbr', sometimes 'fps', sometimes + # tbc, and sometimes tbc/2... + # Current policy: Trust tbr first, then fps unless fps_source is + # specified as 'fps' in which case try fps then tbr + + # If result is near from x*1000/1001 where x is 23,24,25,50, + # replace by x*1000/1001 (very common case for the fps). + + def get_tbr(): + match = re.search("( [0-9]*.| )[0-9]* tbr", line) + + # Sometimes comes as e.g. 12k. We need to replace that with 12000. 
+ s_tbr = line[match.start():match.end()].split(' ')[1] + if "k" in s_tbr: + tbr = float(s_tbr.replace("k", "")) * 1000 + else: + tbr = float(s_tbr) + return tbr + + def get_fps(): + match = re.search("( [0-9]*.| )[0-9]* fps", line) + fps = float(line[match.start():match.end()].split(' ')[1]) + return fps + + if fps_source == 'tbr': + try: + result['video_fps'] = get_tbr() + except: + result['video_fps'] = get_fps() + + elif fps_source == 'fps': + try: + result['video_fps'] = get_fps() + except: + result['video_fps'] = get_tbr() + + # It is known that a fps of 24 is often written as 24000/1001 + # but then ffmpeg nicely rounds it to 23.98, which we hate. + coef = 1000.0/1001.0 + fps = result['video_fps'] + for x in [23,24,25,30,50]: + if (fps!=x) and abs(fps - x*coef) < .01: + result['video_fps'] = x*coef + + if check_duration: + result['video_nframes'] = int(result['duration']*result['video_fps'])+1 + result['video_duration'] = result['duration'] + else: + result['video_nframes'] = 1 + result['video_duration'] = None + # We could have also recomputed the duration from the number + # of frames, as follows: + # >>> result['video_duration'] = result['video_nframes'] / result['video_fps'] + + # get the video rotation info. 
+ try: + rotation_lines = [l for l in lines if 'rotate :' in l and re.search('\d+$', l)] + if len(rotation_lines): + rotation_line = rotation_lines[0] + match = re.search('\d+$', rotation_line) + result['video_rotation'] = int(rotation_line[match.start() : match.end()]) + else: + result['video_rotation'] = 0 + except: + raise IOError(("MoviePy error: failed to read video rotation in file %s.\n" + "Here are the file infos returned by ffmpeg:\n\n%s")%( + filename, infos)) + + + lines_audio = [l for l in lines if ' Audio: ' in l] + + result['audio_found'] = lines_audio != [] + + if result['audio_found']: + line = lines_audio[0] + try: + match = re.search(" [0-9]* Hz", line) + hz_string = line[match.start()+1:match.end()-3] # Removes the 'hz' from the end + result['audio_fps'] = int(hz_string) + except: + result['audio_fps'] = 'unknown' + + return result diff --git a/.venv/Lib/site-packages/moviepy/video/io/ffmpeg_tools.py b/.venv/Lib/site-packages/moviepy/video/io/ffmpeg_tools.py new file mode 100644 index 00000000..ef664211 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/io/ffmpeg_tools.py @@ -0,0 +1,68 @@ +""" Misc. bindings to ffmpeg and ImageMagick.""" + +import os +import subprocess as sp +import sys + +from moviepy.config import get_setting +from moviepy.tools import subprocess_call + + +def ffmpeg_movie_from_frames(filename, folder, fps, digits=6, bitrate='v'): + """ + Writes a movie out of the frames (picture files) in a folder. + Almost deprecated. + """ + s = "%" + "%02d" % digits + "d.png" + cmd = [get_setting("FFMPEG_BINARY"), "-y", "-f","image2", + "-r", "%d"%fps, + "-i", os.path.join(folder,folder) + '/' + s, + "-b", "%dk"%bitrate, + "-r", "%d"%fps, + filename] + + subprocess_call(cmd) + + +def ffmpeg_extract_subclip(filename, t1, t2, targetname=None): + """ Makes a new video file playing video file ``filename`` between + the times ``t1`` and ``t2``. 
""" + name, ext = os.path.splitext(filename) + if not targetname: + T1, T2 = [int(1000*t) for t in [t1, t2]] + targetname = "%sSUB%d_%d.%s" % (name, T1, T2, ext) + + cmd = [get_setting("FFMPEG_BINARY"),"-y", + "-ss", "%0.2f"%t1, + "-i", filename, + "-t", "%0.2f"%(t2-t1), + "-map", "0", "-vcodec", "copy", "-acodec", "copy", targetname] + + subprocess_call(cmd) + + +def ffmpeg_merge_video_audio(video,audio,output, vcodec='copy', + acodec='copy', ffmpeg_output=False, + logger = 'bar'): + """ merges video file ``video`` and audio file ``audio`` into one + movie file ``output``. """ + cmd = [get_setting("FFMPEG_BINARY"), "-y", "-i", audio,"-i", video, + "-vcodec", vcodec, "-acodec", acodec, output] + + subprocess_call(cmd, logger = logger) + + +def ffmpeg_extract_audio(inputfile,output,bitrate=3000,fps=44100): + """ extract the sound from a video file and save it in ``output`` """ + cmd = [get_setting("FFMPEG_BINARY"), "-y", "-i", inputfile, "-ab", "%dk"%bitrate, + "-ar", "%d"%fps, output] + subprocess_call(cmd) + + +def ffmpeg_resize(video,output,size): + """ resizes ``video`` to new size ``size`` and write the result + in file ``output``. """ + cmd= [get_setting("FFMPEG_BINARY"), "-i", video, "-vf", "scale=%d:%d"%(size[0], size[1]), + output] + + subprocess_call(cmd) diff --git a/.venv/Lib/site-packages/moviepy/video/io/ffmpeg_writer.py b/.venv/Lib/site-packages/moviepy/video/io/ffmpeg_writer.py new file mode 100644 index 00000000..7fdd1dd6 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/io/ffmpeg_writer.py @@ -0,0 +1,269 @@ +""" +On the long term this will implement several methods to make videos +out of VideoClips +""" + +import os +import subprocess as sp + +import numpy as np +from proglog import proglog + +from moviepy.compat import DEVNULL, PY3 +from moviepy.config import get_setting + + +class FFMPEG_VideoWriter: + """ A class for FFMPEG-based video writing. + + A class to write videos using ffmpeg. ffmpeg will write in a large + choice of formats. 
+ + Parameters + ----------- + + filename + Any filename like 'video.mp4' etc. but if you want to avoid + complications it is recommended to use the generic extension + '.avi' for all your videos. + + size + Size (width,height) of the output video in pixels. + + fps + Frames per second in the output video file. + + codec + FFMPEG codec. It seems that in terms of quality the hierarchy is + 'rawvideo' = 'png' > 'mpeg4' > 'libx264' + 'png' manages the same lossless quality as 'rawvideo' but yields + smaller files. Type ``ffmpeg -codecs`` in a terminal to get a list + of accepted codecs. + + Note for default 'libx264': by default the pixel format yuv420p + is used. If the video dimensions are not both even (e.g. 720x405) + another pixel format is used, and this can cause problem in some + video readers. + + audiofile + Optional: The name of an audio file that will be incorporated + to the video. + + preset + Sets the time that FFMPEG will take to compress the video. The slower, + the better the compression rate. Possibilities are: ultrafast,superfast, + veryfast, faster, fast, medium (default), slow, slower, veryslow, + placebo. + + bitrate + Only relevant for codecs which accept a bitrate. "5000k" offers + nice results in general. + + withmask + Boolean. Set to ``True`` if there is a mask in the video to be + encoded. 
+ + """ + + def __init__(self, filename, size, fps, codec="libx264", audiofile=None, + preset="medium", bitrate=None, withmask=False, + logfile=None, threads=None, ffmpeg_params=None): + + if logfile is None: + logfile = sp.PIPE + + self.filename = filename + self.codec = codec + self.ext = self.filename.split(".")[-1] + + # order is important + cmd = [ + get_setting("FFMPEG_BINARY"), + '-y', + '-loglevel', 'error' if logfile == sp.PIPE else 'info', + '-f', 'rawvideo', + '-vcodec', 'rawvideo', + '-s', '%dx%d' % (size[0], size[1]), + '-pix_fmt', 'rgba' if withmask else 'rgb24', + '-r', '%.02f' % fps, + '-an', '-i', '-' + ] + if audiofile is not None: + cmd.extend([ + '-i', audiofile, + '-acodec', 'copy' + ]) + cmd.extend([ + '-vcodec', codec, + '-preset', preset, + ]) + if ffmpeg_params is not None: + cmd.extend(ffmpeg_params) + if bitrate is not None: + cmd.extend([ + '-b', bitrate + ]) + + if threads is not None: + cmd.extend(["-threads", str(threads)]) + + if ((codec == 'libx264') and + (size[0] % 2 == 0) and + (size[1] % 2 == 0)): + cmd.extend([ + '-pix_fmt', 'yuv420p' + ]) + cmd.extend([ + filename + ]) + + popen_params = {"stdout": DEVNULL, + "stderr": logfile, + "stdin": sp.PIPE} + + # This was added so that no extra unwanted window opens on windows + # when the child process is created + if os.name == "nt": + popen_params["creationflags"] = 0x08000000 # CREATE_NO_WINDOW + + self.proc = sp.Popen(cmd, **popen_params) + + + def write_frame(self, img_array): + """ Writes one frame in the file.""" + try: + if PY3: + self.proc.stdin.write(img_array.tobytes()) + else: + self.proc.stdin.write(img_array.tostring()) + except IOError as err: + _, ffmpeg_error = self.proc.communicate() + error = (str(err) + ("\n\nMoviePy error: FFMPEG encountered " + "the following error while writing file %s:" + "\n\n %s" % (self.filename, str(ffmpeg_error)))) + + if b"Unknown encoder" in ffmpeg_error: + + error = error+("\n\nThe video export " + "failed because FFMPEG didn't find the 
specified " + "codec for video encoding (%s). Please install " + "this codec or change the codec when calling " + "write_videofile. For instance:\n" + " >>> clip.write_videofile('myvid.webm', codec='libvpx')")%(self.codec) + + elif b"incorrect codec parameters ?" in ffmpeg_error: + + error = error+("\n\nThe video export " + "failed, possibly because the codec specified for " + "the video (%s) is not compatible with the given " + "extension (%s). Please specify a valid 'codec' " + "argument in write_videofile. This would be 'libx264' " + "or 'mpeg4' for mp4, 'libtheora' for ogv, 'libvpx for webm. " + "Another possible reason is that the audio codec was not " + "compatible with the video codec. For instance the video " + "extensions 'ogv' and 'webm' only allow 'libvorbis' (default) as a" + "video codec." + )%(self.codec, self.ext) + + elif b"encoder setup failed" in ffmpeg_error: + + error = error+("\n\nThe video export " + "failed, possibly because the bitrate you specified " + "was too high or too low for the video codec.") + + elif b"Invalid encoder type" in ffmpeg_error: + + error = error + ("\n\nThe video export failed because the codec " + "or file extension you provided is not a video") + + + raise IOError(error) + + def close(self): + if self.proc: + self.proc.stdin.close() + if self.proc.stderr is not None: + self.proc.stderr.close() + self.proc.wait() + + self.proc = None + + # Support the Context Manager protocol, to ensure that resources are cleaned up. + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + +def ffmpeg_write_video(clip, filename, fps, codec="libx264", bitrate=None, + preset="medium", withmask=False, write_logfile=False, + audiofile=None, verbose=True, threads=None, ffmpeg_params=None, + logger='bar'): + """ Write the clip to a videofile. See VideoClip.write_videofile for details + on the parameters. 
+ """ + logger = proglog.default_bar_logger(logger) + + if write_logfile: + logfile = open(filename + ".log", 'w+') + else: + logfile = None + logger(message='Moviepy - Writing video %s\n' % filename) + with FFMPEG_VideoWriter(filename, clip.size, fps, codec = codec, + preset=preset, bitrate=bitrate, logfile=logfile, + audiofile=audiofile, threads=threads, + ffmpeg_params=ffmpeg_params) as writer: + + nframes = int(clip.duration*fps) + + for t,frame in clip.iter_frames(logger=logger, with_times=True, + fps=fps, dtype="uint8"): + if withmask: + mask = (255*clip.mask.get_frame(t)) + if mask.dtype != "uint8": + mask = mask.astype("uint8") + frame = np.dstack([frame,mask]) + + writer.write_frame(frame) + + if write_logfile: + logfile.close() + logger(message='Moviepy - Done !') + + +def ffmpeg_write_image(filename, image, logfile=False): + """ Writes an image (HxWx3 or HxWx4 numpy array) to a file, using + ffmpeg. """ + + if image.dtype != 'uint8': + image = image.astype("uint8") + + cmd = [ get_setting("FFMPEG_BINARY"), '-y', + '-s', "%dx%d"%(image.shape[:2][::-1]), + "-f", 'rawvideo', + '-pix_fmt', "rgba" if (image.shape[2] == 4) else "rgb24", + '-i','-', filename] + + if logfile: + log_file = open(filename + ".log", 'w+') + else: + log_file = sp.PIPE + + popen_params = {"stdout": DEVNULL, + "stderr": log_file, + "stdin": sp.PIPE} + + if os.name == "nt": + popen_params["creationflags"] = 0x08000000 + + proc = sp.Popen(cmd, **popen_params) + out, err = proc.communicate(image.tostring()) + + if proc.returncode: + err = "\n".join(["[MoviePy] Running : %s\n" % cmd, + "WARNING: this command returned an error:", + err.decode('utf8')]) + raise IOError(err) + + del proc diff --git a/.venv/Lib/site-packages/moviepy/video/io/gif_writers.py b/.venv/Lib/site-packages/moviepy/video/io/gif_writers.py new file mode 100644 index 00000000..b17d0e66 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/io/gif_writers.py @@ -0,0 +1,285 @@ +import os +import subprocess as sp + 
+import numpy as np +import proglog + +from moviepy.compat import DEVNULL +from moviepy.config import get_setting +from moviepy.decorators import requires_duration, use_clip_fps_by_default +from moviepy.tools import subprocess_call + +try: + import imageio + IMAGEIO_FOUND = True +except ImportError: + IMAGEIO_FOUND = False + + +@requires_duration +@use_clip_fps_by_default +def write_gif_with_tempfiles(clip, filename, fps=None, program= 'ImageMagick', + opt="OptimizeTransparency", fuzz=1, verbose=True, + loop=0, dispose=True, colors=None, logger='bar'): + """ Write the VideoClip to a GIF file. + + + Converts a VideoClip into an animated GIF using ImageMagick + or ffmpeg. Does the same as write_gif (see this one for more + docstring), but writes every frame to a file instead of passing + them in the RAM. Useful on computers with little RAM. + + """ + logger = proglog.default_bar_logger(logger) + fileName, ext = os.path.splitext(filename) + tt = np.arange(0,clip.duration, 1.0/fps) + + tempfiles = [] + + logger(message='MoviePy - Building file %s\n' % filename) + logger(message='MoviePy - - Generating GIF frames') + + + for i, t in logger.iter_bar(t=list(enumerate(tt))): + + name = "%s_GIFTEMP%04d.png"%(fileName, i+1) + tempfiles.append(name) + clip.save_frame(name, t, withmask=True) + + delay = int(100.0/fps) + + if program == "ImageMagick": + logger(message='MoviePy - - Optimizing GIF with ImageMagick...') + cmd = [get_setting("IMAGEMAGICK_BINARY"), + '-delay' , '%d'%delay, + "-dispose" ,"%d"%(2 if dispose else 1), + "-loop" , "%d"%loop, + "%s_GIFTEMP*.png"%fileName, + "-coalesce", + "-fuzz", "%02d"%fuzz + "%", + "-layers", "%s"%opt, + ]+(["-colors", "%d"%colors] if colors is not None else [])+[ + filename] + + elif program == "ffmpeg": + + cmd = [get_setting("FFMPEG_BINARY"), '-y', + '-f', 'image2', '-r',str(fps), + '-i', fileName+'_GIFTEMP%04d.png', + '-r',str(fps), + filename] + + try: + subprocess_call(cmd, logger=logger) + logger(message='MoviePy - GIF ready: 
%s.' % filename) + + except (IOError,OSError) as err: + + error = ("MoviePy Error: creation of %s failed because " + "of the following error:\n\n%s.\n\n."%(filename, str(err))) + + if program == "ImageMagick": + error = error + ("This error can be due to the fact that " + "ImageMagick is not installed on your computer, or " + "(for Windows users) that you didn't specify the " + "path to the ImageMagick binary in file config_defaults.py." ) + + raise IOError(error) + + for f in tempfiles: + os.remove(f) + + + +@requires_duration +@use_clip_fps_by_default +def write_gif(clip, filename, fps=None, program= 'ImageMagick', + opt="OptimizeTransparency", fuzz=1, verbose=True, withmask=True, + loop=0, dispose=True, colors=None, logger='bar'): + """ Write the VideoClip to a GIF file, without temporary files. + + Converts a VideoClip into an animated GIF using ImageMagick + or ffmpeg. + + + Parameters + ----------- + + filename + Name of the resulting gif file. + + fps + Number of frames per second (see note below). If it + isn't provided, then the function will look for the clip's + ``fps`` attribute (VideoFileClip, for instance, have one). + + program + Software to use for the conversion, either 'ImageMagick' or + 'ffmpeg'. + + opt + (ImageMagick only) optimalization to apply, either + 'optimizeplus' or 'OptimizeTransparency'. + + fuzz + (ImageMagick only) Compresses the GIF by considering that + the colors that are less than fuzz% different are in fact + the same. + + + Notes + ----- + + The gif will be playing the clip in real time (you can + only change the frame rate). If you want the gif to be played + slower than the clip you will use :: + + >>> # slow down clip 50% and make it a gif + >>> myClip.speedx(0.5).write_gif('myClip.gif') + + """ + + # + # We use processes chained with pipes. 
+ # + # if program == 'ffmpeg' + # frames --ffmpeg--> gif + # + # if program == 'ImageMagick' and optimize == (None, False) + # frames --ffmpeg--> bmp frames --ImageMagick--> gif + # + # + # if program == 'ImageMagick' and optimize != (None, False) + # frames -ffmpeg-> bmp frames -ImagMag-> gif -ImagMag-> better gif + # + + delay= 100.0/fps + logger = proglog.default_bar_logger(logger) + if clip.mask is None: + withmask = False + + cmd1 = [get_setting("FFMPEG_BINARY"), '-y', '-loglevel', 'error', + '-f', 'rawvideo', + '-vcodec','rawvideo', '-r', "%.02f"%fps, + '-s', "%dx%d"%(clip.w, clip.h), + '-pix_fmt', ('rgba' if withmask else 'rgb24'), + '-i', '-'] + + popen_params = {"stdout": DEVNULL, + "stderr": DEVNULL, + "stdin": DEVNULL} + + if os.name == "nt": + popen_params["creationflags"] = 0x08000000 + + if program == "ffmpeg": + popen_params["stdin"] = sp.PIPE + popen_params["stdout"] = DEVNULL + + proc1 = sp.Popen(cmd1+[ '-pix_fmt', ('rgba' if withmask else 'rgb24'), + '-r', "%.02f"%fps, filename], **popen_params) + else: + + popen_params["stdin"] = sp.PIPE + popen_params["stdout"] = sp.PIPE + + proc1 = sp.Popen(cmd1+ ['-f', 'image2pipe', '-vcodec', 'bmp', '-'], + **popen_params) + + if program == 'ImageMagick': + + cmd2 = [get_setting("IMAGEMAGICK_BINARY"), '-delay', "%.02f"%(delay), + "-dispose" ,"%d"%(2 if dispose else 1), + '-loop', '%d'%loop, '-', '-coalesce'] + + if (opt in [False, None]): + popen_params["stdin"] = proc1.stdout + popen_params["stdout"] = DEVNULL + proc2 = sp.Popen(cmd2+[filename], **popen_params) + + else: + popen_params["stdin"] = proc1.stdout + popen_params["stdout"] = sp.PIPE + proc2 = sp.Popen(cmd2+['gif:-'], **popen_params) + + if opt: + + cmd3 = [get_setting("IMAGEMAGICK_BINARY"), '-', + '-fuzz', '%d'%fuzz+'%', '-layers', opt + ]+(["-colors", "%d"%colors] if colors is not None else [])+[ + filename] + + popen_params["stdin"] = proc2.stdout + popen_params["stdout"] = DEVNULL + proc3 = sp.Popen(cmd3, **popen_params) + + # We send all the 
frames to the first process + logger(message='MoviePy - Building file %s' % filename) + logger(message='MoviePy - - Generating GIF frames.') + try: + for t,frame in clip.iter_frames(fps=fps, logger=logger, + with_times=True, dtype="uint8"): + if withmask: + mask = 255 * clip.mask.get_frame(t) + frame = np.dstack([frame, mask]).astype('uint8') + proc1.stdin.write(frame.tostring()) + + except IOError as err: + + error = ("[MoviePy] Error: creation of %s failed because " + "of the following error:\n\n%s.\n\n."%(filename, str(err))) + + if program == "ImageMagick": + error = error + ("This can be due to the fact that " + "ImageMagick is not installed on your computer, or " + "(for Windows users) that you didn't specify the " + "path to the ImageMagick binary in file config_defaults.py." ) + + raise IOError(error) + if program == 'ImageMagick': + logger(message='MoviePy - - Optimizing GIF with ImageMagick.') + proc1.stdin.close() + proc1.wait() + if program == 'ImageMagick': + proc2.wait() + if opt: + proc3.wait() + logger(message='MoviePy - - File ready: %s.' % filename) + + +def write_gif_with_image_io(clip, filename, fps=None, opt=0, loop=0, + colors=None, verbose=True, logger='bar'): + """ + Writes the gif with the Python library ImageIO (calls FreeImage). + + Parameters + ----------- + opt + + """ + + if colors is None: + colors = 256 + logger = proglog.default_bar_logger(logger) + + if not IMAGEIO_FOUND: + raise ImportError("Writing a gif with imageio requires ImageIO installed," + " with e.g. 'pip install imageio'") + + if fps is None: + fps = clip.fps + + quantizer = 0 if opt != 0 else 'nq' + + writer = imageio.save( + filename, + duration=1.0/fps, + quantizer=quantizer, + palettesize=colors, + loop=loop + ) + logger(message='MoviePy - Building file %s with imageio.' 
% filename) + + for frame in clip.iter_frames(fps=fps, logger=logger, dtype='uint8'): + + writer.append_data(frame) diff --git a/.venv/Lib/site-packages/moviepy/video/io/html_tools.py b/.venv/Lib/site-packages/moviepy/video/io/html_tools.py new file mode 100644 index 00000000..f24af513 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/io/html_tools.py @@ -0,0 +1,221 @@ +""" +This module implements ipython_display +A function to embed images/videos/audio in the IPython Notebook +""" + +# Notes: +# All media are physically embedded in the IPython Notebook +# (instead of simple links to the original files) +# That is because most browsers use a cache system and they won't +# properly refresh the media when the original files are changed. + +import os +from base64 import b64encode + +from moviepy.audio.AudioClip import AudioClip +from moviepy.tools import extensions_dict + +from ..VideoClip import ImageClip, VideoClip +from .ffmpeg_reader import ffmpeg_parse_infos + +try: + from IPython.display import HTML + ipython_available = True + class HTML2(HTML): + def __add__(self, other): + return HTML2(self.data+other.data) + +except ImportError: + ipython_available = False + + +sorry = "Sorry, seems like your browser doesn't support HTML5 audio/video" +templates = {"audio":(""), + "image":"", + "video":("")} + + +def html_embed(clip, filetype=None, maxduration=60, rd_kwargs=None, + center=True, **html_kwargs): + """ Returns HTML5 code embedding the clip + + clip + Either a file name, or a clip to preview. + Either an image, a sound or a video. Clips will actually be + written to a file and embedded as if a filename was provided. + + + filetype + One of 'video','image','audio'. If None is given, it is determined + based on the extension of ``filename``, but this can bug. + + rd_kwargs + keyword arguments for the rendering, like {'fps':15, 'bitrate':'50k'} + + + **html_kwargs + Allow you to give some options, like width=260, autoplay=True, + loop=1 etc. 
+ + Examples + ========= + + >>> import moviepy.editor as mpy + >>> # later ... + >>> clip.write_videofile("test.mp4") + >>> mpy.ipython_display("test.mp4", width=360) + + >>> clip.audio.write_audiofile('test.ogg') # Sound ! + >>> mpy.ipython_display('test.ogg') + + >>> clip.write_gif("test.gif") + >>> mpy.ipython_display('test.gif') + + >>> clip.save_frame("first_frame.jpeg") + >>> mpy.ipython_display("first_frame.jpeg") + + """ + + if rd_kwargs is None: + rd_kwargs = {} + + if "Clip" in str(clip.__class__): + TEMP_PREFIX = "__temp__" + if isinstance(clip,ImageClip): + filename = TEMP_PREFIX+".png" + kwargs = {'filename':filename, 'withmask':True} + kwargs.update(rd_kwargs) + clip.save_frame(**kwargs) + elif isinstance(clip,VideoClip): + filename = TEMP_PREFIX+".mp4" + kwargs = {'filename':filename, 'verbose':False, 'preset':'ultrafast'} + kwargs.update(rd_kwargs) + clip.write_videofile(**kwargs) + elif isinstance(clip,AudioClip): + filename = TEMP_PREFIX+".mp3" + kwargs = {'filename': filename, 'verbose':False} + kwargs.update(rd_kwargs) + clip.write_audiofile(**kwargs) + else: + raise ValueError("Unknown class for the clip. Cannot embed and preview.") + + return html_embed(filename, maxduration=maxduration, rd_kwargs=rd_kwargs, + center=center, **html_kwargs) + + filename = clip + options = " ".join(["%s='%s'"%(str(k), str(v)) for k,v in html_kwargs.items()]) + name, ext = os.path.splitext(filename) + ext = ext[1:] + + if filetype is None: + ext = filename.split('.')[-1].lower() + if ext == "gif": + filetype = 'image' + elif ext in extensions_dict: + filetype = extensions_dict[ext]['type'] + else: + raise ValueError("No file type is known for the provided file. 
Please provide " + "argument `filetype` (one of 'image', 'video', 'sound') to the " + "ipython display function.") + + + if filetype== 'video': + # The next lines set the HTML5-cvompatible extension and check that the + # extension is HTML5-valid + exts_htmltype = {'mp4': 'mp4', 'webm':'webm', 'ogv':'ogg'} + allowed_exts = " ".join(exts_htmltype.keys()) + try: + ext = exts_htmltype[ext] + except: + raise ValueError("This video extension cannot be displayed in the " + "IPython Notebook. Allowed extensions: "+allowed_exts) + + if filetype in ['audio', 'video']: + + duration = ffmpeg_parse_infos(filename)['duration'] + if duration > maxduration: + raise ValueError("The duration of video %s (%.1f) exceeds the 'maxduration' "%(filename, duration)+ + "attribute. You can increase 'maxduration', by passing 'maxduration' parameter" + "to ipython_display function." + "But note that embedding large videos may take all the memory away !") + + with open(filename, "rb") as f: + data= b64encode(f.read()).decode("utf-8") + + template = templates[filetype] + + result = template%{'data':data, 'options':options, 'ext':ext} + if center: + result = r"
%s
"%result + + return result + + +def ipython_display(clip, filetype=None, maxduration=60, t=None, fps=None, + rd_kwargs=None, center=True, **html_kwargs): + """ + clip + Either the name of a file, or a clip to preview. The clip will + actually be written to a file and embedded as if a filename was + provided. + + filetype: + One of 'video','image','audio'. If None is given, it is determined + based on the extension of ``filename``, but this can bug. + + maxduration + An error will be raised if the clip's duration is more than the indicated + value (in seconds), to avoid spoiling the browser's cache and the RAM. + + t + If not None, only the frame at time t will be displayed in the notebook, + instead of a video of the clip + + fps + Enables to specify an fps, as required for clips whose fps is unknown. + + **kwargs: + Allow you to give some options, like width=260, etc. When editing + looping gifs, a good choice is loop=1, autoplay=1. + + Remarks: If your browser doesn't support HTML5, this should warn you. + If nothing is displayed, maybe your file or filename is wrong. + Important: The media will be physically embedded in the notebook. + + Examples + ========= + + >>> import moviepy.editor as mpy + >>> # later ... + >>> clip.write_videofile("test.mp4") + >>> mpy.ipython_display("test.mp4", width=360) + + >>> clip.audio.write_audiofile('test.ogg') # Sound ! 
+ >>> mpy.ipython_display('test.ogg') + + >>> clip.write_gif("test.gif") + >>> mpy.ipython_display('test.gif') + + >>> clip.save_frame("first_frame.jpeg") + >>> mpy.ipython_display("first_frame.jpeg") + """ + + if not ipython_available: + raise ImportError("Only works inside an IPython Notebook") + + if rd_kwargs is None: + rd_kwargs = {} + + if fps is not None: + rd_kwargs['fps'] = fps + + if t is not None: + clip = clip.to_ImageClip(t) + + return HTML2(html_embed(clip, filetype=filetype, maxduration=maxduration, + center=center, rd_kwargs=rd_kwargs, **html_kwargs)) diff --git a/.venv/Lib/site-packages/moviepy/video/io/preview.py b/.venv/Lib/site-packages/moviepy/video/io/preview.py new file mode 100644 index 00000000..fbab22d4 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/io/preview.py @@ -0,0 +1,151 @@ +import threading +import time + +import numpy as np + +import pygame as pg +from moviepy.decorators import convert_masks_to_RGB, requires_duration +from moviepy.tools import cvsecs + +pg.init() +pg.display.set_caption('MoviePy') + + +def imdisplay(imarray, screen=None): + """Splashes the given image array on the given pygame screen """ + a = pg.surfarray.make_surface(imarray.swapaxes(0, 1)) + if screen is None: + screen = pg.display.set_mode(imarray.shape[:2][::-1]) + screen.blit(a, (0, 0)) + pg.display.flip() + + +@convert_masks_to_RGB +def show(clip, t=0, with_mask=True, interactive=False): + """ + Splashes the frame of clip corresponding to time ``t``. + + Parameters + ------------ + + t + Time in seconds of the frame to display. + + with_mask + ``False`` if the clip has a mask but you want to see the clip + without the mask. 
+ + """ + + if isinstance(t, tuple): + t = cvsecs(*t) + + if with_mask and (clip.mask is not None): + import moviepy.video.compositing.CompositeVideoClip as cvc + clip = cvc.CompositeVideoClip([clip.set_position((0, 0))]) + img = clip.get_frame(t) + imdisplay(img) + + if interactive: + result = [] + while True: + for event in pg.event.get(): + if event.type == pg.KEYDOWN: + if event.key == pg.K_ESCAPE: + print("Keyboard interrupt") + return result + elif event.type == pg.MOUSEBUTTONDOWN: + x, y = pg.mouse.get_pos() + rgb = img[y, x] + result.append({'position': (x, y), 'color': rgb}) + print("position, color : ", "%s, %s" % + (str((x, y)), str(rgb))) + time.sleep(.03) + + +@requires_duration +@convert_masks_to_RGB +def preview(clip, fps=15, audio=True, audio_fps=22050, audio_buffersize=3000, + audio_nbytes=2, fullscreen=False): + """ + Displays the clip in a window, at the given frames per second + (of movie) rate. It will avoid that the clip be played faster + than normal, but it cannot avoid the clip to be played slower + than normal if the computations are complex. In this case, try + reducing the ``fps``. + + Parameters + ------------ + + fps + Number of frames per seconds in the displayed video. + + audio + ``True`` (default) if you want the clip's audio be played during + the preview. + + audio_fps + The frames per second to use when generating the audio sound. + + fullscreen + ``True`` if you want the preview to be displayed fullscreen. + + """ + if fullscreen: + flags = pg.FULLSCREEN + else: + flags = 0 + + # compute and splash the first image + screen = pg.display.set_mode(clip.size, flags) + + audio = audio and (clip.audio is not None) + + if audio: + # the sound will be played in parrallel. We are not + # parralellizing it on different CPUs because it seems that + # pygame and openCV already use several cpus it seems. 
+ + # two synchro-flags to tell whether audio and video are ready + videoFlag = threading.Event() + audioFlag = threading.Event() + # launch the thread + audiothread = threading.Thread(target=clip.audio.preview, + args=(audio_fps, + audio_buffersize, + audio_nbytes, + audioFlag, videoFlag)) + audiothread.start() + + img = clip.get_frame(0) + imdisplay(img, screen) + if audio: # synchronize with audio + videoFlag.set() # say to the audio: video is ready + audioFlag.wait() # wait for the audio to be ready + + result = [] + + t0 = time.time() + for t in np.arange(1.0 / fps, clip.duration-.001, 1.0 / fps): + + img = clip.get_frame(t) + + for event in pg.event.get(): + if event.type == pg.QUIT or \ + (event.type == pg.KEYDOWN and event.key == pg.K_ESCAPE): + if audio: + videoFlag.clear() + print("Interrupt") + return result + + elif event.type == pg.MOUSEBUTTONDOWN: + x, y = pg.mouse.get_pos() + rgb = img[y, x] + result.append({'time': t, 'position': (x, y), + 'color': rgb}) + print("time, position, color : ", "%.03f, %s, %s" % + (t, str((x, y)), str(rgb))) + + t1 = time.time() + time.sleep(max(0, t - (t1-t0))) + imdisplay(img, screen) diff --git a/.venv/Lib/site-packages/moviepy/video/io/sliders.py b/.venv/Lib/site-packages/moviepy/video/io/sliders.py new file mode 100644 index 00000000..f94eae0e --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/io/sliders.py @@ -0,0 +1,74 @@ +import matplotlib.pyplot as plt +from matplotlib.widgets import Button, Slider + + +def sliders(f, sliders_properties, wait_for_validation = False): + """ A light GUI to manually explore and tune the outputs of + a function. 
+ slider_properties is a list of dicts (arguments for Slider ) + + def volume(x,y,z): + return x*y*z + + intervals = [ { 'label' : 'width', 'valmin': 1 , 'valmax': 5 }, + { 'label' : 'height', 'valmin': 1 , 'valmax': 5 }, + { 'label' : 'depth', 'valmin': 1 , 'valmax': 5 } ] + inputExplorer(volume,intervals) + """ + + nVars = len(sliders_properties) + slider_width = 1.0/nVars + + # CREATE THE CANVAS + + figure,ax = plt.subplots(1) + figure.canvas.set_window_title( "Inputs for '%s'"%(f.func_name) ) + + # choose an appropriate height + + width,height = figure.get_size_inches() + height = min(0.5*nVars,8) + figure.set_size_inches(width,height,forward = True) + + + # hide the axis + ax.set_frame_on(False) + ax.get_xaxis().set_visible(False) + ax.get_yaxis().set_visible(False) + + + # CREATE THE SLIDERS + + sliders = [] + + for i, properties in enumerate(sliders_properties): + ax = plt.axes([0.1 , 0.95-0.9*(i+1)*slider_width, + 0.8 , 0.8* slider_width]) + if not isinstance(properties,dict): + properties =dict(zip(['label','valmin', 'valmax', 'valinit'], + properties)) + sliders.append( Slider(ax=ax, **properties) ) + + + # CREATE THE CALLBACK FUNCTIONS + + def on_changed(event) : + res = f(*(s.val for s in sliders)) + if res is not None: + print( res ) + + def on_key_press(event): + if event.key is 'enter': + on_changed(event) + + figure.canvas.mpl_connect('key_press_event', on_key_press) + + # AUTOMATIC UPDATE ? 
+ + if not wait_for_validation: + for s in sliders : + s.on_changed(on_changed) + + + # DISPLAY THE SLIDERS + plt.show() diff --git a/.venv/Lib/site-packages/moviepy/video/tools/__init__.py b/.venv/Lib/site-packages/moviepy/video/tools/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..90443c10 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/credits.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/credits.cpython-311.pyc new file mode 100644 index 00000000..febd14cb Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/credits.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/cuts.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/cuts.cpython-311.pyc new file mode 100644 index 00000000..4e0b6179 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/cuts.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/drawing.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/drawing.cpython-311.pyc new file mode 100644 index 00000000..a7bd9450 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/drawing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/interpolators.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/interpolators.cpython-311.pyc new file mode 100644 index 00000000..4c6a479f Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/interpolators.cpython-311.pyc differ diff 
--git a/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/segmenting.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/segmenting.cpython-311.pyc new file mode 100644 index 00000000..f01c1fb8 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/segmenting.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/subtitles.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/subtitles.cpython-311.pyc new file mode 100644 index 00000000..1d6fbb90 Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/subtitles.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/tracking.cpython-311.pyc b/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/tracking.cpython-311.pyc new file mode 100644 index 00000000..9060759d Binary files /dev/null and b/.venv/Lib/site-packages/moviepy/video/tools/__pycache__/tracking.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/moviepy/video/tools/credits.py b/.venv/Lib/site-packages/moviepy/video/tools/credits.py new file mode 100644 index 00000000..a5adfbb1 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/tools/credits.py @@ -0,0 +1,118 @@ +""" +This module contains different functions to make end and opening +credits, even though it is difficult to fill everyone needs in this +matter. 
+""" + +from moviepy.video.compositing.CompositeVideoClip import CompositeVideoClip +from moviepy.video.fx.resize import resize +from moviepy.video.VideoClip import ImageClip, TextClip + + +def credits1(creditfile, width, stretch=30, color='white', stroke_color='black', + stroke_width=2, font='Impact-Normal', fontsize=60, gap=0): + """ + + Parameters + ----------- + + creditfile + A text file whose content must be as follows: :: + + # This is a comment + # The next line says : leave 4 blank lines + .blank 4 + + ..Executive Story Editor + MARCEL DURAND + + ..Associate Producers + MARTIN MARCEL + DIDIER MARTIN + + ..Music Supervisor + JEAN DIDIER + + width + Total width of the credits text in pixels + + gap + Horizontal gap in pixels between the jobs and the names + + color + Color of the text. See ``TextClip.list('color')`` + for a list of acceptable names. + + font + Name of the font to use. See ``TextClip.list('font')`` for + the list of fonts you can use on your computer. + + fontsize + Size of font to use + + stroke_color + Color of the stroke (=contour line) of the text. If ``None``, + there will be no stroke. + + stroke_width + Width of the stroke, in pixels. Can be a float, like 1.5. 
+ + + Returns + --------- + + image + An ImageClip instance that looks like this and can be scrolled + to make some credits: + + Executive Story Editor MARCEL DURAND + Associate Producers MARTIN MARCEL + DIDIER MARTIN + Music Supervisor JEAN DIDIER + + """ + + # PARSE THE TXT FILE + texts = [] + oneline = True + + with open(creditfile) as f: + for l in f: + if l.startswith(('\n', '#')): + # exclude blank lines or comments + continue + elif l.startswith('.blank'): + # ..blank n + for i in range(int(l.split(' ')[1])): + texts.append(['\n', '\n']) + elif l.startswith('..'): + texts.append([l[2:], '']) + oneline = True + elif oneline: + texts.append(['', l]) + oneline = False + else: + texts.append(['\n', l]) + + left, right = ("".join(l) for l in zip(*texts)) + + # MAKE TWO COLUMNS FOR THE CREDITS + left, right = [TextClip(txt, color=color, stroke_color=stroke_color, + stroke_width=stroke_width, font=font, + fontsize=fontsize, align=al) + for txt, al in [(left, 'East'), (right, 'West')]] + + cc = CompositeVideoClip([left, right.set_position((left.w + gap, 0))], + size=(left.w + right.w + gap, right.h), + bg_color=None) + + # SCALE TO THE REQUIRED SIZE + + scaled = resize(cc, width=width) + + # TRANSFORM THE WHOLE CREDIT CLIP INTO AN ImageCLip + + imclip = ImageClip(scaled.get_frame(0)) + amask = ImageClip(scaled.mask.get_frame(0), ismask=True) + + return imclip.set_mask(amask) diff --git a/.venv/Lib/site-packages/moviepy/video/tools/cuts.py b/.venv/Lib/site-packages/moviepy/video/tools/cuts.py new file mode 100644 index 00000000..de75f788 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/tools/cuts.py @@ -0,0 +1,328 @@ +""" This module contains everything that can help automatize +the cuts in MoviePy """ + +from collections import defaultdict + +import numpy as np + +from moviepy.decorators import use_clip_fps_by_default + + +@use_clip_fps_by_default +def find_video_period(clip,fps=None,tmin=.3): + """ Finds the period of a video based on frames correlation 
""" + + frame = lambda t: clip.get_frame(t).flatten() + tt = np.arange(tmin, clip.duration, 1.0/ fps)[1:] + ref = frame(0) + corrs = [ np.corrcoef(ref, frame(t))[0,1] for t in tt] + return tt[np.argmax(corrs)] + + +class FramesMatch: + """ + + Parameters + ----------- + + t1 + Starting time + + t2 + End time + + d_min + Lower bound on the distance between the first and last frames + + d_max + Upper bound on the distance between the first and last frames + + """ + + def __init__(self, t1, t2, d_min, d_max): + self.t1 = t1 + self.t2 = t2 + self.d_min = d_min + self.d_max = d_max + self.time_span = t2-t1 + + def __str__(self): + + return '(%.04f, %.04f, %.04f, %.04f)'%( + self.t1, self.t2, self.d_min, self.d_max) + + def __repr__(self): + return '(%.04f, %.04f, %.04f, %.04f)'%( + self.t1, self.t2, self.d_min, self.d_max) + + def __iter__(self): + return iter((self.t1, self.t2, self.d_min, self.d_max)) + + +class FramesMatches(list): + + def __init__(self, lst): + + list.__init__(self, sorted(lst, key=lambda e: e.d_max)) + + def best(self, n=1, percent=None): + if percent is not None: + n = len(self)*percent/100 + return self[0] if n==1 else FramesMatches(self[:n]) + + def filter(self, cond): + """ + Returns a FramesMatches object obtained by filtering out the FramesMatch + which do not satistify the condition ``cond``. ``cond`` is a function + (FrameMatch -> bool). + + Examples + --------- + >>> # Only keep the matches corresponding to (> 1 second) sequences. + >>> new_matches = matches.filter( lambda match: match.time_span > 1) + """ + return FramesMatches(filter(cond, self)) + + def save(self, filename): + np.savetxt(filename, np.array([np.array(list(e)) for e in self]), + fmt='%.03f', delimiter='\t') + + @staticmethod + def load(filename): + """ Loads a FramesMatches object from a file. 
+ >>> matching_frames = FramesMatches.load("somefile") + """ + arr = np.loadtxt(filename) + mfs = [FramesMatch(*e) for e in arr] + return FramesMatches(mfs) + + + + @staticmethod + def from_clip(clip, dist_thr, max_d, fps=None): + """ Finds all the frames tht look alike in a clip, for instance to make a + looping gif. + + This teturns a FramesMatches object of the all pairs of frames with + (t2-t1 < max_d) and whose distance is under dist_thr. + + This is well optimized routine and quite fast. + + Examples + --------- + + We find all matching frames in a given video and turn the best match with + a duration of 1.5s or more into a GIF: + + >>> from moviepy.editor import VideoFileClip + >>> from moviepy.video.tools.cuts import find_matching_frames + >>> clip = VideoFileClip("foo.mp4").resize(width=200) + >>> matches = find_matching_frames(clip, 10, 3) # will take time + >>> best = matches.filter(lambda m: m.time_span > 1.5).best() + >>> clip.subclip(best.t1, best.t2).write_gif("foo.gif") + + Parameters + ----------- + + clip + A MoviePy video clip, possibly transformed/resized + + dist_thr + Distance above which a match is rejected + + max_d + Maximal duration (in seconds) between two matching frames + + fps + Frames per second (default will be clip.fps) + + """ + + N_pixels = clip.w * clip.h * 3 + dot_product = lambda F1, F2: (F1*F2).sum()/N_pixels + F = {} # will store the frames and their mutual distances + + def distance(t1, t2): + uv = dot_product(F[t1]['frame'], F[t2]['frame']) + u, v = F[t1]['|F|sq'], F[t2]['|F|sq'] + return np.sqrt(u+v - 2*uv) + + matching_frames = [] # the final result. 
+ + for (t,frame) in clip.iter_frames(with_times=True, logger='bar'): + + flat_frame = 1.0*frame.flatten() + F_norm_sq = dot_product(flat_frame, flat_frame) + F_norm = np.sqrt(F_norm_sq) + + for t2 in list(F.keys()): + # forget old frames, add 't' to the others frames + # check for early rejections based on differing norms + if (t-t2) > max_d: + F.pop(t2) + else: + F[t2][t] = {'min':abs(F[t2]['|F|'] - F_norm), + 'max':F[t2]['|F|'] + F_norm} + F[t2][t]['rejected']= (F[t2][t]['min'] > dist_thr) + + t_F = sorted(F.keys()) + + F[t] = {'frame': flat_frame, '|F|sq': F_norm_sq, '|F|': F_norm} + + for i,t2 in enumerate(t_F): + # Compare F(t) to all the previous frames + + if F[t2][t]['rejected']: + continue + + dist = distance(t, t2) + F[t2][t]['min'] = F[t2][t]['max'] = dist + F[t2][t]['rejected'] = (dist >= dist_thr) + + for t3 in t_F[i+1:]: + # For all the next times t3, use d(F(t), F(t2)) to + # update the bounds on d(F(t), F(t3)). See if you can + # conclude on wether F(t) and F(t3) match. + t3t, t2t3 = F[t3][t], F[t2][t3] + t3t['max'] = min(t3t['max'], dist+ t2t3['max']) + t3t['min'] = max(t3t['min'], dist - t2t3['max'], + t2t3['min'] - dist) + + if t3t['min'] > dist_thr: + t3t['rejected'] = True + + # Store all the good matches (t2,t) + matching_frames += [(t1, t, F[t1][t]['min'], F[t1][t]['max']) for t1 in F + if (t1!=t) and not F[t1][t]['rejected']] + + return FramesMatches([FramesMatch(*e) for e in matching_frames]) + + + + def select_scenes(self, match_thr, min_time_span, nomatch_thr=None, + time_distance=0): + """ + + match_thr + The smaller, the better-looping the gifs are. + + min_time_span + Only GIFs with a duration longer than min_time_span (in seconds) + will be extracted. 
+ + nomatch_thr + If None, then it is chosen equal to match_thr + + """ + + if nomatch_thr is None: + nomatch_thr = match_thr + + dict_starts = defaultdict(lambda : []) + for (start, end, d_min, d_max) in self: + dict_starts[start].append([end, d_min, d_max]) + + starts_ends = sorted(dict_starts.items(), key = lambda k: k[0]) + + result = [] + min_start= 0 + for start, ends_distances in starts_ends: + + if start < min_start: + continue + + ends = [end for (end, d_min, d_max) in ends_distances] + great_matches = [(end,d_min, d_max) + for (end,d_min, d_max) in ends_distances + if d_maxmin_time_span] + + + if not great_long_matches: + continue # No GIF can be made starting at this time + + poor_matches = {end for (end,d_min, d_max) in ends_distances if d_min > nomatch_thr} + short_matches = {end for end in ends if (end-start) <= 0.6} + + if not poor_matches.intersection(short_matches): + continue + + end = max(end for (end, d_min, d_max) in great_long_matches) + end, d_min, d_max = next(e for e in great_long_matches if e[0]==end) + + result.append(FramesMatch(start, end, d_min, d_max)) + min_start = start + time_distance + + return FramesMatches(result) + + + def write_gifs(self, clip, gif_dir): + for (start, end, _, _) in self: + name = "%s/%08d_%08d.gif" % (gif_dir, 100*start, 100*end) + clip.subclip(start, end).write_gif(name, verbose=False) + + + + +@use_clip_fps_by_default +def detect_scenes(clip=None, luminosities=None, thr=10, + logger='bar', fps=None): + """ Detects scenes of a clip based on luminosity changes. + + Note that for large clip this may take some time + + Returns + -------- + cuts, luminosities + cuts is a series of cuts [(0,t1), (t1,t2),...(...,tf)] + luminosities are the luminosities computed for each + frame of the clip. + + Parameters + ----------- + + clip + A video clip. Can be None if a list of luminosities is + provided instead. If provided, the luminosity of each + frame of the clip will be computed. 
If the clip has no + 'fps' attribute, you must provide it. + + luminosities + A list of luminosities, e.g. returned by detect_scenes + in a previous run. + + thr + Determines a threshold above which the 'luminosity jumps' + will be considered as scene changes. A scene change is defined + as a change between 2 consecutive frames that is larger than + (avg * thr) where avg is the average of the absolute changes + between consecutive frames. + + progress_bar + We all love progress bars ! Here is one for you, in option. + + fps + Must be provided if you provide no clip or a clip without + fps attribute. + + + """ + if luminosities is None: + luminosities = [f.sum() for f in clip.iter_frames( + fps=fps, dtype='uint32', logger=logger)] + + luminosities = np.array(luminosities, dtype=float) + if clip is not None: + end = clip.duration + else: + end = len(luminosities)*(1.0/fps) + lum_diffs = abs(np.diff(luminosities)) + avg = lum_diffs.mean() + luminosity_jumps = 1+np.array(np.nonzero(lum_diffs> thr*avg))[0] + tt = [0]+list((1.0/fps) *luminosity_jumps) + [end] + #print tt + cuts = [(t1,t2) for t1,t2 in zip(tt,tt[1:])] + return cuts, luminosities diff --git a/.venv/Lib/site-packages/moviepy/video/tools/drawing.py b/.venv/Lib/site-packages/moviepy/video/tools/drawing.py new file mode 100644 index 00000000..fdc37055 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/tools/drawing.py @@ -0,0 +1,269 @@ +""" +This module deals with making images (np arrays). It provides drawing +methods that are difficult to do with the existing Python libraries. +""" + +import numpy as np + + +def blit(im1, im2, pos=None, mask=None, ismask=False): + """ Blit an image over another. + + Blits ``im1`` on ``im2`` as position ``pos=(x,y)``, using the + ``mask`` if provided. If ``im1`` and ``im2`` are mask pictures + (2D float arrays) then ``ismask`` must be ``True``. 
+ """ + if pos is None: + pos = [0, 0] + + # xp1,yp1,xp2,yp2 = blit area on im2 + # x1,y1,x2,y2 = area of im1 to blit on im2 + xp, yp = pos + x1 = max(0, -xp) + y1 = max(0, -yp) + h1, w1 = im1.shape[:2] + h2, w2 = im2.shape[:2] + xp2 = min(w2, xp + w1) + yp2 = min(h2, yp + h1) + x2 = min(w1, w2 - xp) + y2 = min(h1, h2 - yp) + xp1 = max(0, xp) + yp1 = max(0, yp) + + if (xp1 >= xp2) or (yp1 >= yp2): + return im2 + + blitted = im1[y1:y2, x1:x2] + + new_im2 = +im2 + + if mask is None: + new_im2[yp1:yp2, xp1:xp2] = blitted + else: + mask = mask[y1:y2, x1:x2] + if len(im1.shape) == 3: + mask = np.dstack(3 * [mask]) + blit_region = new_im2[yp1:yp2, xp1:xp2] + new_im2[yp1:yp2, xp1:xp2] = (1.0 * mask * blitted + (1.0 - mask) * blit_region) + + return new_im2.astype('uint8') if (not ismask) else new_im2 + + + +def color_gradient(size,p1,p2=None,vector=None, r=None, col1=0,col2=1.0, + shape='linear', offset = 0): + """Draw a linear, bilinear, or radial gradient. + + The result is a picture of size ``size``, whose color varies + gradually from color `col1` in position ``p1`` to color ``col2`` + in position ``p2``. + + If it is a RGB picture the result must be transformed into + a 'uint8' array to be displayed normally: + + + Parameters + ------------ + + size + Size (width, height) in pixels of the final picture/array. + + p1, p2 + Coordinates (x,y) in pixels of the limit point for ``col1`` + and ``col2``. The color 'before' ``p1`` is ``col1`` and it + gradually changes in the direction of ``p2`` until it is ``col2`` + when it reaches ``p2``. + + vector + A vector [x,y] in pixels that can be provided instead of ``p2``. + ``p2`` is then defined as (p1 + vector). + + col1, col2 + Either floats between 0 and 1 (for gradients used in masks) + or [R,G,B] arrays (for colored gradients). + + shape + 'linear', 'bilinear', or 'circular'. + In a linear gradient the color varies in one direction, + from point ``p1`` to point ``p2``. 
+ In a bilinear gradient it also varies symetrically form ``p1`` + in the other direction. + In a circular gradient it goes from ``col1`` to ``col2`` in all + directions. + + offset + Real number between 0 and 1 indicating the fraction of the vector + at which the gradient actually starts. For instance if ``offset`` + is 0.9 in a gradient going from p1 to p2, then the gradient will + only occur near p2 (before that everything is of color ``col1``) + If the offset is 0.9 in a radial gradient, the gradient will + occur in the region located between 90% and 100% of the radius, + this creates a blurry disc of radius d(p1,p2). + + Returns + -------- + + image + An Numpy array of dimensions (W,H,ncolors) of type float + representing the image of the gradient. + + + Examples + --------- + + >>> grad = color_gradient(blabla).astype('uint8') + + """ + + # np-arrayize and change x,y coordinates to y,x + w,h = size + + col1 = np.array(col1).astype(float) + col2 = np.array(col2).astype(float) + + if shape == 'bilinear': + if vector is None: + vector = np.array(p2) - np.array(p1) + + m1, m2 = [ color_gradient(size, p1, vector=v, col1 = 1.0, col2 = 0, + shape = 'linear', offset= offset) + for v in [vector,-vector]] + + arr = np.maximum(m1, m2) + if col1.size > 1: + arr = np.dstack(3*[arr]) + return arr*col1 + (1-arr)*col2 + + + p1 = np.array(p1[::-1]).astype(float) + + if vector is None and p2: + p2 = np.array(p2[::-1]) + vector = p2-p1 + else: + vector = np.array(vector[::-1]) + p2 = p1 + vector + + if vector: + norm = np.linalg.norm(vector) + + M = np.dstack(np.meshgrid(range(w),range(h))[::-1]).astype(float) + + if shape == 'linear': + + n_vec = vector/norm**2 # norm 1/norm(vector) + + p1 = p1 + offset*vector + arr = (M- p1).dot(n_vec)/(1-offset) + arr = np.minimum(1,np.maximum(0,arr)) + if col1.size > 1: + arr = np.dstack(3*[arr]) + return arr*col1 + (1-arr)*col2 + + elif shape == 'radial': + if r is None: + r = norm + + if r == 0: + arr = np.ones((h,w)) + else: + arr = 
(np.sqrt(((M - p1) ** 2).sum(axis=2))) - offset * r + arr = arr / ((1-offset)*r) + arr = np.minimum(1.0, np.maximum(0, arr)) + + if col1.size > 1: + arr = np.dstack(3*[arr]) + return (1-arr)*col1 + arr*col2 + + +def color_split(size,x=None,y=None,p1=None,p2=None,vector=None, + col1=0,col2=1.0, grad_width=0): + """Make an image splitted in 2 colored regions. + + Returns an array of size ``size`` divided in two regions called 1 and + 2 in wht follows, and which will have colors col& and col2 + respectively. + + Parameters + ----------- + + x: (int) + If provided, the image is splitted horizontally in x, the left + region being region 1. + + y: (int) + If provided, the image is splitted vertically in y, the top region + being region 1. + + p1,p2: + Positions (x1,y1),(x2,y2) in pixels, where the numbers can be + floats. Region 1 is defined as the whole region on the left when + going from ``p1`` to ``p2``. + + p1, vector: + ``p1`` is (x1,y1) and vector (v1,v2), where the numbers can be + floats. Region 1 is then the region on the left when starting + in position ``p1`` and going in the direction given by ``vector``. + + gradient_width + If not zero, the split is not sharp, but gradual over a region of + width ``gradient_width`` (in pixels). This is preferable in many + situations (for instance for antialiasing). 
+ + + Examples + --------- + + >>> size = [200,200] + >>> # an image with all pixels with x<50 =0, the others =1 + >>> color_split(size, x=50, col1=0, col2=1) + >>> # an image with all pixels with y<50 red, the others green + >>> color_split(size, x=50, col1=[255,0,0], col2=[0,255,0]) + >>> # An image splitted along an arbitrary line (see below) + >>> color_split(size, p1=[20,50], p2=[25,70] col1=0, col2=1) + + """ + + if grad_width or ( (x is None) and (y is None)): + if p2 is not None: + vector = (np.array(p2) - np.array(p1)) + elif x is not None: + vector = np.array([0,-1.0]) + p1 = np.array([x, 0]) + elif y is not None: + vector = np.array([1.0, 0.0]) + p1 = np.array([0,y]) + + x,y = vector + vector = np.array([y,-x]).astype('float') + norm = np.linalg.norm(vector) + vector = max(0.1, grad_width) * vector / norm + return color_gradient(size,p1,vector=vector, + col1 = col1, col2 = col2, shape='linear') + else: + w, h = size + shape = (h, w) if np.isscalar(col1) else (h, w, len(col1)) + arr = np.zeros(shape) + if x: + arr[:,:x] = col1 + arr[:,x:] = col2 + elif y: + arr[:y] = col1 + arr[y:] = col2 + return arr + + # if we are here, it means we didn't exit with a proper 'return' + print( "Arguments in color_split not understood !" ) + raise + +def circle(screensize, center, radius, col1=1.0, col2=0, blur=1): + """ Draw an image with a circle. 
+ + Draws a circle of color ``col1``, on a background of color ``col2``, + on a screen of size ``screensize`` at the position ``center=(x,y)``, + with a radius ``radius`` but slightly blurred on the border by ``blur`` + pixels + """ + offset = 1.0*(radius-blur)/radius if radius else 0 + return color_gradient(screensize,p1=center,r=radius, col1=col1, + col2=col2, shape='radial', offset=offset) diff --git a/.venv/Lib/site-packages/moviepy/video/tools/interpolators.py b/.venv/Lib/site-packages/moviepy/video/tools/interpolators.py new file mode 100644 index 00000000..9d1f4955 --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/tools/interpolators.py @@ -0,0 +1,73 @@ +""" +Classes for easy interpolation of trajectories and Curves. +Requires Scipy installed. +""" + +import numpy as np + + +class Interpolator: + """ Poorman's linear interpolator, doesn't require Scipy. """ + + def __init__(self, tt=None, ss=None, ttss = None, left=None, right=None): + + if ttss is not None: + tt, ss = zip(*ttss) + + self.tt = 1.0*np.array(tt) + self.ss = 1.0*np.array(ss) + self.left = left + self.right = right + self.tmin, self.tmax = min(tt), max(tt) + + def __call__(self, t): + return np.interp(t, self.tt, self.ss, self.left, self.right) + +class Trajectory: + + def __init__(self, tt, xx, yy): + + self.tt = 1.0*np.array(tt) + self.xx = np.array(xx) + self.yy = np.array(yy) + self.update_interpolators() + + def __call__(self, t): + return np.array([self.xi(t), self.yi(t)]) + + def addx(self, x): + return Trajectory(self.tt, self.xx+x, self.yy) + + def addy(self, y): + return Trajectory(self.tt, self.xx, self.yy+y) + + def update_interpolators(self): + self.xi = Interpolator(self.tt, self.xx) + self.yi = Interpolator(self.tt, self.yy) + + def txy(self, tms=False): + return zip((1000 if tms else 1)*self.tt, self.xx, self.yy) + + def to_file(self, filename): + np.savetxt(filename, np.array(self.txy(tms=True)), + fmt="%d", delimiter='\t') + + @staticmethod + def from_file(filename): + 
arr = np.loadtxt(filename, delimiter='\t') + tt, xx, yy = arr.T + return Trajectory(1.0*tt/1000, xx, yy) + + @staticmethod + def save_list(trajs, filename): + N = len(trajs) + arr = np.hstack([np.array(list(t.txy(tms=True))) for t in trajs]) + np.savetxt( filename, arr, fmt="%d", delimiter='\t', + header = "\t".join(N*['t(ms)', 'x', 'y'])) + + @staticmethod + def load_list(filename): + arr = np.loadtxt(filename, delimiter='\t').T + Nlines = arr.shape[0] + return [Trajectory(tt=1.0*a[0]/1000, xx=a[1], yy=a[2]) + for a in np.split(arr, Nlines/3)] diff --git a/.venv/Lib/site-packages/moviepy/video/tools/segmenting.py b/.venv/Lib/site-packages/moviepy/video/tools/segmenting.py new file mode 100644 index 00000000..1e6c06bf --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/tools/segmenting.py @@ -0,0 +1,59 @@ +import numpy as np + +import scipy.ndimage as ndi +from moviepy.video.VideoClip import ImageClip + + +def findObjects(clip,rem_thr=500, preview=False): + """ + Returns a list of ImageClips representing each a separate object on + the screen. + + rem_thr : all objects found with size < rem_Thr will be + considered false positives and will be removed + + """ + + image = clip.get_frame(0) + if not clip.mask: + clip = clip.add_mask() + + mask = clip.mask.get_frame(0) + labelled, num_features = ndi.measurements.label(image[:,:,0]) + + #find the objects + slices = [] + for e in ndi.find_objects(labelled): + if mask[e[0],e[1]].mean() <= 0.2: + # remove letter holes (in o,e,a, etc.) 
+ continue + if image[e[0],e[1]].size <= rem_thr: + # remove very small slices + continue + slices.append(e) + islices = sorted(enumerate(slices), key = lambda s : s[1][1].start) + + letters = [] + for i,(ind,(sy,sx)) in enumerate(islices): + """ crop each letter separately """ + sy = slice(sy.start-1,sy.stop+1) + sx = slice(sx.start-1,sx.stop+1) + letter = image[sy,sx] + labletter = labelled[sy,sx] + maskletter = (labletter==(ind+1))*mask[sy,sx] + letter = ImageClip(image[sy,sx]) + letter.mask = ImageClip( maskletter,ismask=True) + letter.screenpos = np.array((sx.start,sy.start)) + letters.append(letter) + + if preview: + import matplotlib.pyplot as plt + print( "found %d objects"%(num_features) ) + fig,ax = plt.subplots(2) + ax[0].axis('off') + ax[0].imshow(labelled) + ax[1].imshow([range(num_features)],interpolation='nearest') + ax[1].set_yticks([]) + plt.show() + + return letters diff --git a/.venv/Lib/site-packages/moviepy/video/tools/subtitles.py b/.venv/Lib/site-packages/moviepy/video/tools/subtitles.py new file mode 100644 index 00000000..34a8615c --- /dev/null +++ b/.venv/Lib/site-packages/moviepy/video/tools/subtitles.py @@ -0,0 +1,163 @@ +""" Experimental module for subtitles support. """ + +import re + +import numpy as np + +from moviepy.tools import cvsecs +from moviepy.video.VideoClip import TextClip, VideoClip + + +class SubtitlesClip(VideoClip): + """ A Clip that serves as "subtitle track" in videos. + + One particularity of this class is that the images of the + subtitle texts are not generated beforehand, but only if + needed. 
+ + Parameters + ========== + + subtitles + Either the name of a file, or a list + + Examples + ========= + + >>> from moviepy.video.tools.subtitles import SubtitlesClip + >>> from moviepy.video.io.VideoFileClip import VideoFileClip + >>> generator = lambda txt: TextClip(txt, font='Georgia-Regular', fontsize=24, color='white') + >>> sub = SubtitlesClip("subtitles.srt", generator) + >>> myvideo = VideoFileClip("myvideo.avi") + >>> final = CompositeVideoClip([clip, subtitles]) + >>> final.write_videofile("final.mp4", fps=myvideo.fps) + + """ + + def __init__(self, subtitles, make_textclip=None): + + VideoClip.__init__(self, has_constant_size=False) + + if isinstance(subtitles, str): + subtitles = file_to_subtitles(subtitles) + + #subtitles = [(map(cvsecs, tt),txt) for tt, txt in subtitles] + self.subtitles = subtitles + self.textclips = dict() + + if make_textclip is None: + make_textclip = lambda txt: TextClip(txt, font='Georgia-Bold', + fontsize=24, color='white', + stroke_color='black', stroke_width=0.5) + + self.make_textclip = make_textclip + self.start=0 + self.duration = max([tb for ((ta,tb), txt) in self.subtitles]) + self.end=self.duration + + def add_textclip_if_none(t): + """ Will generate a textclip if it hasn't been generated asked + to generate it yet. If there is no subtitle to show at t, return + false. """ + sub =[((ta,tb),txt) for ((ta,tb),txt) in self.textclips.keys() + if (ta<=t>> from moviepy.editor import VideoFileClip + >>> from moviepy.video.tools.tracking import manual_tracking + >>> clip = VideoFileClip("myvideo.mp4") + >>> # manually indicate 3 trajectories, save them to a file + >>> trajectories = manual_tracking(clip, t1=5, t2=7, fps=5, + nobjects=3, savefile="track.txt") + >>> # ... 
+ >>> # LATER, IN ANOTHER SCRIPT, RECOVER THESE TRAJECTORIES + >>> from moviepy.video.tools.tracking import Trajectory + >>> traj1, traj2, traj3 = Trajectory.load_list('track.txt') + >>> # If ever you only have one object being tracked, recover it with + >>> traj, = Trajectory.load_list('track.txt') + + """ + + import pygame as pg + + screen = pg.display.set_mode(clip.size) + step = 1.0 / fps + if (t1 is None) and (t2 is None): + t1,t2 = 0, clip.duration + elif (t2 is None): + t2 = t1 + step / 2 + t = t1 + txy_list = [] + + def gatherClicks(t): + + imdisplay(clip.get_frame(t), screen) + objects_to_click = nobjects + clicks = [] + while objects_to_click: + + for event in pg.event.get(): + + if event.type == pg.KEYDOWN: + if (event.key == pg.K_BACKSLASH): + return "return" + elif (event.key == pg.K_ESCAPE): + raise KeyboardInterrupt() + + + elif event.type == pg.MOUSEBUTTONDOWN: + x, y = pg.mouse.get_pos() + clicks.append((x, y)) + objects_to_click -= 1 + + return clicks + + while t < t2: + + clicks =gatherClicks(t) + if clicks == 'return': + txy_list.pop() + t -= step + else: + txy_list.append((t,clicks)) + t += step + + tt, xylist = zip(*txy_list) + result = [] + for i in range(nobjects): + xys = [e[i] for e in xylist] + xx, yy = zip(*xys) + result.append(Trajectory(tt, xx, yy)) + + if savefile is not None: + Trajectory.save_list(result, savefile) + return result + + +# AUTOMATED TRACKING OF A PATTERN + +def findAround(pic,pat,xy=None,r=None): + """ + find image pattern ``pat`` in ``pic[x +/- r, y +/- r]``. + if xy is none, consider the whole picture. + """ + + if xy and r: + h,w = pat.shape[:2] + x,y = xy + pic = pic[y-r : y+h+r , x-r : x+w+r] + + matches = cv2.matchTemplate(pat,pic,cv2.TM_CCOEFF_NORMED) + yf,xf = np.unravel_index(matches.argmax(),matches.shape) + return (x-r+xf,y-r+yf) if (xy and r) else (xf,yf) + + +def autoTrack(clip, pattern, tt=None, fps=None, radius=20, xy0=None): + """ + Tracks a given pattern (small image array) in a video clip. 
+ Returns [(x1,y1),(x2,y2)...] where xi,yi are + the coordinates of the pattern in the clip on frame i. + To select the frames you can either specify a list of times with ``tt`` + or select a frame rate with ``fps``. + This algorithm assumes that the pattern's aspect does not vary much + and that the distance between two occurences of the pattern in + two consecutive frames is smaller than ``radius`` (if you set ``radius`` + to -1 the pattern will be searched in the whole screen at each frame). + You can also provide the original position of the pattern with xy0. + """ + + if not autotracking_possible: + raise IOError("Sorry, autotrack requires OpenCV for the moment. " + "Install OpenCV (aka cv2) to use it.") + + + if not xy0: + xy0 = findAround(clip.get_frame(tt[0]),pattern) + + if tt is None: + tt = np.arange(0, clip.duration, 1.0/fps) + + xys = [xy0] + for t in tt[1:]: + xys.append( findAround(clip.get_frame(t),pattern, + xy=xys[-1],r=radius)) + + xx,yy = zip(*xys) + + return Trajectory(tt, xx, yy) diff --git a/.venv/Lib/site-packages/mpl_toolkits/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/mpl_toolkits/__pycache__/__init__.cpython-311.pyc index 9d12ff4c..cbf4509b 100644 Binary files a/.venv/Lib/site-packages/mpl_toolkits/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/mpl_toolkits/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpl_toolkits/mplot3d/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/mpl_toolkits/mplot3d/__pycache__/__init__.cpython-311.pyc index 5d1c8b51..2267a82b 100644 Binary files a/.venv/Lib/site-packages/mpl_toolkits/mplot3d/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/mpl_toolkits/mplot3d/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpl_toolkits/mplot3d/__pycache__/art3d.cpython-311.pyc b/.venv/Lib/site-packages/mpl_toolkits/mplot3d/__pycache__/art3d.cpython-311.pyc index 2533240f..a516e129 100644 
Binary files a/.venv/Lib/site-packages/mpl_toolkits/mplot3d/__pycache__/art3d.cpython-311.pyc and b/.venv/Lib/site-packages/mpl_toolkits/mplot3d/__pycache__/art3d.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpl_toolkits/mplot3d/__pycache__/axes3d.cpython-311.pyc b/.venv/Lib/site-packages/mpl_toolkits/mplot3d/__pycache__/axes3d.cpython-311.pyc index 767bee4d..b640eefd 100644 Binary files a/.venv/Lib/site-packages/mpl_toolkits/mplot3d/__pycache__/axes3d.cpython-311.pyc and b/.venv/Lib/site-packages/mpl_toolkits/mplot3d/__pycache__/axes3d.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpl_toolkits/mplot3d/__pycache__/axis3d.cpython-311.pyc b/.venv/Lib/site-packages/mpl_toolkits/mplot3d/__pycache__/axis3d.cpython-311.pyc index e9704dc5..145d84d4 100644 Binary files a/.venv/Lib/site-packages/mpl_toolkits/mplot3d/__pycache__/axis3d.cpython-311.pyc and b/.venv/Lib/site-packages/mpl_toolkits/mplot3d/__pycache__/axis3d.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpl_toolkits/mplot3d/__pycache__/proj3d.cpython-311.pyc b/.venv/Lib/site-packages/mpl_toolkits/mplot3d/__pycache__/proj3d.cpython-311.pyc index 83d8badc..7c54fe5e 100644 Binary files a/.venv/Lib/site-packages/mpl_toolkits/mplot3d/__pycache__/proj3d.cpython-311.pyc and b/.venv/Lib/site-packages/mpl_toolkits/mplot3d/__pycache__/proj3d.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/__pycache__/__init__.cpython-311.pyc index f33fc54e..a591b33e 100644 Binary files a/.venv/Lib/site-packages/mpmath/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/__pycache__/ctx_base.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/__pycache__/ctx_base.cpython-311.pyc index fb11fabd..ac55bba8 100644 Binary files a/.venv/Lib/site-packages/mpmath/__pycache__/ctx_base.cpython-311.pyc and 
b/.venv/Lib/site-packages/mpmath/__pycache__/ctx_base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/__pycache__/ctx_fp.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/__pycache__/ctx_fp.cpython-311.pyc index f48fc024..dd02c0e2 100644 Binary files a/.venv/Lib/site-packages/mpmath/__pycache__/ctx_fp.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/__pycache__/ctx_fp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/__pycache__/ctx_iv.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/__pycache__/ctx_iv.cpython-311.pyc index aa3d83c1..a753326c 100644 Binary files a/.venv/Lib/site-packages/mpmath/__pycache__/ctx_iv.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/__pycache__/ctx_iv.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/__pycache__/ctx_mp.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/__pycache__/ctx_mp.cpython-311.pyc index 951a4c58..e1f45bed 100644 Binary files a/.venv/Lib/site-packages/mpmath/__pycache__/ctx_mp.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/__pycache__/ctx_mp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/__pycache__/ctx_mp_python.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/__pycache__/ctx_mp_python.cpython-311.pyc index 1e73b51e..c6104ebc 100644 Binary files a/.venv/Lib/site-packages/mpmath/__pycache__/ctx_mp_python.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/__pycache__/ctx_mp_python.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/__pycache__/function_docs.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/__pycache__/function_docs.cpython-311.pyc index 8841679a..1e956249 100644 Binary files a/.venv/Lib/site-packages/mpmath/__pycache__/function_docs.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/__pycache__/function_docs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/__pycache__/identification.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/__pycache__/identification.cpython-311.pyc index 
14a0d184..cbde4cc5 100644 Binary files a/.venv/Lib/site-packages/mpmath/__pycache__/identification.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/__pycache__/identification.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/__pycache__/math2.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/__pycache__/math2.cpython-311.pyc index 773f7cad..344b8a3f 100644 Binary files a/.venv/Lib/site-packages/mpmath/__pycache__/math2.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/__pycache__/math2.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/__pycache__/rational.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/__pycache__/rational.cpython-311.pyc index 9b7f6c6b..e17864eb 100644 Binary files a/.venv/Lib/site-packages/mpmath/__pycache__/rational.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/__pycache__/rational.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/__pycache__/usertools.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/__pycache__/usertools.cpython-311.pyc index 47f9a060..6a9a3384 100644 Binary files a/.venv/Lib/site-packages/mpmath/__pycache__/usertools.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/__pycache__/usertools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/__pycache__/visualization.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/__pycache__/visualization.cpython-311.pyc index 2d6b4d03..5f276d54 100644 Binary files a/.venv/Lib/site-packages/mpmath/__pycache__/visualization.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/__pycache__/visualization.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/calculus/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/calculus/__pycache__/__init__.cpython-311.pyc index 40359858..e3fd71fd 100644 Binary files a/.venv/Lib/site-packages/mpmath/calculus/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/calculus/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/mpmath/calculus/__pycache__/approximation.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/calculus/__pycache__/approximation.cpython-311.pyc index a88a6468..12dbb967 100644 Binary files a/.venv/Lib/site-packages/mpmath/calculus/__pycache__/approximation.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/calculus/__pycache__/approximation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/calculus/__pycache__/calculus.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/calculus/__pycache__/calculus.cpython-311.pyc index 7a25019d..09b35c2f 100644 Binary files a/.venv/Lib/site-packages/mpmath/calculus/__pycache__/calculus.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/calculus/__pycache__/calculus.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/calculus/__pycache__/differentiation.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/calculus/__pycache__/differentiation.cpython-311.pyc index 2baf0367..807bea46 100644 Binary files a/.venv/Lib/site-packages/mpmath/calculus/__pycache__/differentiation.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/calculus/__pycache__/differentiation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/calculus/__pycache__/extrapolation.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/calculus/__pycache__/extrapolation.cpython-311.pyc index 0860103c..3391465b 100644 Binary files a/.venv/Lib/site-packages/mpmath/calculus/__pycache__/extrapolation.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/calculus/__pycache__/extrapolation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/calculus/__pycache__/inverselaplace.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/calculus/__pycache__/inverselaplace.cpython-311.pyc index dc719a2e..f1522db5 100644 Binary files a/.venv/Lib/site-packages/mpmath/calculus/__pycache__/inverselaplace.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/calculus/__pycache__/inverselaplace.cpython-311.pyc differ 
diff --git a/.venv/Lib/site-packages/mpmath/calculus/__pycache__/odes.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/calculus/__pycache__/odes.cpython-311.pyc index 706c379f..a43dfc65 100644 Binary files a/.venv/Lib/site-packages/mpmath/calculus/__pycache__/odes.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/calculus/__pycache__/odes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/calculus/__pycache__/optimization.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/calculus/__pycache__/optimization.cpython-311.pyc index 0e2c3f41..8283b8dd 100644 Binary files a/.venv/Lib/site-packages/mpmath/calculus/__pycache__/optimization.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/calculus/__pycache__/optimization.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/calculus/__pycache__/polynomials.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/calculus/__pycache__/polynomials.cpython-311.pyc index 96ac1936..e96e3d6d 100644 Binary files a/.venv/Lib/site-packages/mpmath/calculus/__pycache__/polynomials.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/calculus/__pycache__/polynomials.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/calculus/__pycache__/quadrature.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/calculus/__pycache__/quadrature.cpython-311.pyc index 37e5b831..b5b727be 100644 Binary files a/.venv/Lib/site-packages/mpmath/calculus/__pycache__/quadrature.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/calculus/__pycache__/quadrature.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/functions/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/functions/__pycache__/__init__.cpython-311.pyc index 530b2111..0a659318 100644 Binary files a/.venv/Lib/site-packages/mpmath/functions/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/functions/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/mpmath/functions/__pycache__/bessel.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/functions/__pycache__/bessel.cpython-311.pyc index 67745eac..a9a22b49 100644 Binary files a/.venv/Lib/site-packages/mpmath/functions/__pycache__/bessel.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/functions/__pycache__/bessel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/functions/__pycache__/elliptic.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/functions/__pycache__/elliptic.cpython-311.pyc index 9ab85f3a..cc147468 100644 Binary files a/.venv/Lib/site-packages/mpmath/functions/__pycache__/elliptic.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/functions/__pycache__/elliptic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/functions/__pycache__/expintegrals.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/functions/__pycache__/expintegrals.cpython-311.pyc index db83f8fe..4e857c26 100644 Binary files a/.venv/Lib/site-packages/mpmath/functions/__pycache__/expintegrals.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/functions/__pycache__/expintegrals.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/functions/__pycache__/factorials.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/functions/__pycache__/factorials.cpython-311.pyc index 06d11df2..5e3dba68 100644 Binary files a/.venv/Lib/site-packages/mpmath/functions/__pycache__/factorials.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/functions/__pycache__/factorials.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/functions/__pycache__/functions.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/functions/__pycache__/functions.cpython-311.pyc index 097aa792..51bb9a9d 100644 Binary files a/.venv/Lib/site-packages/mpmath/functions/__pycache__/functions.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/functions/__pycache__/functions.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/mpmath/functions/__pycache__/hypergeometric.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/functions/__pycache__/hypergeometric.cpython-311.pyc index 566ca69a..4646fcc2 100644 Binary files a/.venv/Lib/site-packages/mpmath/functions/__pycache__/hypergeometric.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/functions/__pycache__/hypergeometric.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/functions/__pycache__/orthogonal.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/functions/__pycache__/orthogonal.cpython-311.pyc index c0777064..c27a9d92 100644 Binary files a/.venv/Lib/site-packages/mpmath/functions/__pycache__/orthogonal.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/functions/__pycache__/orthogonal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/functions/__pycache__/qfunctions.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/functions/__pycache__/qfunctions.cpython-311.pyc index e061afe0..3e09b5fb 100644 Binary files a/.venv/Lib/site-packages/mpmath/functions/__pycache__/qfunctions.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/functions/__pycache__/qfunctions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/functions/__pycache__/rszeta.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/functions/__pycache__/rszeta.cpython-311.pyc index 6a151c52..ef4b2431 100644 Binary files a/.venv/Lib/site-packages/mpmath/functions/__pycache__/rszeta.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/functions/__pycache__/rszeta.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/functions/__pycache__/signals.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/functions/__pycache__/signals.cpython-311.pyc index 3322d53a..6835fd37 100644 Binary files a/.venv/Lib/site-packages/mpmath/functions/__pycache__/signals.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/functions/__pycache__/signals.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/mpmath/functions/__pycache__/theta.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/functions/__pycache__/theta.cpython-311.pyc index aaad06a4..10f90d44 100644 Binary files a/.venv/Lib/site-packages/mpmath/functions/__pycache__/theta.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/functions/__pycache__/theta.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/functions/__pycache__/zeta.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/functions/__pycache__/zeta.cpython-311.pyc index 4070bbf4..e67c7d08 100644 Binary files a/.venv/Lib/site-packages/mpmath/functions/__pycache__/zeta.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/functions/__pycache__/zeta.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/functions/__pycache__/zetazeros.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/functions/__pycache__/zetazeros.cpython-311.pyc index f5696f21..5b57adbb 100644 Binary files a/.venv/Lib/site-packages/mpmath/functions/__pycache__/zetazeros.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/functions/__pycache__/zetazeros.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/libmp/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/libmp/__pycache__/__init__.cpython-311.pyc index d951c97f..356a3963 100644 Binary files a/.venv/Lib/site-packages/mpmath/libmp/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/libmp/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/libmp/__pycache__/backend.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/libmp/__pycache__/backend.cpython-311.pyc index 8e5cad0c..b1b239d7 100644 Binary files a/.venv/Lib/site-packages/mpmath/libmp/__pycache__/backend.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/libmp/__pycache__/backend.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/libmp/__pycache__/gammazeta.cpython-311.pyc 
b/.venv/Lib/site-packages/mpmath/libmp/__pycache__/gammazeta.cpython-311.pyc index 2c1db4ea..4e932829 100644 Binary files a/.venv/Lib/site-packages/mpmath/libmp/__pycache__/gammazeta.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/libmp/__pycache__/gammazeta.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libelefun.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libelefun.cpython-311.pyc index 6d674f4d..16aa348c 100644 Binary files a/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libelefun.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libelefun.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libhyper.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libhyper.cpython-311.pyc index 4a04f0f3..4cdb2b75 100644 Binary files a/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libhyper.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libhyper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libintmath.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libintmath.cpython-311.pyc index f1cb6a4d..27892bb2 100644 Binary files a/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libintmath.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libintmath.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libmpc.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libmpc.cpython-311.pyc index 21e84f67..6f9c47c3 100644 Binary files a/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libmpc.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libmpc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libmpf.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libmpf.cpython-311.pyc index 3829d7d6..86d0007f 100644 Binary files 
a/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libmpf.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libmpf.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libmpi.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libmpi.cpython-311.pyc index 2f6bf199..1092a4d8 100644 Binary files a/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libmpi.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/libmp/__pycache__/libmpi.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/matrices/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/matrices/__pycache__/__init__.cpython-311.pyc index 197adf5d..f0fc46a7 100644 Binary files a/.venv/Lib/site-packages/mpmath/matrices/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/matrices/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/matrices/__pycache__/calculus.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/matrices/__pycache__/calculus.cpython-311.pyc index b1ec280f..2904972a 100644 Binary files a/.venv/Lib/site-packages/mpmath/matrices/__pycache__/calculus.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/matrices/__pycache__/calculus.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/matrices/__pycache__/eigen.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/matrices/__pycache__/eigen.cpython-311.pyc index 8cbf6990..f2d55435 100644 Binary files a/.venv/Lib/site-packages/mpmath/matrices/__pycache__/eigen.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/matrices/__pycache__/eigen.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/matrices/__pycache__/eigen_symmetric.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/matrices/__pycache__/eigen_symmetric.cpython-311.pyc index baa9ecd2..9fe35d63 100644 Binary files a/.venv/Lib/site-packages/mpmath/matrices/__pycache__/eigen_symmetric.cpython-311.pyc and 
b/.venv/Lib/site-packages/mpmath/matrices/__pycache__/eigen_symmetric.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/matrices/__pycache__/linalg.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/matrices/__pycache__/linalg.cpython-311.pyc index 34da9ffa..b42e7bc1 100644 Binary files a/.venv/Lib/site-packages/mpmath/matrices/__pycache__/linalg.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/matrices/__pycache__/linalg.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/mpmath/matrices/__pycache__/matrices.cpython-311.pyc b/.venv/Lib/site-packages/mpmath/matrices/__pycache__/matrices.cpython-311.pyc index c4c2dd7f..5a1ecdf9 100644 Binary files a/.venv/Lib/site-packages/mpmath/matrices/__pycache__/matrices.cpython-311.pyc and b/.venv/Lib/site-packages/mpmath/matrices/__pycache__/matrices.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/msgpack/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/msgpack/__pycache__/__init__.cpython-311.pyc index 83dec14a..4e55ae38 100644 Binary files a/.venv/Lib/site-packages/msgpack/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/msgpack/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/msgpack/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/msgpack/__pycache__/exceptions.cpython-311.pyc index a3ca4f30..e7d68950 100644 Binary files a/.venv/Lib/site-packages/msgpack/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/msgpack/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/msgpack/__pycache__/ext.cpython-311.pyc b/.venv/Lib/site-packages/msgpack/__pycache__/ext.cpython-311.pyc index 2dd87995..672c7cb8 100644 Binary files a/.venv/Lib/site-packages/msgpack/__pycache__/ext.cpython-311.pyc and b/.venv/Lib/site-packages/msgpack/__pycache__/ext.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/multidict/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/multidict/__pycache__/__init__.cpython-311.pyc index c30e0174..f4f12d1b 100644 Binary files a/.venv/Lib/site-packages/multidict/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/multidict/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/multidict/__pycache__/_abc.cpython-311.pyc b/.venv/Lib/site-packages/multidict/__pycache__/_abc.cpython-311.pyc index 4bbb637b..10da3dbf 100644 Binary files a/.venv/Lib/site-packages/multidict/__pycache__/_abc.cpython-311.pyc and b/.venv/Lib/site-packages/multidict/__pycache__/_abc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/multidict/__pycache__/_compat.cpython-311.pyc b/.venv/Lib/site-packages/multidict/__pycache__/_compat.cpython-311.pyc index 4434baee..845d2869 100644 Binary files a/.venv/Lib/site-packages/multidict/__pycache__/_compat.cpython-311.pyc and b/.venv/Lib/site-packages/multidict/__pycache__/_compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/multidict/__pycache__/_multidict_base.cpython-311.pyc b/.venv/Lib/site-packages/multidict/__pycache__/_multidict_base.cpython-311.pyc index 1d72023d..57085ecc 100644 Binary files a/.venv/Lib/site-packages/multidict/__pycache__/_multidict_base.cpython-311.pyc and b/.venv/Lib/site-packages/multidict/__pycache__/_multidict_base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/murmurhash/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/murmurhash/__pycache__/__init__.cpython-311.pyc index 3d1897db..d4867802 100644 Binary files a/.venv/Lib/site-packages/murmurhash/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/murmurhash/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/murmurhash/__pycache__/about.cpython-311.pyc b/.venv/Lib/site-packages/murmurhash/__pycache__/about.cpython-311.pyc index 5bcbaa6d..77d95407 100644 Binary files a/.venv/Lib/site-packages/murmurhash/__pycache__/about.cpython-311.pyc and 
b/.venv/Lib/site-packages/murmurhash/__pycache__/about.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/__pycache__/__init__.cpython-311.pyc index 52ca2a95..e3aa95bd 100644 Binary files a/.venv/Lib/site-packages/networkx/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/__pycache__/convert.cpython-311.pyc b/.venv/Lib/site-packages/networkx/__pycache__/convert.cpython-311.pyc index a077d766..8fd087b8 100644 Binary files a/.venv/Lib/site-packages/networkx/__pycache__/convert.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/__pycache__/convert.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/__pycache__/convert_matrix.cpython-311.pyc b/.venv/Lib/site-packages/networkx/__pycache__/convert_matrix.cpython-311.pyc index 9ae82baf..37f94068 100644 Binary files a/.venv/Lib/site-packages/networkx/__pycache__/convert_matrix.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/__pycache__/convert_matrix.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/__pycache__/exception.cpython-311.pyc b/.venv/Lib/site-packages/networkx/__pycache__/exception.cpython-311.pyc index 328695a8..79a1c409 100644 Binary files a/.venv/Lib/site-packages/networkx/__pycache__/exception.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/__pycache__/exception.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/__pycache__/lazy_imports.cpython-311.pyc b/.venv/Lib/site-packages/networkx/__pycache__/lazy_imports.cpython-311.pyc index 6b0ae571..84f58d81 100644 Binary files a/.venv/Lib/site-packages/networkx/__pycache__/lazy_imports.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/__pycache__/lazy_imports.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/__pycache__/relabel.cpython-311.pyc 
b/.venv/Lib/site-packages/networkx/__pycache__/relabel.cpython-311.pyc index f2007931..282eb3cd 100644 Binary files a/.venv/Lib/site-packages/networkx/__pycache__/relabel.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/__pycache__/relabel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/__init__.cpython-311.pyc index 8bb9e303..51d40bbf 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/asteroidal.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/asteroidal.cpython-311.pyc index 0e9289ae..e4f15430 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/asteroidal.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/asteroidal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/boundary.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/boundary.cpython-311.pyc index b3927449..95d6beef 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/boundary.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/boundary.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/bridges.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/bridges.cpython-311.pyc index ce30c3dc..0fb083f2 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/bridges.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/bridges.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/chains.cpython-311.pyc 
b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/chains.cpython-311.pyc index ef2aa2f9..a4e71c4b 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/chains.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/chains.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/chordal.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/chordal.cpython-311.pyc index abfbc271..2872bded 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/chordal.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/chordal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/clique.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/clique.cpython-311.pyc index ba08d29e..62fb620e 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/clique.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/clique.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/cluster.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/cluster.cpython-311.pyc index 4cb8dae6..54243afa 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/cluster.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/cluster.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/communicability_alg.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/communicability_alg.cpython-311.pyc index ccf61ad9..494767b2 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/communicability_alg.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/communicability_alg.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/core.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/core.cpython-311.pyc index 0bf5a0c3..df3c128f 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/core.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/covering.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/covering.cpython-311.pyc index 395ca82c..96158ed4 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/covering.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/covering.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/cuts.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/cuts.cpython-311.pyc index eb904e76..73c64509 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/cuts.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/cuts.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/cycles.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/cycles.cpython-311.pyc index 47304c55..cf7e5cf3 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/cycles.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/cycles.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/d_separation.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/d_separation.cpython-311.pyc index 3ded5b29..3acb3766 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/d_separation.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/d_separation.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/dag.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/dag.cpython-311.pyc index 0293da62..e5975a41 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/dag.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/dag.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/distance_measures.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/distance_measures.cpython-311.pyc index 8da14104..03e4e2d5 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/distance_measures.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/distance_measures.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/distance_regular.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/distance_regular.cpython-311.pyc index 4e16fd0e..38a86c46 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/distance_regular.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/distance_regular.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/dominance.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/dominance.cpython-311.pyc index cb1f1985..669af5d7 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/dominance.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/dominance.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/dominating.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/dominating.cpython-311.pyc index 81539991..5d45ba9b 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/dominating.cpython-311.pyc and 
b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/dominating.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/efficiency_measures.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/efficiency_measures.cpython-311.pyc index 24c5b0f1..c2c78091 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/efficiency_measures.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/efficiency_measures.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/euler.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/euler.cpython-311.pyc index b73e5ded..8d115bbd 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/euler.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/euler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/graph_hashing.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/graph_hashing.cpython-311.pyc index 0b3bc175..85b20f30 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/graph_hashing.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/graph_hashing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/graphical.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/graphical.cpython-311.pyc index a68535b6..64dcfa35 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/graphical.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/graphical.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/hierarchy.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/hierarchy.cpython-311.pyc index 43f4f898..2eff6b68 100644 Binary files 
a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/hierarchy.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/hierarchy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/hybrid.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/hybrid.cpython-311.pyc index 1f772b5e..efff644e 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/hybrid.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/hybrid.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/isolate.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/isolate.cpython-311.pyc index 9636d871..6f735289 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/isolate.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/isolate.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/link_prediction.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/link_prediction.cpython-311.pyc index 727869cf..af617d28 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/link_prediction.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/link_prediction.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/lowest_common_ancestors.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/lowest_common_ancestors.cpython-311.pyc index c83456d6..1d681326 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/lowest_common_ancestors.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/lowest_common_ancestors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/matching.cpython-311.pyc 
b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/matching.cpython-311.pyc index dfc22ccc..f13c41af 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/matching.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/matching.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/mis.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/mis.cpython-311.pyc index 22d886c0..18ef61ea 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/mis.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/mis.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/moral.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/moral.cpython-311.pyc index 50ff8be7..faee856d 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/moral.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/moral.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/non_randomness.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/non_randomness.cpython-311.pyc index c752deb7..b2d5a310 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/non_randomness.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/non_randomness.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/planar_drawing.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/planar_drawing.cpython-311.pyc index 8d7921e9..2ad52e11 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/planar_drawing.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/planar_drawing.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/planarity.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/planarity.cpython-311.pyc index 64173b0f..243f8bbc 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/planarity.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/planarity.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/polynomials.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/polynomials.cpython-311.pyc index 05983f13..1690698b 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/polynomials.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/polynomials.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/reciprocity.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/reciprocity.cpython-311.pyc index a86f7b79..cb54e3d5 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/reciprocity.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/reciprocity.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/regular.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/regular.cpython-311.pyc index a7f023f6..53c9fdbc 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/regular.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/regular.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/richclub.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/richclub.cpython-311.pyc index ed9861fc..1afeb7ce 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/richclub.cpython-311.pyc and 
b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/richclub.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/similarity.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/similarity.cpython-311.pyc index aec7608a..aa718d32 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/similarity.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/similarity.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/simple_paths.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/simple_paths.cpython-311.pyc index 751518b8..fc762e87 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/simple_paths.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/simple_paths.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/smallworld.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/smallworld.cpython-311.pyc index 4cb5e856..7bc5e542 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/smallworld.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/smallworld.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/smetric.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/smetric.cpython-311.pyc index 397b9250..8797bce5 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/smetric.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/smetric.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/sparsifiers.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/sparsifiers.cpython-311.pyc index d2e8aad9..99ea048d 100644 Binary files 
a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/sparsifiers.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/sparsifiers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/structuralholes.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/structuralholes.cpython-311.pyc index 336ebcb8..a1f6ce95 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/structuralholes.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/structuralholes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/summarization.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/summarization.cpython-311.pyc index e767b17e..37fc36a4 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/summarization.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/summarization.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/swap.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/swap.cpython-311.pyc index 0ad8b9f8..30365218 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/swap.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/swap.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/tournament.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/tournament.cpython-311.pyc index 9a62f831..33161a7c 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/tournament.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/tournament.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/triads.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/triads.cpython-311.pyc index 
8e41ae62..3900827e 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/triads.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/triads.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/vitality.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/vitality.cpython-311.pyc index a1f7d815..2c7fc630 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/vitality.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/vitality.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/voronoi.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/voronoi.cpython-311.pyc index f925d05a..97d7aa42 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/voronoi.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/voronoi.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/wiener.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/wiener.cpython-311.pyc index 8acfefb9..d8619de4 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/__pycache__/wiener.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/__pycache__/wiener.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/__init__.cpython-311.pyc index e7600df3..8d6c6a9f 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/clique.cpython-311.pyc 
b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/clique.cpython-311.pyc index 92ae2f9f..e8920dd4 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/clique.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/clique.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/clustering_coefficient.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/clustering_coefficient.cpython-311.pyc index 8359fc1d..f31bcd9a 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/clustering_coefficient.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/clustering_coefficient.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/connectivity.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/connectivity.cpython-311.pyc index d081e8d0..c972d374 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/connectivity.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/connectivity.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/distance_measures.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/distance_measures.cpython-311.pyc index 16920393..6d4dbfab 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/distance_measures.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/distance_measures.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/dominating_set.cpython-311.pyc 
b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/dominating_set.cpython-311.pyc index 5a82d15d..c2ede680 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/dominating_set.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/dominating_set.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/kcomponents.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/kcomponents.cpython-311.pyc index dcbc6e00..6a2dfeda 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/kcomponents.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/kcomponents.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/matching.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/matching.cpython-311.pyc index 7544a290..f053d2d4 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/matching.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/matching.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/maxcut.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/maxcut.cpython-311.pyc index e0137fe6..6130b5ba 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/maxcut.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/maxcut.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/ramsey.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/ramsey.cpython-311.pyc index 910d3688..82d872a1 100644 Binary files 
a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/ramsey.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/ramsey.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/steinertree.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/steinertree.cpython-311.pyc index 7895ac97..7d0b58fd 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/steinertree.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/steinertree.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/traveling_salesman.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/traveling_salesman.cpython-311.pyc index fb2e2f3c..dc0dc884 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/traveling_salesman.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/traveling_salesman.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/treewidth.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/treewidth.cpython-311.pyc index ac314776..e0569e23 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/treewidth.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/treewidth.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/vertex_cover.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/vertex_cover.cpython-311.pyc index b02de1a0..191860e4 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/vertex_cover.cpython-311.pyc and 
b/.venv/Lib/site-packages/networkx/algorithms/approximation/__pycache__/vertex_cover.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/__init__.cpython-311.pyc index 2a9c48bf..6b1dab57 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/connectivity.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/connectivity.cpython-311.pyc index e4827364..31a6f798 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/connectivity.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/connectivity.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/correlation.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/correlation.cpython-311.pyc index b5e067a2..30ab83ed 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/correlation.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/correlation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/mixing.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/mixing.cpython-311.pyc index 9b94ae95..9cc22bad 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/mixing.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/mixing.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/neighbor_degree.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/neighbor_degree.cpython-311.pyc index f89e0ce6..51cd9d22 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/neighbor_degree.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/neighbor_degree.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/pairs.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/pairs.cpython-311.pyc index d09a88dd..696d5607 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/pairs.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/assortativity/__pycache__/pairs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/__init__.cpython-311.pyc index 0fb76887..424db3d7 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/basic.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/basic.cpython-311.pyc index e8ef13a4..d844e3e1 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/basic.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/basic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/centrality.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/centrality.cpython-311.pyc index 
fabe784a..f685c90c 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/centrality.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/centrality.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/cluster.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/cluster.cpython-311.pyc index 61ff8abb..9753b552 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/cluster.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/cluster.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/covering.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/covering.cpython-311.pyc index 0c6a91df..21e857c7 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/covering.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/covering.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/edgelist.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/edgelist.cpython-311.pyc index 5298250f..8875a88a 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/edgelist.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/edgelist.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/generators.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/generators.cpython-311.pyc index 578719bc..18cf1c82 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/generators.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/generators.cpython-311.pyc differ 
diff --git a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/matching.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/matching.cpython-311.pyc index 09a2be83..779a6ced 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/matching.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/matching.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/matrix.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/matrix.cpython-311.pyc index aa1d4c01..c539e26a 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/matrix.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/matrix.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/projection.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/projection.cpython-311.pyc index 34582e96..818a3e27 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/projection.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/projection.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/redundancy.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/redundancy.cpython-311.pyc index ab6c3c7e..2b5c40b4 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/redundancy.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/redundancy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/spectral.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/spectral.cpython-311.pyc index aa3c1e6c..bf6a13d1 100644 Binary 
files a/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/spectral.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/bipartite/__pycache__/spectral.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/__init__.cpython-311.pyc index 93f7a303..97b2a9e5 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/betweenness.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/betweenness.cpython-311.pyc index eab32578..d1c7c6ac 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/betweenness.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/betweenness.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/betweenness_subset.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/betweenness_subset.cpython-311.pyc index e01c1dc4..c6fb8d2a 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/betweenness_subset.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/betweenness_subset.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/closeness.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/closeness.cpython-311.pyc index e43d5392..e1bfaddd 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/closeness.cpython-311.pyc and 
b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/closeness.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness.cpython-311.pyc index cc55fcbf..09b14c3c 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness_subset.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness_subset.cpython-311.pyc index 11659349..6eaa6a26 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness_subset.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness_subset.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_closeness.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_closeness.cpython-311.pyc index 9320a35b..d91551f6 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_closeness.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_closeness.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/degree_alg.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/degree_alg.cpython-311.pyc index b17f8d08..4ebca2bb 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/degree_alg.cpython-311.pyc and 
b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/degree_alg.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/dispersion.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/dispersion.cpython-311.pyc index a22e8184..7ae7ef6d 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/dispersion.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/dispersion.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/eigenvector.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/eigenvector.cpython-311.pyc index 8a0b2abb..3e41c006 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/eigenvector.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/eigenvector.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/flow_matrix.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/flow_matrix.cpython-311.pyc index ac681d4d..601cbbb0 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/flow_matrix.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/flow_matrix.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/group.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/group.cpython-311.pyc index 4df8c5ec..1eb3f6ec 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/group.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/group.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/harmonic.cpython-311.pyc 
b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/harmonic.cpython-311.pyc index 1c755b09..913a04dd 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/harmonic.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/harmonic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/katz.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/katz.cpython-311.pyc index b69f2a1c..f21c5a62 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/katz.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/katz.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/load.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/load.cpython-311.pyc index d1c871bb..f618f108 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/load.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/load.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/percolation.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/percolation.cpython-311.pyc index 24d0f122..d82a907f 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/percolation.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/percolation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/reaching.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/reaching.cpython-311.pyc index 64f2be4a..dc55fb71 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/reaching.cpython-311.pyc and 
b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/reaching.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/second_order.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/second_order.cpython-311.pyc index 6e53e8a1..af5e9f3d 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/second_order.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/second_order.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/subgraph_alg.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/subgraph_alg.cpython-311.pyc index 8da37539..83551e64 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/subgraph_alg.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/subgraph_alg.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/trophic.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/trophic.cpython-311.pyc index adbb9fa8..24092dc6 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/trophic.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/trophic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/voterank_alg.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/voterank_alg.cpython-311.pyc index db06be8e..02894d4f 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/voterank_alg.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/centrality/__pycache__/voterank_alg.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/networkx/algorithms/coloring/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/coloring/__pycache__/__init__.cpython-311.pyc index c28388b3..5dd88335 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/coloring/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/coloring/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/coloring/__pycache__/equitable_coloring.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/coloring/__pycache__/equitable_coloring.cpython-311.pyc index 0d7854ab..c237f1a8 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/coloring/__pycache__/equitable_coloring.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/coloring/__pycache__/equitable_coloring.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/coloring/__pycache__/greedy_coloring.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/coloring/__pycache__/greedy_coloring.cpython-311.pyc index b93f1fa5..8b16da9e 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/coloring/__pycache__/greedy_coloring.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/coloring/__pycache__/greedy_coloring.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/__init__.cpython-311.pyc index 89bd93ca..60c748ef 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/asyn_fluid.cpython-311.pyc 
b/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/asyn_fluid.cpython-311.pyc index a58b3b6d..a7754076 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/asyn_fluid.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/asyn_fluid.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/centrality.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/centrality.cpython-311.pyc index 3c047845..b45d550a 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/centrality.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/centrality.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/community_utils.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/community_utils.cpython-311.pyc index c8ac089e..9f80c7c9 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/community_utils.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/community_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/kclique.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/kclique.cpython-311.pyc index d392cb2a..8e9e5074 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/kclique.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/kclique.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/kernighan_lin.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/kernighan_lin.cpython-311.pyc index cfb57234..1f13e488 100644 Binary files 
a/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/kernighan_lin.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/kernighan_lin.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/label_propagation.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/label_propagation.cpython-311.pyc index 153aa1ef..c5de76c3 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/label_propagation.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/label_propagation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/louvain.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/louvain.cpython-311.pyc index 2acd963c..1702cc54 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/louvain.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/louvain.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/lukes.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/lukes.cpython-311.pyc index 8e8e05ca..49fc72a5 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/lukes.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/lukes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/modularity_max.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/modularity_max.cpython-311.pyc index 94fc6c7f..113ae640 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/modularity_max.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/modularity_max.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/quality.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/quality.cpython-311.pyc index ed7ab07d..6001fe98 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/quality.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/community/__pycache__/quality.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/__init__.cpython-311.pyc index b6e303df..53b9d665 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/attracting.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/attracting.cpython-311.pyc index b865ef31..6b7f4e8f 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/attracting.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/attracting.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/biconnected.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/biconnected.cpython-311.pyc index 4ecaec67..2b399e59 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/biconnected.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/biconnected.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/connected.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/connected.cpython-311.pyc index 
ac8febf9..9bae3d54 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/connected.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/connected.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/semiconnected.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/semiconnected.cpython-311.pyc index 7798e04e..b7ad4238 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/semiconnected.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/semiconnected.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/strongly_connected.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/strongly_connected.cpython-311.pyc index d60c5304..aeefddc4 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/strongly_connected.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/strongly_connected.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/weakly_connected.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/weakly_connected.cpython-311.pyc index a835e937..88c46de7 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/weakly_connected.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/components/__pycache__/weakly_connected.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/__init__.cpython-311.pyc index fe00ff21..68050b35 100644 Binary files 
a/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/connectivity.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/connectivity.cpython-311.pyc index ff26d5c1..ba9e8219 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/connectivity.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/connectivity.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/cuts.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/cuts.cpython-311.pyc index b7791194..80ee6efd 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/cuts.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/cuts.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/disjoint_paths.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/disjoint_paths.cpython-311.pyc index 745a57a0..5d8e8902 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/disjoint_paths.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/disjoint_paths.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/edge_augmentation.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/edge_augmentation.cpython-311.pyc index 52425e0a..10820abb 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/edge_augmentation.cpython-311.pyc and 
b/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/edge_augmentation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/edge_kcomponents.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/edge_kcomponents.cpython-311.pyc index ab131445..dadc86be 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/edge_kcomponents.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/edge_kcomponents.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/kcomponents.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/kcomponents.cpython-311.pyc index 4ecdeb4d..5f2026cd 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/kcomponents.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/kcomponents.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/kcutsets.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/kcutsets.cpython-311.pyc index 89639306..a9931fef 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/kcutsets.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/kcutsets.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/stoerwagner.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/stoerwagner.cpython-311.pyc index d557eadc..6af6368b 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/stoerwagner.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/stoerwagner.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/utils.cpython-311.pyc index 2a6da3fd..d4b03276 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/connectivity/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/__init__.cpython-311.pyc index 923fade7..819d5708 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/boykovkolmogorov.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/boykovkolmogorov.cpython-311.pyc index 6d03a50d..c7af27d7 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/boykovkolmogorov.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/boykovkolmogorov.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/capacityscaling.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/capacityscaling.cpython-311.pyc index d588367a..ba84d49c 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/capacityscaling.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/capacityscaling.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/dinitz_alg.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/dinitz_alg.cpython-311.pyc index b5dfbcc1..84534a37 100644 Binary files 
a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/dinitz_alg.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/dinitz_alg.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/edmondskarp.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/edmondskarp.cpython-311.pyc index 3b6827f4..4fd821e8 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/edmondskarp.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/edmondskarp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/gomory_hu.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/gomory_hu.cpython-311.pyc index 46ea7e5a..d97155bc 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/gomory_hu.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/gomory_hu.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/maxflow.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/maxflow.cpython-311.pyc index 6c344b48..01422ec5 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/maxflow.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/maxflow.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/mincost.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/mincost.cpython-311.pyc index 71408a44..925379d4 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/mincost.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/mincost.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/networksimplex.cpython-311.pyc 
b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/networksimplex.cpython-311.pyc index 813bd4aa..3b25d3ec 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/networksimplex.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/networksimplex.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/preflowpush.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/preflowpush.cpython-311.pyc index 61fbb31c..3b9d045c 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/preflowpush.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/preflowpush.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/shortestaugmentingpath.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/shortestaugmentingpath.cpython-311.pyc index 2faa9bff..54929205 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/shortestaugmentingpath.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/shortestaugmentingpath.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/utils.cpython-311.pyc index 3d07392a..2536659b 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/flow/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/__init__.cpython-311.pyc index 43b5a7ab..7e5cb6a5 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/ismags.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/ismags.cpython-311.pyc index d2e4efc8..3b591ed2 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/ismags.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/ismags.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorph.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorph.cpython-311.pyc index 823be9c1..957cc2dc 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorph.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorph.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorphvf2.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorphvf2.cpython-311.pyc index 648053b6..8c09e60c 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorphvf2.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorphvf2.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/matchhelpers.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/matchhelpers.cpython-311.pyc index 79d49fd9..3bfab5bd 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/matchhelpers.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/matchhelpers.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/temporalisomorphvf2.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/temporalisomorphvf2.cpython-311.pyc index d46f995b..b3c8e8c4 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/temporalisomorphvf2.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/temporalisomorphvf2.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/tree_isomorphism.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/tree_isomorphism.cpython-311.pyc index 4bffa893..69b3d46e 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/tree_isomorphism.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/tree_isomorphism.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2userfunc.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2userfunc.cpython-311.pyc index 6069644c..fead9fbe 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2userfunc.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2userfunc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/link_analysis/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/link_analysis/__pycache__/__init__.cpython-311.pyc index 91ff30b6..3bba554a 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/link_analysis/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/link_analysis/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/link_analysis/__pycache__/hits_alg.cpython-311.pyc 
b/.venv/Lib/site-packages/networkx/algorithms/link_analysis/__pycache__/hits_alg.cpython-311.pyc index a4367a32..aee8bf42 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/link_analysis/__pycache__/hits_alg.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/link_analysis/__pycache__/hits_alg.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/link_analysis/__pycache__/pagerank_alg.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/link_analysis/__pycache__/pagerank_alg.cpython-311.pyc index ba487cef..f36780c9 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/link_analysis/__pycache__/pagerank_alg.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/link_analysis/__pycache__/pagerank_alg.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/minors/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/minors/__pycache__/__init__.cpython-311.pyc index 846e978e..8a5466e8 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/minors/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/minors/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/minors/__pycache__/contraction.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/minors/__pycache__/contraction.cpython-311.pyc index 5da02dad..bd27e106 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/minors/__pycache__/contraction.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/minors/__pycache__/contraction.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/node_classification/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/node_classification/__pycache__/__init__.cpython-311.pyc index b4255a21..70ff6f39 100644 Binary files 
a/.venv/Lib/site-packages/networkx/algorithms/node_classification/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/node_classification/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/operators/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/operators/__pycache__/__init__.cpython-311.pyc index a336296d..1a034d0e 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/operators/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/operators/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/operators/__pycache__/all.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/operators/__pycache__/all.cpython-311.pyc index 26a065cf..41d976c3 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/operators/__pycache__/all.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/operators/__pycache__/all.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/operators/__pycache__/binary.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/operators/__pycache__/binary.cpython-311.pyc index 69002b49..ba762236 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/operators/__pycache__/binary.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/operators/__pycache__/binary.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/operators/__pycache__/product.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/operators/__pycache__/product.cpython-311.pyc index d48810ef..3e653157 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/operators/__pycache__/product.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/operators/__pycache__/product.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/networkx/algorithms/operators/__pycache__/unary.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/operators/__pycache__/unary.cpython-311.pyc index 2284982d..4b5e51b8 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/operators/__pycache__/unary.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/operators/__pycache__/unary.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/__init__.cpython-311.pyc index 33e8b17b..3daeba34 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/astar.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/astar.cpython-311.pyc index afc06e5b..92d93316 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/astar.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/astar.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/dense.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/dense.cpython-311.pyc index 94acf932..0a538bd6 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/dense.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/dense.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/generic.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/generic.cpython-311.pyc index 3f0b789b..59c3b1e0 
100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/generic.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/generic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/unweighted.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/unweighted.cpython-311.pyc index d59e8b86..af4f3d55 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/unweighted.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/unweighted.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/weighted.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/weighted.cpython-311.pyc index 0b18d744..e120f04f 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/weighted.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/shortest_paths/__pycache__/weighted.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/__init__.cpython-311.pyc index b1d46825..0371beab 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/beamsearch.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/beamsearch.cpython-311.pyc index 7310f2c6..7fdc4dc5 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/beamsearch.cpython-311.pyc and 
b/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/beamsearch.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/breadth_first_search.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/breadth_first_search.cpython-311.pyc index 49922a82..8e10ca58 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/breadth_first_search.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/breadth_first_search.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/depth_first_search.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/depth_first_search.cpython-311.pyc index 44cf11a8..35cf3c1c 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/depth_first_search.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/depth_first_search.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/edgebfs.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/edgebfs.cpython-311.pyc index 2fea8835..19cc5525 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/edgebfs.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/edgebfs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/edgedfs.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/edgedfs.cpython-311.pyc index 00c8602e..35581695 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/edgedfs.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/traversal/__pycache__/edgedfs.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/__init__.cpython-311.pyc index 592f3ebf..5d210b28 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/branchings.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/branchings.cpython-311.pyc index 3bd2b69c..fb1b5e60 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/branchings.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/branchings.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/coding.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/coding.cpython-311.pyc index f24fe938..20721dfe 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/coding.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/coding.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/decomposition.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/decomposition.cpython-311.pyc index d8d1d188..2ae87388 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/decomposition.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/decomposition.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/mst.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/mst.cpython-311.pyc index 634360fa..f4a966f7 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/mst.cpython-311.pyc and 
b/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/mst.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/operations.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/operations.cpython-311.pyc index 85def10d..2d68cf1f 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/operations.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/operations.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/recognition.cpython-311.pyc b/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/recognition.cpython-311.pyc index 63c75e6d..ea951deb 100644 Binary files a/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/recognition.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/algorithms/tree/__pycache__/recognition.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/classes/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/classes/__pycache__/__init__.cpython-311.pyc index f1640ad8..7c51ac44 100644 Binary files a/.venv/Lib/site-packages/networkx/classes/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/classes/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/classes/__pycache__/coreviews.cpython-311.pyc b/.venv/Lib/site-packages/networkx/classes/__pycache__/coreviews.cpython-311.pyc index 9168b444..39aac058 100644 Binary files a/.venv/Lib/site-packages/networkx/classes/__pycache__/coreviews.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/classes/__pycache__/coreviews.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/classes/__pycache__/digraph.cpython-311.pyc b/.venv/Lib/site-packages/networkx/classes/__pycache__/digraph.cpython-311.pyc index 7307dab4..78eed8b2 100644 Binary files 
a/.venv/Lib/site-packages/networkx/classes/__pycache__/digraph.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/classes/__pycache__/digraph.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/classes/__pycache__/filters.cpython-311.pyc b/.venv/Lib/site-packages/networkx/classes/__pycache__/filters.cpython-311.pyc index 51ef260b..fb3b5e52 100644 Binary files a/.venv/Lib/site-packages/networkx/classes/__pycache__/filters.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/classes/__pycache__/filters.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/classes/__pycache__/function.cpython-311.pyc b/.venv/Lib/site-packages/networkx/classes/__pycache__/function.cpython-311.pyc index 9b2174e8..3194c5da 100644 Binary files a/.venv/Lib/site-packages/networkx/classes/__pycache__/function.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/classes/__pycache__/function.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/classes/__pycache__/graph.cpython-311.pyc b/.venv/Lib/site-packages/networkx/classes/__pycache__/graph.cpython-311.pyc index e409ab48..04627bd2 100644 Binary files a/.venv/Lib/site-packages/networkx/classes/__pycache__/graph.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/classes/__pycache__/graph.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/classes/__pycache__/graphviews.cpython-311.pyc b/.venv/Lib/site-packages/networkx/classes/__pycache__/graphviews.cpython-311.pyc index 0ca56494..9e043647 100644 Binary files a/.venv/Lib/site-packages/networkx/classes/__pycache__/graphviews.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/classes/__pycache__/graphviews.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/classes/__pycache__/multidigraph.cpython-311.pyc b/.venv/Lib/site-packages/networkx/classes/__pycache__/multidigraph.cpython-311.pyc index 33393574..ed346edf 100644 Binary files 
a/.venv/Lib/site-packages/networkx/classes/__pycache__/multidigraph.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/classes/__pycache__/multidigraph.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/classes/__pycache__/multigraph.cpython-311.pyc b/.venv/Lib/site-packages/networkx/classes/__pycache__/multigraph.cpython-311.pyc index bc21f515..7cb6acf0 100644 Binary files a/.venv/Lib/site-packages/networkx/classes/__pycache__/multigraph.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/classes/__pycache__/multigraph.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/classes/__pycache__/ordered.cpython-311.pyc b/.venv/Lib/site-packages/networkx/classes/__pycache__/ordered.cpython-311.pyc index 2a0a8a8b..4b49469b 100644 Binary files a/.venv/Lib/site-packages/networkx/classes/__pycache__/ordered.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/classes/__pycache__/ordered.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/classes/__pycache__/reportviews.cpython-311.pyc b/.venv/Lib/site-packages/networkx/classes/__pycache__/reportviews.cpython-311.pyc index 3f508035..7e5c5e30 100644 Binary files a/.venv/Lib/site-packages/networkx/classes/__pycache__/reportviews.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/classes/__pycache__/reportviews.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/drawing/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/drawing/__pycache__/__init__.cpython-311.pyc index dd455276..89efb2fb 100644 Binary files a/.venv/Lib/site-packages/networkx/drawing/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/drawing/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/drawing/__pycache__/layout.cpython-311.pyc b/.venv/Lib/site-packages/networkx/drawing/__pycache__/layout.cpython-311.pyc index fbbe05b9..f4ad615e 100644 Binary files 
a/.venv/Lib/site-packages/networkx/drawing/__pycache__/layout.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/drawing/__pycache__/layout.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/drawing/__pycache__/nx_agraph.cpython-311.pyc b/.venv/Lib/site-packages/networkx/drawing/__pycache__/nx_agraph.cpython-311.pyc index 4a24c077..d3cc9c16 100644 Binary files a/.venv/Lib/site-packages/networkx/drawing/__pycache__/nx_agraph.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/drawing/__pycache__/nx_agraph.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/drawing/__pycache__/nx_pydot.cpython-311.pyc b/.venv/Lib/site-packages/networkx/drawing/__pycache__/nx_pydot.cpython-311.pyc index d492696a..0d708d87 100644 Binary files a/.venv/Lib/site-packages/networkx/drawing/__pycache__/nx_pydot.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/drawing/__pycache__/nx_pydot.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/drawing/__pycache__/nx_pylab.cpython-311.pyc b/.venv/Lib/site-packages/networkx/drawing/__pycache__/nx_pylab.cpython-311.pyc index 8b82774f..cf46cec8 100644 Binary files a/.venv/Lib/site-packages/networkx/drawing/__pycache__/nx_pylab.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/drawing/__pycache__/nx_pylab.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/__init__.cpython-311.pyc index 5589926a..c9de8435 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/atlas.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/atlas.cpython-311.pyc index b4caeb10..834322d3 100644 Binary files 
a/.venv/Lib/site-packages/networkx/generators/__pycache__/atlas.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/atlas.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/classic.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/classic.cpython-311.pyc index 01432686..cabdf69a 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/classic.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/classic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/cographs.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/cographs.cpython-311.pyc index fdf7900e..e9f09f07 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/cographs.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/cographs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/community.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/community.cpython-311.pyc index f5105424..51df9315 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/community.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/community.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/degree_seq.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/degree_seq.cpython-311.pyc index e7bffec3..ac0ec653 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/degree_seq.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/degree_seq.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/directed.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/directed.cpython-311.pyc index c942afda..fb3b8bf8 100644 Binary files 
a/.venv/Lib/site-packages/networkx/generators/__pycache__/directed.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/directed.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/duplication.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/duplication.cpython-311.pyc index 28e557c2..15c7a315 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/duplication.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/duplication.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/ego.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/ego.cpython-311.pyc index 9b3005f1..83ce5183 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/ego.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/ego.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/expanders.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/expanders.cpython-311.pyc index d2b04200..5b2a70d7 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/expanders.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/expanders.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/geometric.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/geometric.cpython-311.pyc index 2fd9e953..3f2d12c5 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/geometric.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/geometric.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/internet_as_graphs.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/internet_as_graphs.cpython-311.pyc index 778ec6d9..d0c13886 
100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/internet_as_graphs.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/internet_as_graphs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/intersection.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/intersection.cpython-311.pyc index fad133ca..6602974a 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/intersection.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/intersection.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/interval_graph.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/interval_graph.cpython-311.pyc index 49747848..39b31810 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/interval_graph.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/interval_graph.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/joint_degree_seq.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/joint_degree_seq.cpython-311.pyc index 5b0852b8..78d24058 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/joint_degree_seq.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/joint_degree_seq.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/lattice.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/lattice.cpython-311.pyc index d366861b..d0aa14cb 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/lattice.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/lattice.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/line.cpython-311.pyc 
b/.venv/Lib/site-packages/networkx/generators/__pycache__/line.cpython-311.pyc index 386125da..4faeb24b 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/line.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/line.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/mycielski.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/mycielski.cpython-311.pyc index b72f3bef..8e871c60 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/mycielski.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/mycielski.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/nonisomorphic_trees.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/nonisomorphic_trees.cpython-311.pyc index c767d910..afe8f51c 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/nonisomorphic_trees.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/nonisomorphic_trees.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/random_clustered.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/random_clustered.cpython-311.pyc index 19a4e9b6..c74e4948 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/random_clustered.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/random_clustered.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/random_graphs.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/random_graphs.cpython-311.pyc index 58758de2..5cb93d34 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/random_graphs.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/random_graphs.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/networkx/generators/__pycache__/small.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/small.cpython-311.pyc index c05b1ad8..e186a826 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/small.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/small.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/social.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/social.cpython-311.pyc index 05380de3..ac7adf01 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/social.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/social.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/spectral_graph_forge.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/spectral_graph_forge.cpython-311.pyc index 89426a01..e390243e 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/spectral_graph_forge.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/spectral_graph_forge.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/stochastic.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/stochastic.cpython-311.pyc index 0f568118..5b34ddfa 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/stochastic.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/stochastic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/sudoku.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/sudoku.cpython-311.pyc index 1200885f..5538a8b9 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/sudoku.cpython-311.pyc and 
b/.venv/Lib/site-packages/networkx/generators/__pycache__/sudoku.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/trees.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/trees.cpython-311.pyc index 1f9cc096..2524beac 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/trees.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/trees.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/generators/__pycache__/triads.cpython-311.pyc b/.venv/Lib/site-packages/networkx/generators/__pycache__/triads.cpython-311.pyc index 24b1516f..e0f1a330 100644 Binary files a/.venv/Lib/site-packages/networkx/generators/__pycache__/triads.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/generators/__pycache__/triads.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/linalg/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/linalg/__pycache__/__init__.cpython-311.pyc index 31cfc971..d8a61378 100644 Binary files a/.venv/Lib/site-packages/networkx/linalg/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/linalg/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/linalg/__pycache__/algebraicconnectivity.cpython-311.pyc b/.venv/Lib/site-packages/networkx/linalg/__pycache__/algebraicconnectivity.cpython-311.pyc index 68970d3b..945fe756 100644 Binary files a/.venv/Lib/site-packages/networkx/linalg/__pycache__/algebraicconnectivity.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/linalg/__pycache__/algebraicconnectivity.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/linalg/__pycache__/attrmatrix.cpython-311.pyc b/.venv/Lib/site-packages/networkx/linalg/__pycache__/attrmatrix.cpython-311.pyc index 3040215a..fde80e4c 100644 Binary files a/.venv/Lib/site-packages/networkx/linalg/__pycache__/attrmatrix.cpython-311.pyc and 
b/.venv/Lib/site-packages/networkx/linalg/__pycache__/attrmatrix.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/linalg/__pycache__/bethehessianmatrix.cpython-311.pyc b/.venv/Lib/site-packages/networkx/linalg/__pycache__/bethehessianmatrix.cpython-311.pyc index 17303b8c..6f974afa 100644 Binary files a/.venv/Lib/site-packages/networkx/linalg/__pycache__/bethehessianmatrix.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/linalg/__pycache__/bethehessianmatrix.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/linalg/__pycache__/graphmatrix.cpython-311.pyc b/.venv/Lib/site-packages/networkx/linalg/__pycache__/graphmatrix.cpython-311.pyc index b34f0b57..d3ae1e24 100644 Binary files a/.venv/Lib/site-packages/networkx/linalg/__pycache__/graphmatrix.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/linalg/__pycache__/graphmatrix.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/linalg/__pycache__/laplacianmatrix.cpython-311.pyc b/.venv/Lib/site-packages/networkx/linalg/__pycache__/laplacianmatrix.cpython-311.pyc index 711d3e8c..3912c940 100644 Binary files a/.venv/Lib/site-packages/networkx/linalg/__pycache__/laplacianmatrix.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/linalg/__pycache__/laplacianmatrix.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/linalg/__pycache__/modularitymatrix.cpython-311.pyc b/.venv/Lib/site-packages/networkx/linalg/__pycache__/modularitymatrix.cpython-311.pyc index b6cf01bc..28522998 100644 Binary files a/.venv/Lib/site-packages/networkx/linalg/__pycache__/modularitymatrix.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/linalg/__pycache__/modularitymatrix.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/linalg/__pycache__/spectrum.cpython-311.pyc b/.venv/Lib/site-packages/networkx/linalg/__pycache__/spectrum.cpython-311.pyc index 6c4bd5b4..926f1653 100644 Binary files 
a/.venv/Lib/site-packages/networkx/linalg/__pycache__/spectrum.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/linalg/__pycache__/spectrum.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/__init__.cpython-311.pyc index e7dbba3c..b3ad06d6 100644 Binary files a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/adjlist.cpython-311.pyc b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/adjlist.cpython-311.pyc index 3759b98e..f7c04335 100644 Binary files a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/adjlist.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/adjlist.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/edgelist.cpython-311.pyc b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/edgelist.cpython-311.pyc index 62c4d086..e476234a 100644 Binary files a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/edgelist.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/edgelist.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/gexf.cpython-311.pyc b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/gexf.cpython-311.pyc index 0dd150d1..dbef088c 100644 Binary files a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/gexf.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/gexf.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/gml.cpython-311.pyc b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/gml.cpython-311.pyc index 814da96a..5195eeef 100644 Binary files 
a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/gml.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/gml.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/gpickle.cpython-311.pyc b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/gpickle.cpython-311.pyc index f9ab542e..9f589ee0 100644 Binary files a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/gpickle.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/gpickle.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/graph6.cpython-311.pyc b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/graph6.cpython-311.pyc index 0242732a..7fec0d0c 100644 Binary files a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/graph6.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/graph6.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/graphml.cpython-311.pyc b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/graphml.cpython-311.pyc index e15dd7a7..7a0e7512 100644 Binary files a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/graphml.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/graphml.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/leda.cpython-311.pyc b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/leda.cpython-311.pyc index a4e31fe4..75746b9b 100644 Binary files a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/leda.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/leda.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/multiline_adjlist.cpython-311.pyc b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/multiline_adjlist.cpython-311.pyc index a3385518..61260ea2 100644 Binary files 
a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/multiline_adjlist.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/multiline_adjlist.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/nx_shp.cpython-311.pyc b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/nx_shp.cpython-311.pyc index 9b2446c6..bf08f0e5 100644 Binary files a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/nx_shp.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/nx_shp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/pajek.cpython-311.pyc b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/pajek.cpython-311.pyc index 8186c987..aa893537 100644 Binary files a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/pajek.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/pajek.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/sparse6.cpython-311.pyc b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/sparse6.cpython-311.pyc index c2c223fa..9f34e10c 100644 Binary files a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/sparse6.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/sparse6.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/text.cpython-311.pyc b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/text.cpython-311.pyc index c3abdb75..f03d6c20 100644 Binary files a/.venv/Lib/site-packages/networkx/readwrite/__pycache__/text.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/readwrite/__pycache__/text.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/__init__.cpython-311.pyc index cae49b2f..edbec9e2 100644 Binary files 
a/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/adjacency.cpython-311.pyc b/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/adjacency.cpython-311.pyc index e5a81d11..6c753642 100644 Binary files a/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/adjacency.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/adjacency.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/cytoscape.cpython-311.pyc b/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/cytoscape.cpython-311.pyc index 006d3e22..4e1b3af9 100644 Binary files a/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/cytoscape.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/cytoscape.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/jit.cpython-311.pyc b/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/jit.cpython-311.pyc index 287de198..3c5310d2 100644 Binary files a/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/jit.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/jit.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/node_link.cpython-311.pyc b/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/node_link.cpython-311.pyc index 41d92146..9305f655 100644 Binary files a/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/node_link.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/node_link.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/tree.cpython-311.pyc b/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/tree.cpython-311.pyc index a9e9fcf5..7a323586 100644 Binary files a/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/tree.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/readwrite/json_graph/__pycache__/tree.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/testing/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/testing/__pycache__/__init__.cpython-311.pyc index f7c2ffb9..ff9b6d55 100644 Binary files a/.venv/Lib/site-packages/networkx/testing/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/testing/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/testing/__pycache__/test.cpython-311.pyc b/.venv/Lib/site-packages/networkx/testing/__pycache__/test.cpython-311.pyc index 46039481..bae550c3 100644 Binary files a/.venv/Lib/site-packages/networkx/testing/__pycache__/test.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/testing/__pycache__/test.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/testing/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/networkx/testing/__pycache__/utils.cpython-311.pyc index 76442678..c4cd17dd 100644 Binary files a/.venv/Lib/site-packages/networkx/testing/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/testing/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/utils/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/networkx/utils/__pycache__/__init__.cpython-311.pyc index 2bef353e..4eb3254f 100644 Binary files a/.venv/Lib/site-packages/networkx/utils/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/utils/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/networkx/utils/__pycache__/contextmanagers.cpython-311.pyc b/.venv/Lib/site-packages/networkx/utils/__pycache__/contextmanagers.cpython-311.pyc index 658f43ed..0e705ef6 100644 Binary files a/.venv/Lib/site-packages/networkx/utils/__pycache__/contextmanagers.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/utils/__pycache__/contextmanagers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/utils/__pycache__/decorators.cpython-311.pyc b/.venv/Lib/site-packages/networkx/utils/__pycache__/decorators.cpython-311.pyc index 55a89515..6ac430a5 100644 Binary files a/.venv/Lib/site-packages/networkx/utils/__pycache__/decorators.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/utils/__pycache__/decorators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/utils/__pycache__/heaps.cpython-311.pyc b/.venv/Lib/site-packages/networkx/utils/__pycache__/heaps.cpython-311.pyc index 7cb95a97..964314d4 100644 Binary files a/.venv/Lib/site-packages/networkx/utils/__pycache__/heaps.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/utils/__pycache__/heaps.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/utils/__pycache__/mapped_queue.cpython-311.pyc b/.venv/Lib/site-packages/networkx/utils/__pycache__/mapped_queue.cpython-311.pyc index e3632457..750ed0e2 100644 Binary files a/.venv/Lib/site-packages/networkx/utils/__pycache__/mapped_queue.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/utils/__pycache__/mapped_queue.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/utils/__pycache__/misc.cpython-311.pyc b/.venv/Lib/site-packages/networkx/utils/__pycache__/misc.cpython-311.pyc index 93de24c3..56326269 100644 Binary files a/.venv/Lib/site-packages/networkx/utils/__pycache__/misc.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/utils/__pycache__/misc.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/networkx/utils/__pycache__/random_sequence.cpython-311.pyc b/.venv/Lib/site-packages/networkx/utils/__pycache__/random_sequence.cpython-311.pyc index 1765cddc..81d900fe 100644 Binary files a/.venv/Lib/site-packages/networkx/utils/__pycache__/random_sequence.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/utils/__pycache__/random_sequence.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/utils/__pycache__/rcm.cpython-311.pyc b/.venv/Lib/site-packages/networkx/utils/__pycache__/rcm.cpython-311.pyc index 5a969b52..eeb10d0d 100644 Binary files a/.venv/Lib/site-packages/networkx/utils/__pycache__/rcm.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/utils/__pycache__/rcm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/networkx/utils/__pycache__/union_find.cpython-311.pyc b/.venv/Lib/site-packages/networkx/utils/__pycache__/union_find.cpython-311.pyc index 4f4d63da..732fafe0 100644 Binary files a/.venv/Lib/site-packages/networkx/utils/__pycache__/union_find.cpython-311.pyc and b/.venv/Lib/site-packages/networkx/utils/__pycache__/union_find.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/__init__.cpython-311.pyc index 5aecf97e..a0d2dfd3 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/base.cpython-311.pyc index 91e4e781..ccb68e8d 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/base.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/compat.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/compat.cpython-311.pyc 
index de113fa6..671ee5fa 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/compat.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/currency.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/currency.cpython-311.pyc index d94f5548..846a3f8f 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/currency.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/currency.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_AM.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_AM.cpython-311.pyc index 87b60668..99359dd5 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_AM.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_AM.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_AR.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_AR.cpython-311.pyc index b2b86674..e08eb298 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_AR.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_AR.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_AZ.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_AZ.cpython-311.pyc index 270ef9b9..5adbb71e 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_AZ.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_AZ.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_BY.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_BY.cpython-311.pyc index 4cf9f0ad..ecf9d5de 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_BY.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_BY.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/num2words/__pycache__/lang_CZ.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_CZ.cpython-311.pyc index dee5e85d..88fa8073 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_CZ.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_CZ.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_DE.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_DE.cpython-311.pyc index 7cef896d..f39c25eb 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_DE.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_DE.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_DK.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_DK.cpython-311.pyc index 985ac743..744d66e6 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_DK.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_DK.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_EN.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_EN.cpython-311.pyc index 6ac1129e..838edd58 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_EN.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_EN.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_EN_IN.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_EN_IN.cpython-311.pyc index 3aad949f..4fbbccb7 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_EN_IN.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_EN_IN.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_EN_NG.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_EN_NG.cpython-311.pyc index f8835284..6be82061 100644 Binary files 
a/.venv/Lib/site-packages/num2words/__pycache__/lang_EN_NG.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_EN_NG.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_EO.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_EO.cpython-311.pyc index 68b6a591..35dc0376 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_EO.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_EO.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_ES.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_ES.cpython-311.pyc index 0cb30f54..ba2d4540 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_ES.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_ES.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_ES_CO.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_ES_CO.cpython-311.pyc index 21c9ba39..bc96a62c 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_ES_CO.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_ES_CO.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_ES_GT.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_ES_GT.cpython-311.pyc index d8f1504c..03574180 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_ES_GT.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_ES_GT.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_ES_NI.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_ES_NI.cpython-311.pyc index ea36838c..1f0c2293 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_ES_NI.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_ES_NI.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/num2words/__pycache__/lang_ES_VE.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_ES_VE.cpython-311.pyc index b279c488..41f814d7 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_ES_VE.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_ES_VE.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_EU.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_EU.cpython-311.pyc index f214b875..eda597c9 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_EU.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_EU.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_FA.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_FA.cpython-311.pyc index 19b18b7b..eb08d86c 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_FA.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_FA.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_FI.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_FI.cpython-311.pyc index 93906cc1..93d6bc65 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_FI.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_FI.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_FR.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_FR.cpython-311.pyc index fe2540f3..453cf22e 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_FR.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_FR.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_FR_BE.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_FR_BE.cpython-311.pyc index 1b9b435a..0da88cec 100644 Binary files 
a/.venv/Lib/site-packages/num2words/__pycache__/lang_FR_BE.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_FR_BE.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_FR_CH.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_FR_CH.cpython-311.pyc index c9d7d5ee..7a718924 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_FR_CH.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_FR_CH.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_FR_DZ.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_FR_DZ.cpython-311.pyc index 15891c08..506583d4 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_FR_DZ.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_FR_DZ.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_HE.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_HE.cpython-311.pyc index c74ad87c..1c6de484 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_HE.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_HE.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_HU.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_HU.cpython-311.pyc index 06dcd3a7..fb970224 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_HU.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_HU.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_ID.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_ID.cpython-311.pyc index a014a825..a849eb5a 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_ID.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_ID.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/num2words/__pycache__/lang_IS.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_IS.cpython-311.pyc index eaf08124..5fdf7df5 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_IS.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_IS.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_IT.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_IT.cpython-311.pyc index 2aa14ea2..af818bdf 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_IT.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_IT.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_JA.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_JA.cpython-311.pyc index af9f66f7..67635ffd 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_JA.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_JA.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_KN.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_KN.cpython-311.pyc index af3c9760..b7610413 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_KN.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_KN.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_KO.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_KO.cpython-311.pyc index 315e370c..22bb2522 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_KO.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_KO.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_KZ.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_KZ.cpython-311.pyc index 476486f5..12ea8335 100644 Binary files 
a/.venv/Lib/site-packages/num2words/__pycache__/lang_KZ.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_KZ.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_LT.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_LT.cpython-311.pyc index 196fde26..8e5aab3e 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_LT.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_LT.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_LV.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_LV.cpython-311.pyc index 5f872014..5bc957d2 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_LV.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_LV.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_NL.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_NL.cpython-311.pyc index beba1620..2ad27609 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_NL.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_NL.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_NO.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_NO.cpython-311.pyc index 348e8140..5f066918 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_NO.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_NO.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_PL.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_PL.cpython-311.pyc index b70435f2..f60dc597 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_PL.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_PL.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/num2words/__pycache__/lang_PT.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_PT.cpython-311.pyc index 9d58658d..3332bd3a 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_PT.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_PT.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_PT_BR.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_PT_BR.cpython-311.pyc index 36424dfb..7cf377a4 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_PT_BR.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_PT_BR.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_RO.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_RO.cpython-311.pyc index 6e6ca359..139ebc6a 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_RO.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_RO.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_RU.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_RU.cpython-311.pyc index 4fef1d1e..8c73e5bc 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_RU.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_RU.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_SK.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_SK.cpython-311.pyc index c4824dfe..5557720e 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_SK.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_SK.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_SL.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_SL.cpython-311.pyc index 588c826c..5cffa3a5 100644 Binary files 
a/.venv/Lib/site-packages/num2words/__pycache__/lang_SL.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_SL.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_SR.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_SR.cpython-311.pyc index a0371317..98f0d131 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_SR.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_SR.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_SV.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_SV.cpython-311.pyc index 66649da0..1ef4f009 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_SV.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_SV.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_TE.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_TE.cpython-311.pyc index b223b386..2dcc870f 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_TE.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_TE.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_TG.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_TG.cpython-311.pyc index 6f35ca6d..3f0ad128 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_TG.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_TG.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_TH.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_TH.cpython-311.pyc index 80a24ef5..a05188b9 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_TH.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_TH.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/num2words/__pycache__/lang_TR.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_TR.cpython-311.pyc index 5cfc731f..07fc5f82 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_TR.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_TR.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_UK.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_UK.cpython-311.pyc index 95eca005..ac28f49b 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_UK.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_UK.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/lang_VI.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/lang_VI.cpython-311.pyc index b8ebf6d7..d4a475b1 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/lang_VI.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/lang_VI.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/num2words/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/num2words/__pycache__/utils.cpython-311.pyc index 8a8c9cdd..051a4e73 100644 Binary files a/.venv/Lib/site-packages/num2words/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/num2words/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numba/__pycache__/__init__.cpython-311.pyc index e34fedae..9a78fc20 100644 Binary files a/.venv/Lib/site-packages/numba/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/__pycache__/_version.cpython-311.pyc b/.venv/Lib/site-packages/numba/__pycache__/_version.cpython-311.pyc index ff0e1b12..71bf3b0e 100644 Binary files 
a/.venv/Lib/site-packages/numba/__pycache__/_version.cpython-311.pyc and b/.venv/Lib/site-packages/numba/__pycache__/_version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/__pycache__/extending.cpython-311.pyc b/.venv/Lib/site-packages/numba/__pycache__/extending.cpython-311.pyc index 98d59e99..5ff4bbe7 100644 Binary files a/.venv/Lib/site-packages/numba/__pycache__/extending.cpython-311.pyc and b/.venv/Lib/site-packages/numba/__pycache__/extending.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cloudpickle/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numba/cloudpickle/__pycache__/__init__.cpython-311.pyc index a6a5d289..3b7e52d0 100644 Binary files a/.venv/Lib/site-packages/numba/cloudpickle/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cloudpickle/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cloudpickle/__pycache__/cloudpickle.cpython-311.pyc b/.venv/Lib/site-packages/numba/cloudpickle/__pycache__/cloudpickle.cpython-311.pyc index 79d72bcd..7fc6f7d9 100644 Binary files a/.venv/Lib/site-packages/numba/cloudpickle/__pycache__/cloudpickle.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cloudpickle/__pycache__/cloudpickle.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/__init__.cpython-311.pyc index ad072fe0..4bdcf185 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/analysis.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/analysis.cpython-311.pyc index e32acefd..5ce055c4 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/analysis.cpython-311.pyc and 
b/.venv/Lib/site-packages/numba/core/__pycache__/analysis.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/base.cpython-311.pyc index 6d9e11ca..4190167f 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/base.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/boxing.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/boxing.cpython-311.pyc index ea23e8de..bec7b50a 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/boxing.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/boxing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/bytecode.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/bytecode.cpython-311.pyc index fd49efd7..22ca0e85 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/bytecode.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/bytecode.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/byteflow.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/byteflow.cpython-311.pyc index 129786b3..3856b3dc 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/byteflow.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/byteflow.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/caching.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/caching.cpython-311.pyc index 52f7b9ed..e0606e7e 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/caching.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/caching.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/callconv.cpython-311.pyc 
b/.venv/Lib/site-packages/numba/core/__pycache__/callconv.cpython-311.pyc index 4257f18b..069845f2 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/callconv.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/callconv.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/callwrapper.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/callwrapper.cpython-311.pyc index d46375b9..e22fb6ef 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/callwrapper.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/callwrapper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/ccallback.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/ccallback.cpython-311.pyc index 45f14ac4..1103f6f7 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/ccallback.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/ccallback.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/cgutils.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/cgutils.cpython-311.pyc index 58253992..48407586 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/cgutils.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/cgutils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/codegen.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/codegen.cpython-311.pyc index ac172d5f..342b076e 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/codegen.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/codegen.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/compiler.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/compiler.cpython-311.pyc index c7942d1e..4cd3fd28 100644 Binary files 
a/.venv/Lib/site-packages/numba/core/__pycache__/compiler.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/compiler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/compiler_lock.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/compiler_lock.cpython-311.pyc index d30557cb..de4fa7bd 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/compiler_lock.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/compiler_lock.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/compiler_machinery.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/compiler_machinery.cpython-311.pyc index 1f3018ee..3cbd118e 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/compiler_machinery.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/compiler_machinery.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/config.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/config.cpython-311.pyc index 2cec4583..0eec5a6e 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/config.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/consts.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/consts.cpython-311.pyc index 538b2d00..28924b76 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/consts.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/consts.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/controlflow.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/controlflow.cpython-311.pyc index d65c6018..7bd862f4 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/controlflow.cpython-311.pyc and 
b/.venv/Lib/site-packages/numba/core/__pycache__/controlflow.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/cpu.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/cpu.cpython-311.pyc index 51ba328b..d54b6005 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/cpu.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/cpu.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/cpu_options.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/cpu_options.cpython-311.pyc index 1871a987..a07f6ad3 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/cpu_options.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/cpu_options.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/debuginfo.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/debuginfo.cpython-311.pyc index b45a3b18..4acc8427 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/debuginfo.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/debuginfo.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/decorators.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/decorators.cpython-311.pyc index 86c105f6..22cd914e 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/decorators.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/decorators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/descriptors.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/descriptors.cpython-311.pyc index 864c835f..22f917b2 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/descriptors.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/descriptors.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/numba/core/__pycache__/dispatcher.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/dispatcher.cpython-311.pyc index 8570d39c..3c35065d 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/dispatcher.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/dispatcher.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/entrypoints.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/entrypoints.cpython-311.pyc index 2d28320c..88c21053 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/entrypoints.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/entrypoints.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/environment.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/environment.cpython-311.pyc index 159fabfc..66d0cad5 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/environment.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/environment.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/errors.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/errors.cpython-311.pyc index 65160cd6..a1b82e7c 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/errors.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/errors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/event.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/event.cpython-311.pyc index b36f8f16..91b8cf22 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/event.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/event.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/extending.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/extending.cpython-311.pyc index 
1b4f078e..c4b7e309 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/extending.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/extending.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/externals.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/externals.cpython-311.pyc index 07533e42..ac30e033 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/externals.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/externals.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/fastmathpass.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/fastmathpass.cpython-311.pyc index 31476dd8..2c6e3d6a 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/fastmathpass.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/fastmathpass.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/funcdesc.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/funcdesc.cpython-311.pyc index 813872a3..c21f84d2 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/funcdesc.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/funcdesc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/generators.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/generators.cpython-311.pyc index 3b1113cf..c909e463 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/generators.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/generators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/imputils.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/imputils.cpython-311.pyc index fa92d41f..37de6540 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/imputils.cpython-311.pyc and 
b/.venv/Lib/site-packages/numba/core/__pycache__/imputils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/inline_closurecall.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/inline_closurecall.cpython-311.pyc index 91be3c7d..1b926da1 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/inline_closurecall.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/inline_closurecall.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/interpreter.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/interpreter.cpython-311.pyc index 965a3c43..4bfcbca1 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/interpreter.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/interpreter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/intrinsics.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/intrinsics.cpython-311.pyc index bd3e13e2..f074b2a6 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/intrinsics.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/intrinsics.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/ir.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/ir.cpython-311.pyc index f72ea5c3..15722b1c 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/ir.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/ir.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/ir_utils.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/ir_utils.cpython-311.pyc index 62d83a9c..d7ecb934 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/ir_utils.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/ir_utils.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/numba/core/__pycache__/itanium_mangler.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/itanium_mangler.cpython-311.pyc index 48b5474a..d92d33b6 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/itanium_mangler.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/itanium_mangler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/llvm_bindings.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/llvm_bindings.cpython-311.pyc index 8b209245..8b937426 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/llvm_bindings.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/llvm_bindings.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/lowering.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/lowering.cpython-311.pyc index 8b3e964d..74dd6949 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/lowering.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/lowering.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/object_mode_passes.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/object_mode_passes.cpython-311.pyc index 53c80c3c..33251b57 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/object_mode_passes.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/object_mode_passes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/optional.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/optional.cpython-311.pyc index d980151c..5580a0bd 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/optional.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/optional.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/options.cpython-311.pyc 
b/.venv/Lib/site-packages/numba/core/__pycache__/options.cpython-311.pyc index b496ee56..86c6faeb 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/options.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/options.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/postproc.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/postproc.cpython-311.pyc index 4169cd24..5c00cba6 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/postproc.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/postproc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/pylowering.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/pylowering.cpython-311.pyc index 843284ff..6c7db85b 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/pylowering.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/pylowering.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/pythonapi.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/pythonapi.cpython-311.pyc index df0ad783..ff0552bf 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/pythonapi.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/pythonapi.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/registry.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/registry.cpython-311.pyc index 0e63f9c0..e21c4388 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/registry.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/registry.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/removerefctpass.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/removerefctpass.cpython-311.pyc index 810479af..39591b57 100644 Binary files 
a/.venv/Lib/site-packages/numba/core/__pycache__/removerefctpass.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/removerefctpass.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/retarget.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/retarget.cpython-311.pyc index a694c534..5a357b1a 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/retarget.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/retarget.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/serialize.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/serialize.cpython-311.pyc index 6d2bad90..f16b01aa 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/serialize.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/serialize.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/sigutils.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/sigutils.cpython-311.pyc index a2c7beec..705229e0 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/sigutils.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/sigutils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/ssa.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/ssa.cpython-311.pyc index bdb208ad..dac04d85 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/ssa.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/ssa.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/target_extension.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/target_extension.cpython-311.pyc index b0d0aa1e..e5433ee1 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/target_extension.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/target_extension.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/targetconfig.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/targetconfig.cpython-311.pyc index 51b637af..1e2ce2ae 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/targetconfig.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/targetconfig.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/tracing.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/tracing.cpython-311.pyc index dafb1d9c..e0af5562 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/tracing.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/tracing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/transforms.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/transforms.cpython-311.pyc index a427c38f..7b8b30ff 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/transforms.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/transforms.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/typed_passes.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/typed_passes.cpython-311.pyc index d93772cb..a1389cd1 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/typed_passes.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/typed_passes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/typeinfer.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/typeinfer.cpython-311.pyc index dbfed501..1eef8391 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/typeinfer.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/typeinfer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/untyped_passes.cpython-311.pyc 
b/.venv/Lib/site-packages/numba/core/__pycache__/untyped_passes.cpython-311.pyc index c78d30be..af1db0d5 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/untyped_passes.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/untyped_passes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/utils.cpython-311.pyc index 833039b3..ab66910a 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/__pycache__/withcontexts.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/__pycache__/withcontexts.cpython-311.pyc index 148d1f1c..e3113e1d 100644 Binary files a/.venv/Lib/site-packages/numba/core/__pycache__/withcontexts.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/__pycache__/withcontexts.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/annotations/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/annotations/__pycache__/__init__.cpython-311.pyc index fb39db1c..2faeddaa 100644 Binary files a/.venv/Lib/site-packages/numba/core/annotations/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/annotations/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/annotations/__pycache__/type_annotations.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/annotations/__pycache__/type_annotations.cpython-311.pyc index 987996a1..8dd88ea0 100644 Binary files a/.venv/Lib/site-packages/numba/core/annotations/__pycache__/type_annotations.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/annotations/__pycache__/type_annotations.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/datamodel/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/numba/core/datamodel/__pycache__/__init__.cpython-311.pyc index 36bac733..71811bdc 100644 Binary files a/.venv/Lib/site-packages/numba/core/datamodel/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/datamodel/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/datamodel/__pycache__/manager.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/datamodel/__pycache__/manager.cpython-311.pyc index c321e67d..cc5a7f10 100644 Binary files a/.venv/Lib/site-packages/numba/core/datamodel/__pycache__/manager.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/datamodel/__pycache__/manager.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/datamodel/__pycache__/models.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/datamodel/__pycache__/models.cpython-311.pyc index 9fd5adf5..f40b141d 100644 Binary files a/.venv/Lib/site-packages/numba/core/datamodel/__pycache__/models.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/datamodel/__pycache__/models.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/datamodel/__pycache__/packer.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/datamodel/__pycache__/packer.cpython-311.pyc index e086512b..272e2bd7 100644 Binary files a/.venv/Lib/site-packages/numba/core/datamodel/__pycache__/packer.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/datamodel/__pycache__/packer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/datamodel/__pycache__/registry.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/datamodel/__pycache__/registry.cpython-311.pyc index 4b743f6b..0581bf43 100644 Binary files a/.venv/Lib/site-packages/numba/core/datamodel/__pycache__/registry.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/datamodel/__pycache__/registry.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/__init__.cpython-311.pyc index a691ed37..42a6d83a 100644 Binary files a/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/ir_print.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/ir_print.cpython-311.pyc index 04772df0..ed6e4b39 100644 Binary files a/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/ir_print.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/ir_print.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/registry.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/registry.cpython-311.pyc index 0f9a0c82..340fff3a 100644 Binary files a/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/registry.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/registry.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/static_binop.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/static_binop.cpython-311.pyc index 0ca0ef45..ad7d9803 100644 Binary files a/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/static_binop.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/static_binop.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/static_getitem.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/static_getitem.cpython-311.pyc index cf57844e..0da2822e 100644 Binary files a/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/static_getitem.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/static_getitem.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/static_raise.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/static_raise.cpython-311.pyc index 4fd02215..97d4ae40 100644 Binary files a/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/static_raise.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/rewrites/__pycache__/static_raise.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/runtime/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/runtime/__pycache__/__init__.cpython-311.pyc index 20e66c56..562d00b1 100644 Binary files a/.venv/Lib/site-packages/numba/core/runtime/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/runtime/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/runtime/__pycache__/context.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/runtime/__pycache__/context.cpython-311.pyc index 9312ec34..353a04f6 100644 Binary files a/.venv/Lib/site-packages/numba/core/runtime/__pycache__/context.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/runtime/__pycache__/context.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/runtime/__pycache__/nrt.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/runtime/__pycache__/nrt.cpython-311.pyc index e2a0fe17..64279571 100644 Binary files a/.venv/Lib/site-packages/numba/core/runtime/__pycache__/nrt.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/runtime/__pycache__/nrt.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/runtime/__pycache__/nrtdynmod.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/runtime/__pycache__/nrtdynmod.cpython-311.pyc index cdb8ac7d..77a78545 100644 Binary files a/.venv/Lib/site-packages/numba/core/runtime/__pycache__/nrtdynmod.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/runtime/__pycache__/nrtdynmod.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/numba/core/runtime/__pycache__/nrtopt.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/runtime/__pycache__/nrtopt.cpython-311.pyc index 01c6c3e3..10cf3a98 100644 Binary files a/.venv/Lib/site-packages/numba/core/runtime/__pycache__/nrtopt.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/runtime/__pycache__/nrtopt.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/typeconv/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typeconv/__pycache__/__init__.cpython-311.pyc index 1938538f..cfc5c8f2 100644 Binary files a/.venv/Lib/site-packages/numba/core/typeconv/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typeconv/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/typeconv/__pycache__/castgraph.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typeconv/__pycache__/castgraph.cpython-311.pyc index 5dfa0367..438d7d9b 100644 Binary files a/.venv/Lib/site-packages/numba/core/typeconv/__pycache__/castgraph.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typeconv/__pycache__/castgraph.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/typeconv/__pycache__/rules.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typeconv/__pycache__/rules.cpython-311.pyc index d2150630..72a0fec6 100644 Binary files a/.venv/Lib/site-packages/numba/core/typeconv/__pycache__/rules.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typeconv/__pycache__/rules.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/typeconv/__pycache__/typeconv.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typeconv/__pycache__/typeconv.cpython-311.pyc index 5cce44e5..4ba9e21b 100644 Binary files a/.venv/Lib/site-packages/numba/core/typeconv/__pycache__/typeconv.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typeconv/__pycache__/typeconv.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/numba/core/types/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/types/__pycache__/__init__.cpython-311.pyc index 7510a1c2..73c34969 100644 Binary files a/.venv/Lib/site-packages/numba/core/types/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/types/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/types/__pycache__/abstract.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/types/__pycache__/abstract.cpython-311.pyc index f84bde10..37efd929 100644 Binary files a/.venv/Lib/site-packages/numba/core/types/__pycache__/abstract.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/types/__pycache__/abstract.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/types/__pycache__/common.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/types/__pycache__/common.cpython-311.pyc index f8918762..ddd5677e 100644 Binary files a/.venv/Lib/site-packages/numba/core/types/__pycache__/common.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/types/__pycache__/common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/types/__pycache__/containers.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/types/__pycache__/containers.cpython-311.pyc index 53b144bf..b3c887b4 100644 Binary files a/.venv/Lib/site-packages/numba/core/types/__pycache__/containers.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/types/__pycache__/containers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/types/__pycache__/function_type.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/types/__pycache__/function_type.cpython-311.pyc index e35ad336..84549316 100644 Binary files a/.venv/Lib/site-packages/numba/core/types/__pycache__/function_type.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/types/__pycache__/function_type.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/numba/core/types/__pycache__/functions.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/types/__pycache__/functions.cpython-311.pyc index 1c52f2d9..7bd62d31 100644 Binary files a/.venv/Lib/site-packages/numba/core/types/__pycache__/functions.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/types/__pycache__/functions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/types/__pycache__/iterators.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/types/__pycache__/iterators.cpython-311.pyc index d0079c99..3e95e8f5 100644 Binary files a/.venv/Lib/site-packages/numba/core/types/__pycache__/iterators.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/types/__pycache__/iterators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/types/__pycache__/misc.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/types/__pycache__/misc.cpython-311.pyc index 36ef9cea..38fd3c87 100644 Binary files a/.venv/Lib/site-packages/numba/core/types/__pycache__/misc.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/types/__pycache__/misc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/types/__pycache__/npytypes.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/types/__pycache__/npytypes.cpython-311.pyc index 60384b74..04bb0af7 100644 Binary files a/.venv/Lib/site-packages/numba/core/types/__pycache__/npytypes.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/types/__pycache__/npytypes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/types/__pycache__/scalars.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/types/__pycache__/scalars.cpython-311.pyc index a09ea9a6..ba5e8a34 100644 Binary files a/.venv/Lib/site-packages/numba/core/types/__pycache__/scalars.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/types/__pycache__/scalars.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/numba/core/typing/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typing/__pycache__/__init__.cpython-311.pyc index 7472e1f2..c2c198f3 100644 Binary files a/.venv/Lib/site-packages/numba/core/typing/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typing/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/typing/__pycache__/arraydecl.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typing/__pycache__/arraydecl.cpython-311.pyc index 6ba96826..ae59cb16 100644 Binary files a/.venv/Lib/site-packages/numba/core/typing/__pycache__/arraydecl.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typing/__pycache__/arraydecl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/typing/__pycache__/asnumbatype.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typing/__pycache__/asnumbatype.cpython-311.pyc index 386dd5be..720f13ff 100644 Binary files a/.venv/Lib/site-packages/numba/core/typing/__pycache__/asnumbatype.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typing/__pycache__/asnumbatype.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/typing/__pycache__/bufproto.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typing/__pycache__/bufproto.cpython-311.pyc index e9d47244..672f8604 100644 Binary files a/.venv/Lib/site-packages/numba/core/typing/__pycache__/bufproto.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typing/__pycache__/bufproto.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/typing/__pycache__/builtins.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typing/__pycache__/builtins.cpython-311.pyc index 8ea28d7e..9b6909bb 100644 Binary files a/.venv/Lib/site-packages/numba/core/typing/__pycache__/builtins.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typing/__pycache__/builtins.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/numba/core/typing/__pycache__/cffi_utils.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typing/__pycache__/cffi_utils.cpython-311.pyc index d63377a6..e2dc3cdc 100644 Binary files a/.venv/Lib/site-packages/numba/core/typing/__pycache__/cffi_utils.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typing/__pycache__/cffi_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/typing/__pycache__/cmathdecl.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typing/__pycache__/cmathdecl.cpython-311.pyc index 753d2647..5e944c91 100644 Binary files a/.venv/Lib/site-packages/numba/core/typing/__pycache__/cmathdecl.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typing/__pycache__/cmathdecl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/typing/__pycache__/collections.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typing/__pycache__/collections.cpython-311.pyc index 69e718c4..514c8910 100644 Binary files a/.venv/Lib/site-packages/numba/core/typing/__pycache__/collections.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typing/__pycache__/collections.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/typing/__pycache__/context.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typing/__pycache__/context.cpython-311.pyc index 68e5265e..8c96adb6 100644 Binary files a/.venv/Lib/site-packages/numba/core/typing/__pycache__/context.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typing/__pycache__/context.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/typing/__pycache__/ctypes_utils.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typing/__pycache__/ctypes_utils.cpython-311.pyc index 03f2fb5d..9a45a434 100644 Binary files a/.venv/Lib/site-packages/numba/core/typing/__pycache__/ctypes_utils.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typing/__pycache__/ctypes_utils.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/numba/core/typing/__pycache__/dictdecl.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typing/__pycache__/dictdecl.cpython-311.pyc index 6aed0f57..ca88b9e8 100644 Binary files a/.venv/Lib/site-packages/numba/core/typing/__pycache__/dictdecl.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typing/__pycache__/dictdecl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/typing/__pycache__/enumdecl.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typing/__pycache__/enumdecl.cpython-311.pyc index d1e9d227..cfa8f0be 100644 Binary files a/.venv/Lib/site-packages/numba/core/typing/__pycache__/enumdecl.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typing/__pycache__/enumdecl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/typing/__pycache__/listdecl.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typing/__pycache__/listdecl.cpython-311.pyc index e918fd07..e4add166 100644 Binary files a/.venv/Lib/site-packages/numba/core/typing/__pycache__/listdecl.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typing/__pycache__/listdecl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/typing/__pycache__/mathdecl.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typing/__pycache__/mathdecl.cpython-311.pyc index 8dda2c60..65565daa 100644 Binary files a/.venv/Lib/site-packages/numba/core/typing/__pycache__/mathdecl.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typing/__pycache__/mathdecl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/typing/__pycache__/npdatetime.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typing/__pycache__/npdatetime.cpython-311.pyc index 12d4d6cc..49bf9f30 100644 Binary files a/.venv/Lib/site-packages/numba/core/typing/__pycache__/npdatetime.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typing/__pycache__/npdatetime.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/numba/core/typing/__pycache__/npydecl.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typing/__pycache__/npydecl.cpython-311.pyc index 60ba6579..0e3ce4c8 100644 Binary files a/.venv/Lib/site-packages/numba/core/typing/__pycache__/npydecl.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typing/__pycache__/npydecl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/typing/__pycache__/setdecl.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typing/__pycache__/setdecl.cpython-311.pyc index d2764843..039241d4 100644 Binary files a/.venv/Lib/site-packages/numba/core/typing/__pycache__/setdecl.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typing/__pycache__/setdecl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/typing/__pycache__/templates.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typing/__pycache__/templates.cpython-311.pyc index fd7937de..1f7f24a4 100644 Binary files a/.venv/Lib/site-packages/numba/core/typing/__pycache__/templates.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typing/__pycache__/templates.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/typing/__pycache__/typeof.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/typing/__pycache__/typeof.cpython-311.pyc index 944dd38b..f8273b40 100644 Binary files a/.venv/Lib/site-packages/numba/core/typing/__pycache__/typeof.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/typing/__pycache__/typeof.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/unsafe/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/unsafe/__pycache__/__init__.cpython-311.pyc index 7e51ceb9..1f22f281 100644 Binary files a/.venv/Lib/site-packages/numba/core/unsafe/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/unsafe/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/numba/core/unsafe/__pycache__/bytes.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/unsafe/__pycache__/bytes.cpython-311.pyc index f660ade8..b24ccaf2 100644 Binary files a/.venv/Lib/site-packages/numba/core/unsafe/__pycache__/bytes.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/unsafe/__pycache__/bytes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/core/unsafe/__pycache__/eh.cpython-311.pyc b/.venv/Lib/site-packages/numba/core/unsafe/__pycache__/eh.cpython-311.pyc index 10d6d09d..22c86b91 100644 Binary files a/.venv/Lib/site-packages/numba/core/unsafe/__pycache__/eh.cpython-311.pyc and b/.venv/Lib/site-packages/numba/core/unsafe/__pycache__/eh.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cpython/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numba/cpython/__pycache__/__init__.cpython-311.pyc index b55bcf9c..ecbe39c1 100644 Binary files a/.venv/Lib/site-packages/numba/cpython/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cpython/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cpython/__pycache__/builtins.cpython-311.pyc b/.venv/Lib/site-packages/numba/cpython/__pycache__/builtins.cpython-311.pyc index da235a10..31baa258 100644 Binary files a/.venv/Lib/site-packages/numba/cpython/__pycache__/builtins.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cpython/__pycache__/builtins.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cpython/__pycache__/charseq.cpython-311.pyc b/.venv/Lib/site-packages/numba/cpython/__pycache__/charseq.cpython-311.pyc index 67262e6c..e53ed7dc 100644 Binary files a/.venv/Lib/site-packages/numba/cpython/__pycache__/charseq.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cpython/__pycache__/charseq.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cpython/__pycache__/cmathimpl.cpython-311.pyc 
b/.venv/Lib/site-packages/numba/cpython/__pycache__/cmathimpl.cpython-311.pyc index 2fa5c7ce..141397f4 100644 Binary files a/.venv/Lib/site-packages/numba/cpython/__pycache__/cmathimpl.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cpython/__pycache__/cmathimpl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cpython/__pycache__/enumimpl.cpython-311.pyc b/.venv/Lib/site-packages/numba/cpython/__pycache__/enumimpl.cpython-311.pyc index 0fdaa97b..173fcecb 100644 Binary files a/.venv/Lib/site-packages/numba/cpython/__pycache__/enumimpl.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cpython/__pycache__/enumimpl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cpython/__pycache__/hashing.cpython-311.pyc b/.venv/Lib/site-packages/numba/cpython/__pycache__/hashing.cpython-311.pyc index b0153ad5..0b50e5bc 100644 Binary files a/.venv/Lib/site-packages/numba/cpython/__pycache__/hashing.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cpython/__pycache__/hashing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cpython/__pycache__/heapq.cpython-311.pyc b/.venv/Lib/site-packages/numba/cpython/__pycache__/heapq.cpython-311.pyc index a8dd6592..569f83ac 100644 Binary files a/.venv/Lib/site-packages/numba/cpython/__pycache__/heapq.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cpython/__pycache__/heapq.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cpython/__pycache__/iterators.cpython-311.pyc b/.venv/Lib/site-packages/numba/cpython/__pycache__/iterators.cpython-311.pyc index 9e748404..23798970 100644 Binary files a/.venv/Lib/site-packages/numba/cpython/__pycache__/iterators.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cpython/__pycache__/iterators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cpython/__pycache__/listobj.cpython-311.pyc b/.venv/Lib/site-packages/numba/cpython/__pycache__/listobj.cpython-311.pyc index a195894c..10bdafff 100644 Binary files 
a/.venv/Lib/site-packages/numba/cpython/__pycache__/listobj.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cpython/__pycache__/listobj.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cpython/__pycache__/mathimpl.cpython-311.pyc b/.venv/Lib/site-packages/numba/cpython/__pycache__/mathimpl.cpython-311.pyc index 7345d447..22a160c1 100644 Binary files a/.venv/Lib/site-packages/numba/cpython/__pycache__/mathimpl.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cpython/__pycache__/mathimpl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cpython/__pycache__/numbers.cpython-311.pyc b/.venv/Lib/site-packages/numba/cpython/__pycache__/numbers.cpython-311.pyc index d43f08d9..9fa2d4fa 100644 Binary files a/.venv/Lib/site-packages/numba/cpython/__pycache__/numbers.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cpython/__pycache__/numbers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cpython/__pycache__/printimpl.cpython-311.pyc b/.venv/Lib/site-packages/numba/cpython/__pycache__/printimpl.cpython-311.pyc index f137209a..059b1333 100644 Binary files a/.venv/Lib/site-packages/numba/cpython/__pycache__/printimpl.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cpython/__pycache__/printimpl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cpython/__pycache__/randomimpl.cpython-311.pyc b/.venv/Lib/site-packages/numba/cpython/__pycache__/randomimpl.cpython-311.pyc index 1797f33c..16204054 100644 Binary files a/.venv/Lib/site-packages/numba/cpython/__pycache__/randomimpl.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cpython/__pycache__/randomimpl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cpython/__pycache__/rangeobj.cpython-311.pyc b/.venv/Lib/site-packages/numba/cpython/__pycache__/rangeobj.cpython-311.pyc index 502d102b..f9fdffe6 100644 Binary files a/.venv/Lib/site-packages/numba/cpython/__pycache__/rangeobj.cpython-311.pyc and 
b/.venv/Lib/site-packages/numba/cpython/__pycache__/rangeobj.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cpython/__pycache__/setobj.cpython-311.pyc b/.venv/Lib/site-packages/numba/cpython/__pycache__/setobj.cpython-311.pyc index 0e70da24..7225c8e0 100644 Binary files a/.venv/Lib/site-packages/numba/cpython/__pycache__/setobj.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cpython/__pycache__/setobj.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cpython/__pycache__/slicing.cpython-311.pyc b/.venv/Lib/site-packages/numba/cpython/__pycache__/slicing.cpython-311.pyc index e97aca4b..6aa5e835 100644 Binary files a/.venv/Lib/site-packages/numba/cpython/__pycache__/slicing.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cpython/__pycache__/slicing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cpython/__pycache__/tupleobj.cpython-311.pyc b/.venv/Lib/site-packages/numba/cpython/__pycache__/tupleobj.cpython-311.pyc index 7cfaea4a..7fa39fab 100644 Binary files a/.venv/Lib/site-packages/numba/cpython/__pycache__/tupleobj.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cpython/__pycache__/tupleobj.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cpython/__pycache__/unicode.cpython-311.pyc b/.venv/Lib/site-packages/numba/cpython/__pycache__/unicode.cpython-311.pyc index 2125f685..92c9be00 100644 Binary files a/.venv/Lib/site-packages/numba/cpython/__pycache__/unicode.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cpython/__pycache__/unicode.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cpython/__pycache__/unicode_support.cpython-311.pyc b/.venv/Lib/site-packages/numba/cpython/__pycache__/unicode_support.cpython-311.pyc index 48ea6b33..f266bfb8 100644 Binary files a/.venv/Lib/site-packages/numba/cpython/__pycache__/unicode_support.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cpython/__pycache__/unicode_support.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/numba/cpython/unsafe/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numba/cpython/unsafe/__pycache__/__init__.cpython-311.pyc index b7d9a73d..41c6d40a 100644 Binary files a/.venv/Lib/site-packages/numba/cpython/unsafe/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cpython/unsafe/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cpython/unsafe/__pycache__/numbers.cpython-311.pyc b/.venv/Lib/site-packages/numba/cpython/unsafe/__pycache__/numbers.cpython-311.pyc index 60d861fb..00c53e62 100644 Binary files a/.venv/Lib/site-packages/numba/cpython/unsafe/__pycache__/numbers.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cpython/unsafe/__pycache__/numbers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/cpython/unsafe/__pycache__/tuple.cpython-311.pyc b/.venv/Lib/site-packages/numba/cpython/unsafe/__pycache__/tuple.cpython-311.pyc index f6049e88..292eefe1 100644 Binary files a/.venv/Lib/site-packages/numba/cpython/unsafe/__pycache__/tuple.cpython-311.pyc and b/.venv/Lib/site-packages/numba/cpython/unsafe/__pycache__/tuple.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/experimental/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numba/experimental/__pycache__/__init__.cpython-311.pyc index 20566430..025b2539 100644 Binary files a/.venv/Lib/site-packages/numba/experimental/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/experimental/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/experimental/__pycache__/function_type.cpython-311.pyc b/.venv/Lib/site-packages/numba/experimental/__pycache__/function_type.cpython-311.pyc index 57fe0236..d9e19e6b 100644 Binary files a/.venv/Lib/site-packages/numba/experimental/__pycache__/function_type.cpython-311.pyc and b/.venv/Lib/site-packages/numba/experimental/__pycache__/function_type.cpython-311.pyc differ diff 
--git a/.venv/Lib/site-packages/numba/experimental/jitclass/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numba/experimental/jitclass/__pycache__/__init__.cpython-311.pyc index aea971c9..2f6db618 100644 Binary files a/.venv/Lib/site-packages/numba/experimental/jitclass/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/experimental/jitclass/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/experimental/jitclass/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/numba/experimental/jitclass/__pycache__/base.cpython-311.pyc index 77440cf5..cbde61b6 100644 Binary files a/.venv/Lib/site-packages/numba/experimental/jitclass/__pycache__/base.cpython-311.pyc and b/.venv/Lib/site-packages/numba/experimental/jitclass/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/experimental/jitclass/__pycache__/boxing.cpython-311.pyc b/.venv/Lib/site-packages/numba/experimental/jitclass/__pycache__/boxing.cpython-311.pyc index 4632c854..116dfbba 100644 Binary files a/.venv/Lib/site-packages/numba/experimental/jitclass/__pycache__/boxing.cpython-311.pyc and b/.venv/Lib/site-packages/numba/experimental/jitclass/__pycache__/boxing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/experimental/jitclass/__pycache__/decorators.cpython-311.pyc b/.venv/Lib/site-packages/numba/experimental/jitclass/__pycache__/decorators.cpython-311.pyc index 509c7c3c..9ec10264 100644 Binary files a/.venv/Lib/site-packages/numba/experimental/jitclass/__pycache__/decorators.cpython-311.pyc and b/.venv/Lib/site-packages/numba/experimental/jitclass/__pycache__/decorators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/experimental/jitclass/__pycache__/overloads.cpython-311.pyc b/.venv/Lib/site-packages/numba/experimental/jitclass/__pycache__/overloads.cpython-311.pyc index bb4972fd..eef168d0 100644 Binary files 
a/.venv/Lib/site-packages/numba/experimental/jitclass/__pycache__/overloads.cpython-311.pyc and b/.venv/Lib/site-packages/numba/experimental/jitclass/__pycache__/overloads.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/misc/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numba/misc/__pycache__/__init__.cpython-311.pyc index 6f454a31..04d9cc8a 100644 Binary files a/.venv/Lib/site-packages/numba/misc/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/misc/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/misc/__pycache__/appdirs.cpython-311.pyc b/.venv/Lib/site-packages/numba/misc/__pycache__/appdirs.cpython-311.pyc index 60693f28..46888c22 100644 Binary files a/.venv/Lib/site-packages/numba/misc/__pycache__/appdirs.cpython-311.pyc and b/.venv/Lib/site-packages/numba/misc/__pycache__/appdirs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/misc/__pycache__/cffiimpl.cpython-311.pyc b/.venv/Lib/site-packages/numba/misc/__pycache__/cffiimpl.cpython-311.pyc index 3b122019..1a616ae8 100644 Binary files a/.venv/Lib/site-packages/numba/misc/__pycache__/cffiimpl.cpython-311.pyc and b/.venv/Lib/site-packages/numba/misc/__pycache__/cffiimpl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/misc/__pycache__/firstlinefinder.cpython-311.pyc b/.venv/Lib/site-packages/numba/misc/__pycache__/firstlinefinder.cpython-311.pyc index 5f3b1df5..2fb98bd4 100644 Binary files a/.venv/Lib/site-packages/numba/misc/__pycache__/firstlinefinder.cpython-311.pyc and b/.venv/Lib/site-packages/numba/misc/__pycache__/firstlinefinder.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/misc/__pycache__/gdb_hook.cpython-311.pyc b/.venv/Lib/site-packages/numba/misc/__pycache__/gdb_hook.cpython-311.pyc index d9a71788..4121aa23 100644 Binary files a/.venv/Lib/site-packages/numba/misc/__pycache__/gdb_hook.cpython-311.pyc and 
b/.venv/Lib/site-packages/numba/misc/__pycache__/gdb_hook.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/misc/__pycache__/init_utils.cpython-311.pyc b/.venv/Lib/site-packages/numba/misc/__pycache__/init_utils.cpython-311.pyc index a3079969..15d086e4 100644 Binary files a/.venv/Lib/site-packages/numba/misc/__pycache__/init_utils.cpython-311.pyc and b/.venv/Lib/site-packages/numba/misc/__pycache__/init_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/misc/__pycache__/inspection.cpython-311.pyc b/.venv/Lib/site-packages/numba/misc/__pycache__/inspection.cpython-311.pyc index c8d12109..8350f7ca 100644 Binary files a/.venv/Lib/site-packages/numba/misc/__pycache__/inspection.cpython-311.pyc and b/.venv/Lib/site-packages/numba/misc/__pycache__/inspection.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/misc/__pycache__/literal.cpython-311.pyc b/.venv/Lib/site-packages/numba/misc/__pycache__/literal.cpython-311.pyc index d0769c46..88014260 100644 Binary files a/.venv/Lib/site-packages/numba/misc/__pycache__/literal.cpython-311.pyc and b/.venv/Lib/site-packages/numba/misc/__pycache__/literal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/misc/__pycache__/llvm_pass_timings.cpython-311.pyc b/.venv/Lib/site-packages/numba/misc/__pycache__/llvm_pass_timings.cpython-311.pyc index e50c7038..b24b5e3b 100644 Binary files a/.venv/Lib/site-packages/numba/misc/__pycache__/llvm_pass_timings.cpython-311.pyc and b/.venv/Lib/site-packages/numba/misc/__pycache__/llvm_pass_timings.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/misc/__pycache__/mergesort.cpython-311.pyc b/.venv/Lib/site-packages/numba/misc/__pycache__/mergesort.cpython-311.pyc index b2bb113f..19cba6f2 100644 Binary files a/.venv/Lib/site-packages/numba/misc/__pycache__/mergesort.cpython-311.pyc and b/.venv/Lib/site-packages/numba/misc/__pycache__/mergesort.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/numba/misc/__pycache__/quicksort.cpython-311.pyc b/.venv/Lib/site-packages/numba/misc/__pycache__/quicksort.cpython-311.pyc index 596c3cf8..29838d3c 100644 Binary files a/.venv/Lib/site-packages/numba/misc/__pycache__/quicksort.cpython-311.pyc and b/.venv/Lib/site-packages/numba/misc/__pycache__/quicksort.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/misc/__pycache__/special.cpython-311.pyc b/.venv/Lib/site-packages/numba/misc/__pycache__/special.cpython-311.pyc index 56f592fb..c673a7f8 100644 Binary files a/.venv/Lib/site-packages/numba/misc/__pycache__/special.cpython-311.pyc and b/.venv/Lib/site-packages/numba/misc/__pycache__/special.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/__pycache__/__init__.cpython-311.pyc index 1fd829f6..f8b20360 100644 Binary files a/.venv/Lib/site-packages/numba/np/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/__pycache__/arraymath.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/__pycache__/arraymath.cpython-311.pyc index 60f5ecd4..30d40c41 100644 Binary files a/.venv/Lib/site-packages/numba/np/__pycache__/arraymath.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/__pycache__/arraymath.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/__pycache__/arrayobj.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/__pycache__/arrayobj.cpython-311.pyc index 522c6a90..2078e21a 100644 Binary files a/.venv/Lib/site-packages/numba/np/__pycache__/arrayobj.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/__pycache__/arrayobj.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/__pycache__/linalg.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/__pycache__/linalg.cpython-311.pyc index 3196fa0c..47bdfe97 100644 Binary files 
a/.venv/Lib/site-packages/numba/np/__pycache__/linalg.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/__pycache__/linalg.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/__pycache__/npdatetime.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/__pycache__/npdatetime.cpython-311.pyc index 8b54bcdb..2a667575 100644 Binary files a/.venv/Lib/site-packages/numba/np/__pycache__/npdatetime.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/__pycache__/npdatetime.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/__pycache__/npdatetime_helpers.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/__pycache__/npdatetime_helpers.cpython-311.pyc index a816ad71..267aed22 100644 Binary files a/.venv/Lib/site-packages/numba/np/__pycache__/npdatetime_helpers.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/__pycache__/npdatetime_helpers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/__pycache__/npyfuncs.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/__pycache__/npyfuncs.cpython-311.pyc index 52fb138b..7cd89981 100644 Binary files a/.venv/Lib/site-packages/numba/np/__pycache__/npyfuncs.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/__pycache__/npyfuncs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/__pycache__/npyimpl.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/__pycache__/npyimpl.cpython-311.pyc index 66b130cc..6b14f195 100644 Binary files a/.venv/Lib/site-packages/numba/np/__pycache__/npyimpl.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/__pycache__/npyimpl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/__pycache__/numpy_support.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/__pycache__/numpy_support.cpython-311.pyc index 0cd253d4..b8a74b9a 100644 Binary files a/.venv/Lib/site-packages/numba/np/__pycache__/numpy_support.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/__pycache__/numpy_support.cpython-311.pyc differ diff 
--git a/.venv/Lib/site-packages/numba/np/__pycache__/ufunc_db.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/__pycache__/ufunc_db.cpython-311.pyc index 5b9b90a4..45e0fbfb 100644 Binary files a/.venv/Lib/site-packages/numba/np/__pycache__/ufunc_db.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/__pycache__/ufunc_db.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/polynomial/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/polynomial/__pycache__/__init__.cpython-311.pyc index 548fc257..109eefc5 100644 Binary files a/.venv/Lib/site-packages/numba/np/polynomial/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/polynomial/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/polynomial/__pycache__/polynomial_core.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/polynomial/__pycache__/polynomial_core.cpython-311.pyc index 7015b798..9a03238d 100644 Binary files a/.venv/Lib/site-packages/numba/np/polynomial/__pycache__/polynomial_core.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/polynomial/__pycache__/polynomial_core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/polynomial/__pycache__/polynomial_functions.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/polynomial/__pycache__/polynomial_functions.cpython-311.pyc index 10c93100..e350fef8 100644 Binary files a/.venv/Lib/site-packages/numba/np/polynomial/__pycache__/polynomial_functions.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/polynomial/__pycache__/polynomial_functions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/random/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/random/__pycache__/__init__.cpython-311.pyc index 6d66261f..b4539067 100644 Binary files a/.venv/Lib/site-packages/numba/np/random/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/random/__pycache__/__init__.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/numba/np/random/__pycache__/_constants.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/random/__pycache__/_constants.cpython-311.pyc index d3339dc5..1b2be14a 100644 Binary files a/.venv/Lib/site-packages/numba/np/random/__pycache__/_constants.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/random/__pycache__/_constants.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/random/__pycache__/distributions.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/random/__pycache__/distributions.cpython-311.pyc index 4d0c6bdd..b4771c5c 100644 Binary files a/.venv/Lib/site-packages/numba/np/random/__pycache__/distributions.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/random/__pycache__/distributions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/random/__pycache__/generator_core.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/random/__pycache__/generator_core.cpython-311.pyc index 97424f4f..1bbe0351 100644 Binary files a/.venv/Lib/site-packages/numba/np/random/__pycache__/generator_core.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/random/__pycache__/generator_core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/random/__pycache__/generator_methods.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/random/__pycache__/generator_methods.cpython-311.pyc index 1f7df44e..a5be8759 100644 Binary files a/.venv/Lib/site-packages/numba/np/random/__pycache__/generator_methods.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/random/__pycache__/generator_methods.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/random/__pycache__/random_methods.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/random/__pycache__/random_methods.cpython-311.pyc index 11406529..ad8d8928 100644 Binary files a/.venv/Lib/site-packages/numba/np/random/__pycache__/random_methods.cpython-311.pyc and 
b/.venv/Lib/site-packages/numba/np/random/__pycache__/random_methods.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/__init__.cpython-311.pyc index bf0e260d..a50a615f 100644 Binary files a/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/array_exprs.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/array_exprs.cpython-311.pyc index 05a06cdb..db6aa0b6 100644 Binary files a/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/array_exprs.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/array_exprs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/decorators.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/decorators.cpython-311.pyc index a21590eb..5f548883 100644 Binary files a/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/decorators.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/decorators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/dufunc.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/dufunc.cpython-311.pyc index 755c067a..dad81560 100644 Binary files a/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/dufunc.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/dufunc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/gufunc.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/gufunc.cpython-311.pyc index 7313c245..b654db83 100644 Binary files a/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/gufunc.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/gufunc.cpython-311.pyc differ diff 
--git a/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/parallel.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/parallel.cpython-311.pyc index 66a728fe..4a51eb38 100644 Binary files a/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/parallel.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/parallel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/sigparse.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/sigparse.cpython-311.pyc index 0b1bfda5..5465c445 100644 Binary files a/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/sigparse.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/sigparse.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/ufuncbuilder.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/ufuncbuilder.cpython-311.pyc index b139a34f..6afa831d 100644 Binary files a/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/ufuncbuilder.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/ufuncbuilder.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/wrappers.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/wrappers.cpython-311.pyc index 83a05463..810f40aa 100644 Binary files a/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/wrappers.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/ufunc/__pycache__/wrappers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/unsafe/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numba/np/unsafe/__pycache__/__init__.cpython-311.pyc index 63ddbc2c..97a2f9a4 100644 Binary files a/.venv/Lib/site-packages/numba/np/unsafe/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/unsafe/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/np/unsafe/__pycache__/ndarray.cpython-311.pyc 
b/.venv/Lib/site-packages/numba/np/unsafe/__pycache__/ndarray.cpython-311.pyc index b8293c11..a41908e4 100644 Binary files a/.venv/Lib/site-packages/numba/np/unsafe/__pycache__/ndarray.cpython-311.pyc and b/.venv/Lib/site-packages/numba/np/unsafe/__pycache__/ndarray.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/parfors/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numba/parfors/__pycache__/__init__.cpython-311.pyc index 5f27270e..69a3f9d5 100644 Binary files a/.venv/Lib/site-packages/numba/parfors/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/parfors/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/parfors/__pycache__/array_analysis.cpython-311.pyc b/.venv/Lib/site-packages/numba/parfors/__pycache__/array_analysis.cpython-311.pyc index 4d499cf9..a0ce5156 100644 Binary files a/.venv/Lib/site-packages/numba/parfors/__pycache__/array_analysis.cpython-311.pyc and b/.venv/Lib/site-packages/numba/parfors/__pycache__/array_analysis.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/parfors/__pycache__/parfor.cpython-311.pyc b/.venv/Lib/site-packages/numba/parfors/__pycache__/parfor.cpython-311.pyc index 063a7688..0941d1ee 100644 Binary files a/.venv/Lib/site-packages/numba/parfors/__pycache__/parfor.cpython-311.pyc and b/.venv/Lib/site-packages/numba/parfors/__pycache__/parfor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/parfors/__pycache__/parfor_lowering.cpython-311.pyc b/.venv/Lib/site-packages/numba/parfors/__pycache__/parfor_lowering.cpython-311.pyc index a792684d..ea48cf40 100644 Binary files a/.venv/Lib/site-packages/numba/parfors/__pycache__/parfor_lowering.cpython-311.pyc and b/.venv/Lib/site-packages/numba/parfors/__pycache__/parfor_lowering.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/parfors/__pycache__/parfor_lowering_utils.cpython-311.pyc 
b/.venv/Lib/site-packages/numba/parfors/__pycache__/parfor_lowering_utils.cpython-311.pyc index 7a0158e6..57a10609 100644 Binary files a/.venv/Lib/site-packages/numba/parfors/__pycache__/parfor_lowering_utils.cpython-311.pyc and b/.venv/Lib/site-packages/numba/parfors/__pycache__/parfor_lowering_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/stencils/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numba/stencils/__pycache__/__init__.cpython-311.pyc index 78a8125f..f831f43a 100644 Binary files a/.venv/Lib/site-packages/numba/stencils/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/stencils/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/stencils/__pycache__/stencil.cpython-311.pyc b/.venv/Lib/site-packages/numba/stencils/__pycache__/stencil.cpython-311.pyc index 95b6fa36..ee7365c9 100644 Binary files a/.venv/Lib/site-packages/numba/stencils/__pycache__/stencil.cpython-311.pyc and b/.venv/Lib/site-packages/numba/stencils/__pycache__/stencil.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/stencils/__pycache__/stencilparfor.cpython-311.pyc b/.venv/Lib/site-packages/numba/stencils/__pycache__/stencilparfor.cpython-311.pyc index 6e30c6d3..87b57925 100644 Binary files a/.venv/Lib/site-packages/numba/stencils/__pycache__/stencilparfor.cpython-311.pyc and b/.venv/Lib/site-packages/numba/stencils/__pycache__/stencilparfor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/typed/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numba/typed/__pycache__/__init__.cpython-311.pyc index 135c82a7..05c849d2 100644 Binary files a/.venv/Lib/site-packages/numba/typed/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numba/typed/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/typed/__pycache__/dictimpl.cpython-311.pyc 
b/.venv/Lib/site-packages/numba/typed/__pycache__/dictimpl.cpython-311.pyc index b8e600a9..447b2f7b 100644 Binary files a/.venv/Lib/site-packages/numba/typed/__pycache__/dictimpl.cpython-311.pyc and b/.venv/Lib/site-packages/numba/typed/__pycache__/dictimpl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/typed/__pycache__/dictobject.cpython-311.pyc b/.venv/Lib/site-packages/numba/typed/__pycache__/dictobject.cpython-311.pyc index 8bf3584c..09fa8d6f 100644 Binary files a/.venv/Lib/site-packages/numba/typed/__pycache__/dictobject.cpython-311.pyc and b/.venv/Lib/site-packages/numba/typed/__pycache__/dictobject.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/typed/__pycache__/listobject.cpython-311.pyc b/.venv/Lib/site-packages/numba/typed/__pycache__/listobject.cpython-311.pyc index 8d0eb000..763d1a1e 100644 Binary files a/.venv/Lib/site-packages/numba/typed/__pycache__/listobject.cpython-311.pyc and b/.venv/Lib/site-packages/numba/typed/__pycache__/listobject.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/typed/__pycache__/typeddict.cpython-311.pyc b/.venv/Lib/site-packages/numba/typed/__pycache__/typeddict.cpython-311.pyc index fbe50f1f..a235a369 100644 Binary files a/.venv/Lib/site-packages/numba/typed/__pycache__/typeddict.cpython-311.pyc and b/.venv/Lib/site-packages/numba/typed/__pycache__/typeddict.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/typed/__pycache__/typedlist.cpython-311.pyc b/.venv/Lib/site-packages/numba/typed/__pycache__/typedlist.cpython-311.pyc index e595c3e8..af5186d7 100644 Binary files a/.venv/Lib/site-packages/numba/typed/__pycache__/typedlist.cpython-311.pyc and b/.venv/Lib/site-packages/numba/typed/__pycache__/typedlist.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numba/typed/__pycache__/typedobjectutils.cpython-311.pyc b/.venv/Lib/site-packages/numba/typed/__pycache__/typedobjectutils.cpython-311.pyc index b36a833c..e647fba6 100644 Binary files 
a/.venv/Lib/site-packages/numba/typed/__pycache__/typedobjectutils.cpython-311.pyc and b/.venv/Lib/site-packages/numba/typed/__pycache__/typedobjectutils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/__pycache__/__config__.cpython-311.pyc b/.venv/Lib/site-packages/numpy/__pycache__/__config__.cpython-311.pyc index bdbb96d1..0c77d479 100644 Binary files a/.venv/Lib/site-packages/numpy/__pycache__/__config__.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/__pycache__/__config__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numpy/__pycache__/__init__.cpython-311.pyc index d0452d91..da921048 100644 Binary files a/.venv/Lib/site-packages/numpy/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/__pycache__/_distributor_init.cpython-311.pyc b/.venv/Lib/site-packages/numpy/__pycache__/_distributor_init.cpython-311.pyc index 4ac5bce2..8e922db2 100644 Binary files a/.venv/Lib/site-packages/numpy/__pycache__/_distributor_init.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/__pycache__/_distributor_init.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/__pycache__/_globals.cpython-311.pyc b/.venv/Lib/site-packages/numpy/__pycache__/_globals.cpython-311.pyc index 5fd70739..1aaa7da5 100644 Binary files a/.venv/Lib/site-packages/numpy/__pycache__/_globals.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/__pycache__/_globals.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/__pycache__/_pytesttester.cpython-311.pyc b/.venv/Lib/site-packages/numpy/__pycache__/_pytesttester.cpython-311.pyc index eaeb105d..d71871fd 100644 Binary files a/.venv/Lib/site-packages/numpy/__pycache__/_pytesttester.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/__pycache__/_pytesttester.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/numpy/__pycache__/ctypeslib.cpython-311.pyc b/.venv/Lib/site-packages/numpy/__pycache__/ctypeslib.cpython-311.pyc index 4c0eb884..22de924e 100644 Binary files a/.venv/Lib/site-packages/numpy/__pycache__/ctypeslib.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/__pycache__/ctypeslib.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/__pycache__/dtypes.cpython-311.pyc b/.venv/Lib/site-packages/numpy/__pycache__/dtypes.cpython-311.pyc index 1e6a4b58..77454f8a 100644 Binary files a/.venv/Lib/site-packages/numpy/__pycache__/dtypes.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/__pycache__/dtypes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/numpy/__pycache__/exceptions.cpython-311.pyc index 821d0e4a..4a34a6fe 100644 Binary files a/.venv/Lib/site-packages/numpy/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/numpy/__pycache__/version.cpython-311.pyc index ca81af44..3b923247 100644 Binary files a/.venv/Lib/site-packages/numpy/__pycache__/version.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/_core/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numpy/_core/__pycache__/__init__.cpython-311.pyc index d26db70f..99b42c9f 100644 Binary files a/.venv/Lib/site-packages/numpy/_core/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/_core/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/_core/__pycache__/_multiarray_umath.cpython-311.pyc b/.venv/Lib/site-packages/numpy/_core/__pycache__/_multiarray_umath.cpython-311.pyc index 3ceef4a5..1fa42a83 100644 Binary files 
a/.venv/Lib/site-packages/numpy/_core/__pycache__/_multiarray_umath.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/_core/__pycache__/_multiarray_umath.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/_typing/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numpy/_typing/__pycache__/__init__.cpython-311.pyc index f01607a9..ac1b897e 100644 Binary files a/.venv/Lib/site-packages/numpy/_typing/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/_typing/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/_typing/__pycache__/_add_docstring.cpython-311.pyc b/.venv/Lib/site-packages/numpy/_typing/__pycache__/_add_docstring.cpython-311.pyc index 632da85b..64e808e4 100644 Binary files a/.venv/Lib/site-packages/numpy/_typing/__pycache__/_add_docstring.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/_typing/__pycache__/_add_docstring.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/_typing/__pycache__/_array_like.cpython-311.pyc b/.venv/Lib/site-packages/numpy/_typing/__pycache__/_array_like.cpython-311.pyc index ea74c895..21651aa9 100644 Binary files a/.venv/Lib/site-packages/numpy/_typing/__pycache__/_array_like.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/_typing/__pycache__/_array_like.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/_typing/__pycache__/_char_codes.cpython-311.pyc b/.venv/Lib/site-packages/numpy/_typing/__pycache__/_char_codes.cpython-311.pyc index 0a33ec5c..f6324952 100644 Binary files a/.venv/Lib/site-packages/numpy/_typing/__pycache__/_char_codes.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/_typing/__pycache__/_char_codes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/_typing/__pycache__/_dtype_like.cpython-311.pyc b/.venv/Lib/site-packages/numpy/_typing/__pycache__/_dtype_like.cpython-311.pyc index 31b4e616..268f16e3 100644 Binary files 
a/.venv/Lib/site-packages/numpy/_typing/__pycache__/_dtype_like.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/_typing/__pycache__/_dtype_like.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/_typing/__pycache__/_nbit.cpython-311.pyc b/.venv/Lib/site-packages/numpy/_typing/__pycache__/_nbit.cpython-311.pyc index 4f675a8d..ed3636d4 100644 Binary files a/.venv/Lib/site-packages/numpy/_typing/__pycache__/_nbit.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/_typing/__pycache__/_nbit.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/_typing/__pycache__/_nested_sequence.cpython-311.pyc b/.venv/Lib/site-packages/numpy/_typing/__pycache__/_nested_sequence.cpython-311.pyc index dd86344a..304f37f1 100644 Binary files a/.venv/Lib/site-packages/numpy/_typing/__pycache__/_nested_sequence.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/_typing/__pycache__/_nested_sequence.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/_typing/__pycache__/_scalars.cpython-311.pyc b/.venv/Lib/site-packages/numpy/_typing/__pycache__/_scalars.cpython-311.pyc index 8c2ee102..102a42b1 100644 Binary files a/.venv/Lib/site-packages/numpy/_typing/__pycache__/_scalars.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/_typing/__pycache__/_scalars.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/_typing/__pycache__/_shape.cpython-311.pyc b/.venv/Lib/site-packages/numpy/_typing/__pycache__/_shape.cpython-311.pyc index 35bdc424..44268c2a 100644 Binary files a/.venv/Lib/site-packages/numpy/_typing/__pycache__/_shape.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/_typing/__pycache__/_shape.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/_utils/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numpy/_utils/__pycache__/__init__.cpython-311.pyc index f775ca3f..125b84cb 100644 Binary files a/.venv/Lib/site-packages/numpy/_utils/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/numpy/_utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/_utils/__pycache__/_convertions.cpython-311.pyc b/.venv/Lib/site-packages/numpy/_utils/__pycache__/_convertions.cpython-311.pyc index ae26e906..f3d5482f 100644 Binary files a/.venv/Lib/site-packages/numpy/_utils/__pycache__/_convertions.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/_utils/__pycache__/_convertions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/_utils/__pycache__/_inspect.cpython-311.pyc b/.venv/Lib/site-packages/numpy/_utils/__pycache__/_inspect.cpython-311.pyc index fd29ba41..2b6933e9 100644 Binary files a/.venv/Lib/site-packages/numpy/_utils/__pycache__/_inspect.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/_utils/__pycache__/_inspect.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/compat/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numpy/compat/__pycache__/__init__.cpython-311.pyc index 20a0452c..70bf6001 100644 Binary files a/.venv/Lib/site-packages/numpy/compat/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/compat/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/compat/__pycache__/py3k.cpython-311.pyc b/.venv/Lib/site-packages/numpy/compat/__pycache__/py3k.cpython-311.pyc index 0a0bea66..a3be499c 100644 Binary files a/.venv/Lib/site-packages/numpy/compat/__pycache__/py3k.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/compat/__pycache__/py3k.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/__init__.cpython-311.pyc index 6a054213..735cddcb 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/numpy/core/__pycache__/_add_newdocs.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/_add_newdocs.cpython-311.pyc index 30742964..38827067 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/_add_newdocs.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/_add_newdocs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/_add_newdocs_scalars.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/_add_newdocs_scalars.cpython-311.pyc index 9d66bd31..448f0159 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/_add_newdocs_scalars.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/_add_newdocs_scalars.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/_asarray.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/_asarray.cpython-311.pyc index 0a309e86..d7184fa4 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/_asarray.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/_asarray.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/_dtype.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/_dtype.cpython-311.pyc index b1f8e4cc..eebc4e80 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/_dtype.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/_dtype.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/_dtype_ctypes.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/_dtype_ctypes.cpython-311.pyc index 87b86455..a1b7e574 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/_dtype_ctypes.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/_dtype_ctypes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/_exceptions.cpython-311.pyc 
b/.venv/Lib/site-packages/numpy/core/__pycache__/_exceptions.cpython-311.pyc index 96286a38..c76cc147 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/_exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/_exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/_internal.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/_internal.cpython-311.pyc index 22cc4b34..867ac59d 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/_internal.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/_internal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/_machar.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/_machar.cpython-311.pyc index a9d2f9db..e5c5e52b 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/_machar.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/_machar.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/_methods.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/_methods.cpython-311.pyc index 2e535f2a..caceda04 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/_methods.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/_methods.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/_string_helpers.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/_string_helpers.cpython-311.pyc index 6fce25a7..65b15654 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/_string_helpers.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/_string_helpers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/_type_aliases.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/_type_aliases.cpython-311.pyc index f444479d..9e05ee9e 100644 Binary files 
a/.venv/Lib/site-packages/numpy/core/__pycache__/_type_aliases.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/_type_aliases.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/_ufunc_config.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/_ufunc_config.cpython-311.pyc index 5c27d4d3..9fed4fb8 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/_ufunc_config.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/_ufunc_config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/arrayprint.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/arrayprint.cpython-311.pyc index d4508890..d41e5fd0 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/arrayprint.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/arrayprint.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/defchararray.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/defchararray.cpython-311.pyc index b879be83..827c25d4 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/defchararray.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/defchararray.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/einsumfunc.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/einsumfunc.cpython-311.pyc index 0a3f7faa..bcbe03c2 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/einsumfunc.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/einsumfunc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/fromnumeric.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/fromnumeric.cpython-311.pyc index bac2de49..ce0c53af 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/fromnumeric.cpython-311.pyc and 
b/.venv/Lib/site-packages/numpy/core/__pycache__/fromnumeric.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/function_base.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/function_base.cpython-311.pyc index 9909d834..5686a5fa 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/function_base.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/function_base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/getlimits.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/getlimits.cpython-311.pyc index ab2cbb4f..be5381b3 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/getlimits.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/getlimits.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/memmap.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/memmap.cpython-311.pyc index 2d429cf9..b5cfa910 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/memmap.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/memmap.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/multiarray.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/multiarray.cpython-311.pyc index 76574f40..99f8b904 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/multiarray.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/multiarray.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/numeric.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/numeric.cpython-311.pyc index 658532cd..de43d5c0 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/numeric.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/numeric.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/numpy/core/__pycache__/numerictypes.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/numerictypes.cpython-311.pyc index 9ad309b0..3bfa8035 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/numerictypes.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/numerictypes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/overrides.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/overrides.cpython-311.pyc index bcbcab3d..1dcdc788 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/overrides.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/overrides.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/records.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/records.cpython-311.pyc index c38bf4ec..9a72b601 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/records.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/records.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/shape_base.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/shape_base.cpython-311.pyc index da6f4bf1..2a6cddbc 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/shape_base.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/shape_base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/core/__pycache__/umath.cpython-311.pyc b/.venv/Lib/site-packages/numpy/core/__pycache__/umath.cpython-311.pyc index c1794305..e32332b3 100644 Binary files a/.venv/Lib/site-packages/numpy/core/__pycache__/umath.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/core/__pycache__/umath.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/fft/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numpy/fft/__pycache__/__init__.cpython-311.pyc index bd673de7..542b7ed3 
100644 Binary files a/.venv/Lib/site-packages/numpy/fft/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/fft/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/fft/__pycache__/_pocketfft.cpython-311.pyc b/.venv/Lib/site-packages/numpy/fft/__pycache__/_pocketfft.cpython-311.pyc index 07bff7e1..234c56cc 100644 Binary files a/.venv/Lib/site-packages/numpy/fft/__pycache__/_pocketfft.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/fft/__pycache__/_pocketfft.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/fft/__pycache__/helper.cpython-311.pyc b/.venv/Lib/site-packages/numpy/fft/__pycache__/helper.cpython-311.pyc index 58448a27..49c2f4fb 100644 Binary files a/.venv/Lib/site-packages/numpy/fft/__pycache__/helper.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/fft/__pycache__/helper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/lib/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numpy/lib/__pycache__/__init__.cpython-311.pyc index 0940f453..7a8541be 100644 Binary files a/.venv/Lib/site-packages/numpy/lib/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/lib/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/lib/__pycache__/_datasource.cpython-311.pyc b/.venv/Lib/site-packages/numpy/lib/__pycache__/_datasource.cpython-311.pyc index 608d9468..b594c8a4 100644 Binary files a/.venv/Lib/site-packages/numpy/lib/__pycache__/_datasource.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/lib/__pycache__/_datasource.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/lib/__pycache__/_iotools.cpython-311.pyc b/.venv/Lib/site-packages/numpy/lib/__pycache__/_iotools.cpython-311.pyc index 315803a4..ed63df21 100644 Binary files a/.venv/Lib/site-packages/numpy/lib/__pycache__/_iotools.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/lib/__pycache__/_iotools.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/numpy/lib/__pycache__/_version.cpython-311.pyc b/.venv/Lib/site-packages/numpy/lib/__pycache__/_version.cpython-311.pyc index 418e36b9..156b584b 100644 Binary files a/.venv/Lib/site-packages/numpy/lib/__pycache__/_version.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/lib/__pycache__/_version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/lib/__pycache__/arraypad.cpython-311.pyc b/.venv/Lib/site-packages/numpy/lib/__pycache__/arraypad.cpython-311.pyc index b610988e..f81bb935 100644 Binary files a/.venv/Lib/site-packages/numpy/lib/__pycache__/arraypad.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/lib/__pycache__/arraypad.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/lib/__pycache__/arraysetops.cpython-311.pyc b/.venv/Lib/site-packages/numpy/lib/__pycache__/arraysetops.cpython-311.pyc index a4015661..792d3eec 100644 Binary files a/.venv/Lib/site-packages/numpy/lib/__pycache__/arraysetops.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/lib/__pycache__/arraysetops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/lib/__pycache__/arrayterator.cpython-311.pyc b/.venv/Lib/site-packages/numpy/lib/__pycache__/arrayterator.cpython-311.pyc index da54b81f..2e4ab50f 100644 Binary files a/.venv/Lib/site-packages/numpy/lib/__pycache__/arrayterator.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/lib/__pycache__/arrayterator.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/lib/__pycache__/format.cpython-311.pyc b/.venv/Lib/site-packages/numpy/lib/__pycache__/format.cpython-311.pyc index 2c2ad1b8..552e5647 100644 Binary files a/.venv/Lib/site-packages/numpy/lib/__pycache__/format.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/lib/__pycache__/format.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/lib/__pycache__/function_base.cpython-311.pyc b/.venv/Lib/site-packages/numpy/lib/__pycache__/function_base.cpython-311.pyc index d94000f7..15db91dd 
100644 Binary files a/.venv/Lib/site-packages/numpy/lib/__pycache__/function_base.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/lib/__pycache__/function_base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/lib/__pycache__/histograms.cpython-311.pyc b/.venv/Lib/site-packages/numpy/lib/__pycache__/histograms.cpython-311.pyc index 56566309..d6f8f4d3 100644 Binary files a/.venv/Lib/site-packages/numpy/lib/__pycache__/histograms.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/lib/__pycache__/histograms.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/lib/__pycache__/index_tricks.cpython-311.pyc b/.venv/Lib/site-packages/numpy/lib/__pycache__/index_tricks.cpython-311.pyc index 4eca05a9..1b9e8d3e 100644 Binary files a/.venv/Lib/site-packages/numpy/lib/__pycache__/index_tricks.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/lib/__pycache__/index_tricks.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/lib/__pycache__/mixins.cpython-311.pyc b/.venv/Lib/site-packages/numpy/lib/__pycache__/mixins.cpython-311.pyc index 2e41bcc9..5943f1df 100644 Binary files a/.venv/Lib/site-packages/numpy/lib/__pycache__/mixins.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/lib/__pycache__/mixins.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/lib/__pycache__/nanfunctions.cpython-311.pyc b/.venv/Lib/site-packages/numpy/lib/__pycache__/nanfunctions.cpython-311.pyc index b70ac5bb..a4a637d4 100644 Binary files a/.venv/Lib/site-packages/numpy/lib/__pycache__/nanfunctions.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/lib/__pycache__/nanfunctions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/lib/__pycache__/npyio.cpython-311.pyc b/.venv/Lib/site-packages/numpy/lib/__pycache__/npyio.cpython-311.pyc index d4e74440..dd7a2f67 100644 Binary files a/.venv/Lib/site-packages/numpy/lib/__pycache__/npyio.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/lib/__pycache__/npyio.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/numpy/lib/__pycache__/polynomial.cpython-311.pyc b/.venv/Lib/site-packages/numpy/lib/__pycache__/polynomial.cpython-311.pyc index faa64189..0fe9ab0e 100644 Binary files a/.venv/Lib/site-packages/numpy/lib/__pycache__/polynomial.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/lib/__pycache__/polynomial.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/lib/__pycache__/scimath.cpython-311.pyc b/.venv/Lib/site-packages/numpy/lib/__pycache__/scimath.cpython-311.pyc index c5f65d04..a5e29046 100644 Binary files a/.venv/Lib/site-packages/numpy/lib/__pycache__/scimath.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/lib/__pycache__/scimath.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/lib/__pycache__/shape_base.cpython-311.pyc b/.venv/Lib/site-packages/numpy/lib/__pycache__/shape_base.cpython-311.pyc index 33d99cf4..49532b44 100644 Binary files a/.venv/Lib/site-packages/numpy/lib/__pycache__/shape_base.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/lib/__pycache__/shape_base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/lib/__pycache__/stride_tricks.cpython-311.pyc b/.venv/Lib/site-packages/numpy/lib/__pycache__/stride_tricks.cpython-311.pyc index 982036f1..e617c2f7 100644 Binary files a/.venv/Lib/site-packages/numpy/lib/__pycache__/stride_tricks.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/lib/__pycache__/stride_tricks.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/lib/__pycache__/twodim_base.cpython-311.pyc b/.venv/Lib/site-packages/numpy/lib/__pycache__/twodim_base.cpython-311.pyc index 7fe5154e..ce8e5931 100644 Binary files a/.venv/Lib/site-packages/numpy/lib/__pycache__/twodim_base.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/lib/__pycache__/twodim_base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/lib/__pycache__/type_check.cpython-311.pyc 
b/.venv/Lib/site-packages/numpy/lib/__pycache__/type_check.cpython-311.pyc index 91fdecd3..58fbd6c6 100644 Binary files a/.venv/Lib/site-packages/numpy/lib/__pycache__/type_check.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/lib/__pycache__/type_check.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/lib/__pycache__/ufunclike.cpython-311.pyc b/.venv/Lib/site-packages/numpy/lib/__pycache__/ufunclike.cpython-311.pyc index 3fc32034..a38566e1 100644 Binary files a/.venv/Lib/site-packages/numpy/lib/__pycache__/ufunclike.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/lib/__pycache__/ufunclike.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/lib/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/numpy/lib/__pycache__/utils.cpython-311.pyc index 9f634a5f..9f2c23f7 100644 Binary files a/.venv/Lib/site-packages/numpy/lib/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/lib/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/linalg/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numpy/linalg/__pycache__/__init__.cpython-311.pyc index d27136d2..70b08b3e 100644 Binary files a/.venv/Lib/site-packages/numpy/linalg/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/linalg/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/linalg/__pycache__/linalg.cpython-311.pyc b/.venv/Lib/site-packages/numpy/linalg/__pycache__/linalg.cpython-311.pyc index c3deb8d1..68e337d8 100644 Binary files a/.venv/Lib/site-packages/numpy/linalg/__pycache__/linalg.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/linalg/__pycache__/linalg.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/ma/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numpy/ma/__pycache__/__init__.cpython-311.pyc index 07bcfc8c..66a222ec 100644 Binary files a/.venv/Lib/site-packages/numpy/ma/__pycache__/__init__.cpython-311.pyc 
and b/.venv/Lib/site-packages/numpy/ma/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/ma/__pycache__/core.cpython-311.pyc b/.venv/Lib/site-packages/numpy/ma/__pycache__/core.cpython-311.pyc index 91bf2155..426e521a 100644 Binary files a/.venv/Lib/site-packages/numpy/ma/__pycache__/core.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/ma/__pycache__/core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/ma/__pycache__/extras.cpython-311.pyc b/.venv/Lib/site-packages/numpy/ma/__pycache__/extras.cpython-311.pyc index 747bec85..d8481f4f 100644 Binary files a/.venv/Lib/site-packages/numpy/ma/__pycache__/extras.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/ma/__pycache__/extras.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/matrixlib/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numpy/matrixlib/__pycache__/__init__.cpython-311.pyc index d639de9e..43f11e45 100644 Binary files a/.venv/Lib/site-packages/numpy/matrixlib/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/matrixlib/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/matrixlib/__pycache__/defmatrix.cpython-311.pyc b/.venv/Lib/site-packages/numpy/matrixlib/__pycache__/defmatrix.cpython-311.pyc index 038bb5f6..94fabe7e 100644 Binary files a/.venv/Lib/site-packages/numpy/matrixlib/__pycache__/defmatrix.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/matrixlib/__pycache__/defmatrix.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/polynomial/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numpy/polynomial/__pycache__/__init__.cpython-311.pyc index 38a14c29..6ec8640b 100644 Binary files a/.venv/Lib/site-packages/numpy/polynomial/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/polynomial/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/numpy/polynomial/__pycache__/_polybase.cpython-311.pyc b/.venv/Lib/site-packages/numpy/polynomial/__pycache__/_polybase.cpython-311.pyc index 91a0ac05..8eb66912 100644 Binary files a/.venv/Lib/site-packages/numpy/polynomial/__pycache__/_polybase.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/polynomial/__pycache__/_polybase.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/polynomial/__pycache__/chebyshev.cpython-311.pyc b/.venv/Lib/site-packages/numpy/polynomial/__pycache__/chebyshev.cpython-311.pyc index df8e8780..ef8c4dcb 100644 Binary files a/.venv/Lib/site-packages/numpy/polynomial/__pycache__/chebyshev.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/polynomial/__pycache__/chebyshev.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/polynomial/__pycache__/hermite.cpython-311.pyc b/.venv/Lib/site-packages/numpy/polynomial/__pycache__/hermite.cpython-311.pyc index 99896303..b9be6924 100644 Binary files a/.venv/Lib/site-packages/numpy/polynomial/__pycache__/hermite.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/polynomial/__pycache__/hermite.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/polynomial/__pycache__/hermite_e.cpython-311.pyc b/.venv/Lib/site-packages/numpy/polynomial/__pycache__/hermite_e.cpython-311.pyc index 52da8b50..51fbbb23 100644 Binary files a/.venv/Lib/site-packages/numpy/polynomial/__pycache__/hermite_e.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/polynomial/__pycache__/hermite_e.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/polynomial/__pycache__/laguerre.cpython-311.pyc b/.venv/Lib/site-packages/numpy/polynomial/__pycache__/laguerre.cpython-311.pyc index fb0dc4cf..01576a89 100644 Binary files a/.venv/Lib/site-packages/numpy/polynomial/__pycache__/laguerre.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/polynomial/__pycache__/laguerre.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/numpy/polynomial/__pycache__/legendre.cpython-311.pyc b/.venv/Lib/site-packages/numpy/polynomial/__pycache__/legendre.cpython-311.pyc index 3bc957f5..feca92a2 100644 Binary files a/.venv/Lib/site-packages/numpy/polynomial/__pycache__/legendre.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/polynomial/__pycache__/legendre.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/polynomial/__pycache__/polynomial.cpython-311.pyc b/.venv/Lib/site-packages/numpy/polynomial/__pycache__/polynomial.cpython-311.pyc index 5bbe54a4..03211852 100644 Binary files a/.venv/Lib/site-packages/numpy/polynomial/__pycache__/polynomial.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/polynomial/__pycache__/polynomial.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/polynomial/__pycache__/polyutils.cpython-311.pyc b/.venv/Lib/site-packages/numpy/polynomial/__pycache__/polyutils.cpython-311.pyc index 787e1f2a..2b067d8d 100644 Binary files a/.venv/Lib/site-packages/numpy/polynomial/__pycache__/polyutils.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/polynomial/__pycache__/polyutils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/random/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/numpy/random/__pycache__/__init__.cpython-311.pyc index aeba2489..56f5507f 100644 Binary files a/.venv/Lib/site-packages/numpy/random/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/random/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/random/__pycache__/_pickle.cpython-311.pyc b/.venv/Lib/site-packages/numpy/random/__pycache__/_pickle.cpython-311.pyc index 8711ae83..d65558ac 100644 Binary files a/.venv/Lib/site-packages/numpy/random/__pycache__/_pickle.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/random/__pycache__/_pickle.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/numpy/typing/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/numpy/typing/__pycache__/__init__.cpython-311.pyc index e005ecc4..803985a2 100644 Binary files a/.venv/Lib/site-packages/numpy/typing/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/numpy/typing/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai-1.25.2.dist-info/INSTALLER b/.venv/Lib/site-packages/openai-1.25.2.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/Lib/site-packages/openai-1.25.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/Lib/site-packages/openai-1.25.2.dist-info/METADATA b/.venv/Lib/site-packages/openai-1.25.2.dist-info/METADATA new file mode 100644 index 00000000..c0df8fed --- /dev/null +++ b/.venv/Lib/site-packages/openai-1.25.2.dist-info/METADATA @@ -0,0 +1,677 @@ +Metadata-Version: 2.3 +Name: openai +Version: 1.25.2 +Summary: The official Python library for the openai API +Project-URL: Homepage, https://github.com/openai/openai-python +Project-URL: Repository, https://github.com/openai/openai-python +Author-email: OpenAI +License-Expression: Apache-2.0 +License-File: LICENSE +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: MacOS +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: OS Independent +Classifier: Operating System :: POSIX +Classifier: Operating System :: POSIX :: Linux +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Typing :: Typed +Requires-Python: >=3.7.1 +Requires-Dist: anyio<5,>=3.5.0 +Requires-Dist: cached-property; 
python_version < '3.8' +Requires-Dist: distro<2,>=1.7.0 +Requires-Dist: httpx<1,>=0.23.0 +Requires-Dist: pydantic<3,>=1.9.0 +Requires-Dist: sniffio +Requires-Dist: tqdm>4 +Requires-Dist: typing-extensions<5,>=4.7 +Provides-Extra: datalib +Requires-Dist: numpy>=1; extra == 'datalib' +Requires-Dist: pandas-stubs>=1.1.0.11; extra == 'datalib' +Requires-Dist: pandas>=1.2.3; extra == 'datalib' +Description-Content-Type: text/markdown + +# OpenAI Python API library + +[![PyPI version](https://img.shields.io/pypi/v/openai.svg)](https://pypi.org/project/openai/) + +The OpenAI Python library provides convenient access to the OpenAI REST API from any Python 3.7+ +application. The library includes type definitions for all request params and response fields, +and offers both synchronous and asynchronous clients powered by [httpx](https://github.com/encode/httpx). + +It is generated from our [OpenAPI specification](https://github.com/openai/openai-openapi) with [Stainless](https://stainlessapi.com/). + +## Documentation + +The REST API documentation can be found [on platform.openai.com](https://platform.openai.com/docs). The full API of this library can be found in [api.md](https://github.com/openai/openai-python/tree/main/api.md). + +## Installation + +> [!IMPORTANT] +> The SDK was rewritten in v1, which was released November 6th 2023. See the [v1 migration guide](https://github.com/openai/openai-python/discussions/742), which includes scripts to automatically update your code. + +```sh +# install from PyPI +pip install openai +``` + +## Usage + +The full API of this library can be found in [api.md](https://github.com/openai/openai-python/tree/main/api.md). 
+ +```python +import os +from openai import OpenAI + +client = OpenAI( + # This is the default and can be omitted + api_key=os.environ.get("OPENAI_API_KEY"), +) + +chat_completion = client.chat.completions.create( + messages=[ + { + "role": "user", + "content": "Say this is a test", + } + ], + model="gpt-3.5-turbo", +) +``` + +While you can provide an `api_key` keyword argument, +we recommend using [python-dotenv](https://pypi.org/project/python-dotenv/) +to add `OPENAI_API_KEY="My API Key"` to your `.env` file +so that your API Key is not stored in source control. + +### Polling Helpers + +When interacting with the API some actions such as starting a Run and adding files to vector stores are asynchronous and take time to complete. The SDK includes +helper functions which will poll the status until it reaches a terminal state and then return the resulting object. +If an API method results in an action which could benefit from polling there will be a corresponding version of the +method ending in '\_and_poll'. + +For instance to create a Run and poll until it reaches a terminal state you can run: + +```python +run = client.beta.threads.runs.create_and_poll( + thread_id=thread.id, + assistant_id=assistant.id, +) +``` + +More information on the lifecycle of a Run can be found in the [Run Lifecycle Documentation](https://platform.openai.com/docs/assistants/how-it-works/run-lifecycle) + +### Bulk Upload Helpers + +When creating an interacting with vector stores, you can use the polling helpers to monitor the status of operations. +For convenience, we also provide a bulk upload helper to allow you to simultaneously upload several files at once. + +```python +sample_files = [Path("sample-paper.pdf"), ...] + +batch = await client.vector_stores.file_batches.upload_and_poll( + store.id, + files=sample_files, +) +``` + +### Streaming Helpers + +The SDK also includes helpers to process streams and handle the incoming events. 
+ +```python +with client.beta.threads.runs.stream( + thread_id=thread.id, + assistant_id=assistant.id, + instructions="Please address the user as Jane Doe. The user has a premium account.", +) as stream: + for event in stream: + # Print the text from text delta events + if event.type == "thread.message.delta" and event.data.delta.content: + print(event.data.delta.content[0].text) +``` + +More information on streaming helpers can be found in the dedicated documentation: [helpers.md](https://github.com/openai/openai-python/tree/main/helpers.md) + +## Async usage + +Simply import `AsyncOpenAI` instead of `OpenAI` and use `await` with each API call: + +```python +import os +import asyncio +from openai import AsyncOpenAI + +client = AsyncOpenAI( + # This is the default and can be omitted + api_key=os.environ.get("OPENAI_API_KEY"), +) + + +async def main() -> None: + chat_completion = await client.chat.completions.create( + messages=[ + { + "role": "user", + "content": "Say this is a test", + } + ], + model="gpt-3.5-turbo", + ) + + +asyncio.run(main()) +``` + +Functionality between the synchronous and asynchronous clients is otherwise identical. + +## Streaming responses + +We provide support for streaming responses using Server Side Events (SSE). + +```python +from openai import OpenAI + +client = OpenAI() + +stream = client.chat.completions.create( + model="gpt-4", + messages=[{"role": "user", "content": "Say this is a test"}], + stream=True, +) +for chunk in stream: + print(chunk.choices[0].delta.content or "", end="") +``` + +The async client uses the exact same interface. 
+ +```python +from openai import AsyncOpenAI + +client = AsyncOpenAI() + + +async def main(): + stream = await client.chat.completions.create( + model="gpt-4", + messages=[{"role": "user", "content": "Say this is a test"}], + stream=True, + ) + async for chunk in stream: + print(chunk.choices[0].delta.content or "", end="") + + +asyncio.run(main()) +``` + +## Module-level client + +> [!IMPORTANT] +> We highly recommend instantiating client instances instead of relying on the global client. + +We also expose a global client instance that is accessible in a similar fashion to versions prior to v1. + +```py +import openai + +# optional; defaults to `os.environ['OPENAI_API_KEY']` +openai.api_key = '...' + +# all client options can be configured just like the `OpenAI` instantiation counterpart +openai.base_url = "https://..." +openai.default_headers = {"x-foo": "true"} + +completion = openai.chat.completions.create( + model="gpt-4", + messages=[ + { + "role": "user", + "content": "How do I output all files in a directory using Python?", + }, + ], +) +print(completion.choices[0].message.content) +``` + +The API is the exact same as the standard client instance based API. + +This is intended to be used within REPLs or notebooks for faster iteration, **not** in application code. + +We recommend that you always instantiate a client (e.g., with `client = OpenAI()`) in application code because: + +- It can be difficult to reason about where client options are configured +- It's not possible to change certain client options without potentially causing race conditions +- It's harder to mock for testing purposes +- It's not possible to control cleanup of network connections + +## Using types + +Nested request parameters are [TypedDicts](https://docs.python.org/3/library/typing.html#typing.TypedDict). 
Responses are [Pydantic models](https://docs.pydantic.dev) which also provide helper methods for things like: + +- Serializing back into JSON, `model.to_json()` +- Converting to a dictionary, `model.to_dict()` + +Typed requests and responses provide autocomplete and documentation within your editor. If you would like to see type errors in VS Code to help catch bugs earlier, set `python.analysis.typeCheckingMode` to `basic`. + +## Pagination + +List methods in the OpenAI API are paginated. + +This library provides auto-paginating iterators with each list response, so you do not have to request successive pages manually: + +```python +import openai + +client = OpenAI() + +all_jobs = [] +# Automatically fetches more pages as needed. +for job in client.fine_tuning.jobs.list( + limit=20, +): + # Do something with job here + all_jobs.append(job) +print(all_jobs) +``` + +Or, asynchronously: + +```python +import asyncio +import openai + +client = AsyncOpenAI() + + +async def main() -> None: + all_jobs = [] + # Iterate through items across all pages, issuing requests as needed. + async for job in client.fine_tuning.jobs.list( + limit=20, + ): + all_jobs.append(job) + print(all_jobs) + + +asyncio.run(main()) +``` + +Alternatively, you can use the `.has_next_page()`, `.next_page_info()`, or `.get_next_page()` methods for more granular control working with pages: + +```python +first_page = await client.fine_tuning.jobs.list( + limit=20, +) +if first_page.has_next_page(): + print(f"will fetch next page using these details: {first_page.next_page_info()}") + next_page = await first_page.get_next_page() + print(f"number of items we just fetched: {len(next_page.data)}") + +# Remove `await` for non-async usage. +``` + +Or just work directly with the returned data: + +```python +first_page = await client.fine_tuning.jobs.list( + limit=20, +) + +print(f"next page cursor: {first_page.after}") # => "next page cursor: ..." 
+for job in first_page.data: + print(job.id) + +# Remove `await` for non-async usage. +``` + +## Nested params + +Nested parameters are dictionaries, typed using `TypedDict`, for example: + +```python +from openai import OpenAI + +client = OpenAI() + +completion = client.chat.completions.create( + messages=[ + { + "role": "user", + "content": "Can you generate an example json object describing a fruit?", + } + ], + model="gpt-3.5-turbo-1106", + response_format={"type": "json_object"}, +) +``` + +## File uploads + +Request parameters that correspond to file uploads can be passed as `bytes`, a [`PathLike`](https://docs.python.org/3/library/os.html#os.PathLike) instance or a tuple of `(filename, contents, media type)`. + +```python +from pathlib import Path +from openai import OpenAI + +client = OpenAI() + +client.files.create( + file=Path("input.jsonl"), + purpose="fine-tune", +) +``` + +The async client uses the exact same interface. If you pass a [`PathLike`](https://docs.python.org/3/library/os.html#os.PathLike) instance, the file contents will be read asynchronously automatically. + +## Handling errors + +When the library is unable to connect to the API (for example, due to network connection problems or a timeout), a subclass of `openai.APIConnectionError` is raised. + +When the API returns a non-success status code (that is, 4xx or 5xx +response), a subclass of `openai.APIStatusError` is raised, containing `status_code` and `response` properties. + +All errors inherit from `openai.APIError`. + +```python +import openai +from openai import OpenAI + +client = OpenAI() + +try: + client.fine_tuning.jobs.create( + model="gpt-3.5-turbo", + training_file="file-abc123", + ) +except openai.APIConnectionError as e: + print("The server could not be reached") + print(e.__cause__) # an underlying Exception, likely raised within httpx. 
+except openai.RateLimitError as e: + print("A 429 status code was received; we should back off a bit.") +except openai.APIStatusError as e: + print("Another non-200-range status code was received") + print(e.status_code) + print(e.response) +``` + +Error codes are as followed: + +| Status Code | Error Type | +| ----------- | -------------------------- | +| 400 | `BadRequestError` | +| 401 | `AuthenticationError` | +| 403 | `PermissionDeniedError` | +| 404 | `NotFoundError` | +| 422 | `UnprocessableEntityError` | +| 429 | `RateLimitError` | +| >=500 | `InternalServerError` | +| N/A | `APIConnectionError` | + +### Retries + +Certain errors are automatically retried 2 times by default, with a short exponential backoff. +Connection errors (for example, due to a network connectivity problem), 408 Request Timeout, 409 Conflict, +429 Rate Limit, and >=500 Internal errors are all retried by default. + +You can use the `max_retries` option to configure or disable retry settings: + +```python +from openai import OpenAI + +# Configure the default for all requests: +client = OpenAI( + # default is 2 + max_retries=0, +) + +# Or, configure per-request: +client.with_options(max_retries=5).chat.completions.create( + messages=[ + { + "role": "user", + "content": "How can I get the name of the current day in Node.js?", + } + ], + model="gpt-3.5-turbo", +) +``` + +### Timeouts + +By default requests time out after 10 minutes. 
You can configure this with a `timeout` option, +which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/#fine-tuning-the-configuration) object: + +```python +from openai import OpenAI + +# Configure the default for all requests: +client = OpenAI( + # 20 seconds (default is 10 minutes) + timeout=20.0, +) + +# More granular control: +client = OpenAI( + timeout=httpx.Timeout(60.0, read=5.0, write=10.0, connect=2.0), +) + +# Override per-request: +client.with_options(timeout=5.0).chat.completions.create( + messages=[ + { + "role": "user", + "content": "How can I list all files in a directory using Python?", + } + ], + model="gpt-3.5-turbo", +) +``` + +On timeout, an `APITimeoutError` is thrown. + +Note that requests that time out are [retried twice by default](https://github.com/openai/openai-python/tree/main/#retries). + +## Advanced + +### Logging + +We use the standard library [`logging`](https://docs.python.org/3/library/logging.html) module. + +You can enable logging by setting the environment variable `OPENAI_LOG` to `debug`. + +```shell +$ export OPENAI_LOG=debug +``` + +### How to tell whether `None` means `null` or missing + +In an API response, a field may be explicitly `null`, or missing entirely; in either case, its value is `None` in this library. You can differentiate the two cases with `.model_fields_set`: + +```py +if response.my_field is None: + if 'my_field' not in response.model_fields_set: + print('Got json like {}, without a "my_field" key present at all.') + else: + print('Got json like {"my_field": null}.') +``` + +### Accessing raw response data (e.g. 
headers) + +The "raw" Response object can be accessed by prefixing `.with_raw_response.` to any HTTP method call, e.g., + +```py +from openai import OpenAI + +client = OpenAI() +response = client.chat.completions.with_raw_response.create( + messages=[{ + "role": "user", + "content": "Say this is a test", + }], + model="gpt-3.5-turbo", +) +print(response.headers.get('X-My-Header')) + +completion = response.parse() # get the object that `chat.completions.create()` would have returned +print(completion) +``` + +These methods return an [`LegacyAPIResponse`](https://github.com/openai/openai-python/tree/main/src/openai/_legacy_response.py) object. This is a legacy class as we're changing it slightly in the next major version. + +For the sync client this will mostly be the same with the exception +of `content` & `text` will be methods instead of properties. In the +async client, all methods will be async. + +A migration script will be provided & the migration in general should +be smooth. + +#### `.with_streaming_response` + +The above interface eagerly reads the full response body when you make the request, which may not always be what you want. + +To stream the response body, use `.with_streaming_response` instead, which requires a context manager and only reads the response body once you call `.read()`, `.text()`, `.json()`, `.iter_bytes()`, `.iter_text()`, `.iter_lines()` or `.parse()`. In the async client, these are async methods. + +As such, `.with_streaming_response` methods return a different [`APIResponse`](https://github.com/openai/openai-python/tree/main/src/openai/_response.py) object, and the async client returns an [`AsyncAPIResponse`](https://github.com/openai/openai-python/tree/main/src/openai/_response.py) object. 
+ +```python +with client.chat.completions.with_streaming_response.create( + messages=[ + { + "role": "user", + "content": "Say this is a test", + } + ], + model="gpt-3.5-turbo", +) as response: + print(response.headers.get("X-My-Header")) + + for line in response.iter_lines(): + print(line) +``` + +The context manager is required so that the response will reliably be closed. + +### Making custom/undocumented requests + +This library is typed for convenient access to the documented API. + +If you need to access undocumented endpoints, params, or response properties, the library can still be used. + +#### Undocumented endpoints + +To make requests to undocumented endpoints, you can make requests using `client.get`, `client.post`, and other +http verbs. Options on the client will be respected (such as retries) will be respected when making this +request. + +```py +import httpx + +response = client.post( + "/foo", + cast_to=httpx.Response, + body={"my_param": True}, +) + +print(response.headers.get("x-foo")) +``` + +#### Undocumented request params + +If you want to explicitly send an extra param, you can do so with the `extra_query`, `extra_body`, and `extra_headers` request +options. + +#### Undocumented response properties + +To access undocumented response properties, you can access the extra fields like `response.unknown_prop`. You +can also get all the extra fields on the Pydantic model as a dict with +[`response.model_extra`](https://docs.pydantic.dev/latest/api/base_model/#pydantic.BaseModel.model_extra). 
+ +### Configuring the HTTP client + +You can directly override the [httpx client](https://www.python-httpx.org/api/#client) to customize it for your use case, including: + +- Support for proxies +- Custom transports +- Additional [advanced](https://www.python-httpx.org/advanced/#client-instances) functionality + +```python +from openai import OpenAI, DefaultHttpxClient + +client = OpenAI( + # Or use the `OPENAI_BASE_URL` env var + base_url="http://my.test.server.example.com:8083", + http_client=DefaultHttpxClient( + proxies="http://my.test.proxy.example.com", + transport=httpx.HTTPTransport(local_address="0.0.0.0"), + ), +) +``` + +### Managing HTTP resources + +By default the library closes underlying HTTP connections whenever the client is [garbage collected](https://docs.python.org/3/reference/datamodel.html#object.__del__). You can manually close the client using the `.close()` method if desired, or with a context manager that closes when exiting. + +## Microsoft Azure OpenAI + +To use this library with [Azure OpenAI](https://learn.microsoft.com/en-us/azure/ai-services/openai/overview), use the `AzureOpenAI` +class instead of the `OpenAI` class. + +> [!IMPORTANT] +> The Azure API shape differs from the core API shape which means that the static types for responses / params +> won't always be correct. + +```py +from openai import AzureOpenAI + +# gets the API Key from environment variable AZURE_OPENAI_API_KEY +client = AzureOpenAI( + # https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#rest-api-versioning + api_version="2023-07-01-preview", + # https://learn.microsoft.com/en-us/azure/cognitive-services/openai/how-to/create-resource?pivots=web-portal#create-a-resource + azure_endpoint="https://example-endpoint.openai.azure.com", +) + +completion = client.chat.completions.create( + model="deployment-name", # e.g. 
gpt-35-instant + messages=[ + { + "role": "user", + "content": "How do I output all files in a directory using Python?", + }, + ], +) +print(completion.to_json()) +``` + +In addition to the options provided in the base `OpenAI` client, the following options are provided: + +- `azure_endpoint` (or the `AZURE_OPENAI_ENDPOINT` environment variable) +- `azure_deployment` +- `api_version` (or the `OPENAI_API_VERSION` environment variable) +- `azure_ad_token` (or the `AZURE_OPENAI_AD_TOKEN` environment variable) +- `azure_ad_token_provider` + +An example of using the client with Azure Active Directory can be found [here](https://github.com/openai/openai-python/blob/main/examples/azure_ad.py). + +## Versioning + +This package generally follows [SemVer](https://semver.org/spec/v2.0.0.html) conventions, though certain backwards-incompatible changes may be released as minor versions: + +1. Changes that only affect static types, without breaking runtime behavior. +2. Changes to library internals which are technically public but not intended or documented for external use. _(Please open a GitHub issue to let us know if you are relying on such internals)_. +3. Changes that we do not expect to impact the vast majority of users in practice. + +We take backwards-compatibility seriously and work hard to ensure you can rely on a smooth upgrade experience. + +We are keen for your feedback; please open an [issue](https://www.github.com/openai/openai-python/issues) with questions, bugs, or suggestions. + +## Requirements + +Python 3.7 or higher. 
diff --git a/.venv/Lib/site-packages/openai-1.25.2.dist-info/RECORD b/.venv/Lib/site-packages/openai-1.25.2.dist-info/RECORD new file mode 100644 index 00000000..a8c6ec4c --- /dev/null +++ b/.venv/Lib/site-packages/openai-1.25.2.dist-info/RECORD @@ -0,0 +1,528 @@ +../../Scripts/openai.exe,sha256=NDg2bYy_yfCGvoaRjJ-qZpysaaIzbER4DXBCg_oZs5I,108395 +openai-1.25.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +openai-1.25.2.dist-info/METADATA,sha256=q-RqnlcCJqaFhUWDzswfbiK2qdDjPi-FmgCvzelJtYw,21941 +openai-1.25.2.dist-info/RECORD,, +openai-1.25.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +openai-1.25.2.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87 +openai-1.25.2.dist-info/entry_points.txt,sha256=kAYhQEmziJwsKs5raYAIOvJ2LWmbz5dulEXOzsY71ro,43 +openai-1.25.2.dist-info/licenses/LICENSE,sha256=d0M6HDjQ76tf255XPlAGkIoECMe688MXcGEYsOFySfI,11336 +openai/__init__.py,sha256=hTM-EsfeafKBLu-n5AVSQVDB2MMBGnZoLtATFeW-OL0,10007 +openai/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30 +openai/__pycache__/__init__.cpython-311.pyc,, +openai/__pycache__/__main__.cpython-311.pyc,, +openai/__pycache__/_base_client.cpython-311.pyc,, +openai/__pycache__/_client.cpython-311.pyc,, +openai/__pycache__/_compat.cpython-311.pyc,, +openai/__pycache__/_constants.cpython-311.pyc,, +openai/__pycache__/_exceptions.cpython-311.pyc,, +openai/__pycache__/_files.cpython-311.pyc,, +openai/__pycache__/_legacy_response.cpython-311.pyc,, +openai/__pycache__/_models.cpython-311.pyc,, +openai/__pycache__/_module_client.cpython-311.pyc,, +openai/__pycache__/_qs.cpython-311.pyc,, +openai/__pycache__/_resource.cpython-311.pyc,, +openai/__pycache__/_response.cpython-311.pyc,, +openai/__pycache__/_streaming.cpython-311.pyc,, +openai/__pycache__/_types.cpython-311.pyc,, +openai/__pycache__/_version.cpython-311.pyc,, +openai/__pycache__/pagination.cpython-311.pyc,, +openai/__pycache__/version.cpython-311.pyc,, 
+openai/_base_client.py,sha256=EvBV2cnkgHo9YD0BzCmKiZFVtioTnFusMVv3btr9J30,65358 +openai/_client.py,sha256=BIWse5bWvbEIyGNdGqM5RjjP6zD0oFvFKFtqbIO-xf4,21751 +openai/_compat.py,sha256=m0I0haqFZuVxd5m227_8nNmvA1saXyuNJ7BjidX_PTE,6389 +openai/_constants.py,sha256=L1pfEhuz_wM2w2_U9P_9JZzTbrN4pbLo207l96rtKcQ,469 +openai/_exceptions.py,sha256=IXzw429JsoOD9PbpqLWvncvuOuqU_GOZ1z9D494BZxU,3892 +openai/_extras/__init__.py,sha256=LZbJLZ7aFHRcI7uiY4-wFQTdMp-BF6FER1QMhKVFkWk,107 +openai/_extras/__pycache__/__init__.cpython-311.pyc,, +openai/_extras/__pycache__/_common.cpython-311.pyc,, +openai/_extras/__pycache__/numpy_proxy.cpython-311.pyc,, +openai/_extras/__pycache__/pandas_proxy.cpython-311.pyc,, +openai/_extras/_common.py,sha256=NWWtgbdJsO3hQGQxaXGfVk0LjeIE5AFZ8VS_795hhMc,364 +openai/_extras/numpy_proxy.py,sha256=hwZXa_JBAPD5taRhor1tGxK26g5IaK52JclQDl-dky0,799 +openai/_extras/pandas_proxy.py,sha256=NCEt1Dqwc_0H85YdsWPDE3lPDJtYnBT8G-gJE_BCeEc,637 +openai/_files.py,sha256=O4WNhHahzd5ZRe4F69WlBJegBpQM3O9YGeXWNkz972Y,3632 +openai/_legacy_response.py,sha256=GLrqADb4ed3N5hffQJpU2nSZQ85OVLODB4JVcWFA4u4,15575 +openai/_models.py,sha256=uP4bh5E_BfBqh_uT0YwKoDMmxcIG0IWwnbtwtItnhO0,26096 +openai/_module_client.py,sha256=gF_2bbdosIwUt29sQgrQRJOgNREvXF-IDxe4XKGhHjY,2523 +openai/_qs.py,sha256=AOkSz4rHtK4YI3ZU_kzea-zpwBUgEY8WniGmTPyEimc,4846 +openai/_resource.py,sha256=IQihFzFLhGOiGSlT2dO1ESWSTg2XypgbtAldtGdTOqU,1100 +openai/_response.py,sha256=FhY-5uevGc0KRDmI0eH5n1g4ok-t4lcNq8aDnM-DWqE,28873 +openai/_streaming.py,sha256=t1UZrg53fVJB5Rs6k2sT9PBbvjp-IGrQzUq_5nlxKG4,13102 +openai/_types.py,sha256=sZvy7fSCEWzjt1Fw9gqYHLJ78q9eces6pzMYAbPSyHQ,6226 +openai/_utils/__init__.py,sha256=NqFXgdc-_0_h2jOO8BrNcNupWHYfa91i6DVDtxSr4Y4,1847 +openai/_utils/__pycache__/__init__.cpython-311.pyc,, +openai/_utils/__pycache__/_logs.cpython-311.pyc,, +openai/_utils/__pycache__/_proxy.cpython-311.pyc,, +openai/_utils/__pycache__/_streams.cpython-311.pyc,, +openai/_utils/__pycache__/_sync.cpython-311.pyc,, 
+openai/_utils/__pycache__/_transform.cpython-311.pyc,, +openai/_utils/__pycache__/_typing.cpython-311.pyc,, +openai/_utils/__pycache__/_utils.cpython-311.pyc,, +openai/_utils/_logs.py,sha256=sFA_NejuNObTGGbfsXC03I38mrT9HjsgAJx4d3GP0ok,774 +openai/_utils/_proxy.py,sha256=DjcB-BBIRagSbMut2pF_jZavjda9sPvmQCKtVXBhs0I,1910 +openai/_utils/_streams.py,sha256=SMC90diFFecpEg_zgDRVbdR3hSEIgVVij4taD-noMLM,289 +openai/_utils/_sync.py,sha256=8zEEYfir8iCUcAMFtWd8cDi8NVEaZonc4sfLAYr16io,2269 +openai/_utils/_transform.py,sha256=NCz3q9_O-vuj60xVe-qzhEQ8uJWlZWJTsM-GwHDccf8,12958 +openai/_utils/_typing.py,sha256=tFbktdpdHCQliwzGsWysgn0P5H0JRdagkZdb_LegGkY,3838 +openai/_utils/_utils.py,sha256=1_mm0IcPWDckpwQrb5chWTqeG7JWst_ycXaoFUTXbzE,11497 +openai/_version.py,sha256=hzn4X5o_qBKeLlEHZAL02BANp5jqSMLIn3fapPmuJBU,159 +openai/cli/__init__.py,sha256=soGgtqyomgddl92H0KJRqHqGuaXIaghq86qkzLuVp7U,31 +openai/cli/__pycache__/__init__.cpython-311.pyc,, +openai/cli/__pycache__/_cli.cpython-311.pyc,, +openai/cli/__pycache__/_errors.cpython-311.pyc,, +openai/cli/__pycache__/_models.cpython-311.pyc,, +openai/cli/__pycache__/_progress.cpython-311.pyc,, +openai/cli/__pycache__/_utils.cpython-311.pyc,, +openai/cli/_api/__init__.py,sha256=cj92MZq-9_1PQM8A4TQVsqKn5mcTDAGxHllJ0UvJOPE,58 +openai/cli/_api/__pycache__/__init__.cpython-311.pyc,, +openai/cli/_api/__pycache__/_main.cpython-311.pyc,, +openai/cli/_api/__pycache__/audio.cpython-311.pyc,, +openai/cli/_api/__pycache__/completions.cpython-311.pyc,, +openai/cli/_api/__pycache__/files.cpython-311.pyc,, +openai/cli/_api/__pycache__/image.cpython-311.pyc,, +openai/cli/_api/__pycache__/models.cpython-311.pyc,, +openai/cli/_api/_main.py,sha256=5yyfLURqCEaAN8B61gHaqVAaYgtyb9Xq0ncQ3P2BAh0,451 +openai/cli/_api/audio.py,sha256=HZDTRZT-qZTMsg7WOm-djCQlf874aSa3lxRvNG27wLM,3347 +openai/cli/_api/chat/__init__.py,sha256=MhFUQH9F6QCtbPMlbsU_DWTd7wc5DSCZ7Wy3FBGVij0,300 +openai/cli/_api/chat/__pycache__/__init__.cpython-311.pyc,, 
+openai/cli/_api/chat/__pycache__/completions.cpython-311.pyc,, +openai/cli/_api/chat/completions.py,sha256=9Ztetyz7rm0gP5SOPWEcpzFJnJKuIEQit626vOq42bE,5363 +openai/cli/_api/completions.py,sha256=ysOmnbXpFz3VB5N_5USPdObiYew62vEn6rMtNFwTJGQ,6412 +openai/cli/_api/files.py,sha256=6nKXFnsC2QE0bGnVUAG7BTLSu6K1_MhPE0ZJACmzgRY,2345 +openai/cli/_api/image.py,sha256=ovBExdn8oUK9ImOpsPafesfAlmcftLP2p7d37hcUtKU,5062 +openai/cli/_api/models.py,sha256=pGmIGZToj3raGGpKvPSq_EVUR-dqg4Vi0PNfZH98D2E,1295 +openai/cli/_cli.py,sha256=WxqTnhVVtfzX0z7hV5fcvd3hkihaUgwOWpXOwyCS4Fc,6743 +openai/cli/_errors.py,sha256=7BYF2Kp_L6yKsZDNdg-gK71FMVCNjhrunfVVgh4Zy0M,479 +openai/cli/_models.py,sha256=tgsldjG216KpwgAZ5pS0sV02FQvONDJU2ElA4kCCiIU,491 +openai/cli/_progress.py,sha256=aMLssU9jh-LoqRYH3608jNos7r6vZKnHTRlHxFznzv4,1406 +openai/cli/_tools/__init__.py,sha256=cj92MZq-9_1PQM8A4TQVsqKn5mcTDAGxHllJ0UvJOPE,58 +openai/cli/_tools/__pycache__/__init__.cpython-311.pyc,, +openai/cli/_tools/__pycache__/_main.cpython-311.pyc,, +openai/cli/_tools/__pycache__/fine_tunes.cpython-311.pyc,, +openai/cli/_tools/__pycache__/migrate.cpython-311.pyc,, +openai/cli/_tools/_main.py,sha256=pakjEXHRHqYlTml-RxV7fNrRtRXzmZBinoPi1AJipFY,467 +openai/cli/_tools/fine_tunes.py,sha256=RQgYMzifk6S7Y1I1K6huqco2QxmXa7gVUlHl6SrKTSU,1543 +openai/cli/_tools/migrate.py,sha256=NAYWN90bkhOa_AeABjEt3uOZC20HQ0gA2MNBuMrz7fM,4910 +openai/cli/_utils.py,sha256=oiTc9MnxQh_zxAZ1OIHPkoDpCll0NF9ZgkdFHz4T-Bs,848 +openai/lib/.keep,sha256=wuNrz-5SXo3jJaJOJgz4vFHM41YH_g20F5cRQo0vLes,224 +openai/lib/__pycache__/_old_api.cpython-311.pyc,, +openai/lib/__pycache__/_validators.cpython-311.pyc,, +openai/lib/__pycache__/azure.cpython-311.pyc,, +openai/lib/_old_api.py,sha256=XZnXBrEKuTd70iJirj5mGW35fZoqruJobbBTq6bvg10,1947 +openai/lib/_validators.py,sha256=cXJXFuaAl7jeJcYHXXnFa4NHGtHs-_zt3Zs1VVCmQo4,35288 +openai/lib/azure.py,sha256=9fyl1ZIx3QV8j4F7B5nrE1mqPQ-gpdn8sQGr9jBvuUc,21479 
+openai/lib/streaming/__init__.py,sha256=kD3LpjsqU7caDQDhB-YjTUl9qqbb5sPnGGSI2yQYC70,379 +openai/lib/streaming/__pycache__/__init__.cpython-311.pyc,, +openai/lib/streaming/__pycache__/_assistants.cpython-311.pyc,, +openai/lib/streaming/_assistants.py,sha256=-gU50bd7FsvvO5Sp756dQ66wsSNyemzNIlJwpxitOhM,40467 +openai/pagination.py,sha256=B9ejXEAR_hYGLHfqb9xEEsE0u5dCUMjvplOce5dpY7M,2760 +openai/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +openai/resources/__init__.py,sha256=odhKOSLopY06Kz2fJy9oafb2xViXgkLRJ2vn0Kc7qJA,4166 +openai/resources/__pycache__/__init__.cpython-311.pyc,, +openai/resources/__pycache__/batches.cpython-311.pyc,, +openai/resources/__pycache__/completions.cpython-311.pyc,, +openai/resources/__pycache__/embeddings.cpython-311.pyc,, +openai/resources/__pycache__/files.cpython-311.pyc,, +openai/resources/__pycache__/images.cpython-311.pyc,, +openai/resources/__pycache__/models.cpython-311.pyc,, +openai/resources/__pycache__/moderations.cpython-311.pyc,, +openai/resources/audio/__init__.py,sha256=YM7FHvPKVlj_v6EIgfpUQsb6q4hS2hVQ3gfkgic0sP0,1687 +openai/resources/audio/__pycache__/__init__.cpython-311.pyc,, +openai/resources/audio/__pycache__/audio.cpython-311.pyc,, +openai/resources/audio/__pycache__/speech.cpython-311.pyc,, +openai/resources/audio/__pycache__/transcriptions.cpython-311.pyc,, +openai/resources/audio/__pycache__/translations.cpython-311.pyc,, +openai/resources/audio/audio.py,sha256=1HHcDRWT58KshYelRdSnJs-0bvMBRS1vOhnU-h_oP5s,4481 +openai/resources/audio/speech.py,sha256=A4_SwpCesEfHg89cxazNdrHz8JxNvUp5LlLNoMqo-0w,7876 +openai/resources/audio/transcriptions.py,sha256=bBdQZXzjamZIbe5R_Ji9JJ6W9nJCNN7EwQVinu572Pk,11128 +openai/resources/audio/translations.py,sha256=_NoBAOXYqMEtjeUhdoHF3DNb-UqnhqVrmfqgITvhajI,9070 +openai/resources/batches.py,sha256=HpMvKfSgC3F5ea8ZlmvvnJ5A0tkpzjMJkAioo4vk0Cs,17614 +openai/resources/beta/__init__.py,sha256=nXoV4P8WCrbEZuNMtptbIuy_LqlVafY9lJ2qfW35GFc,1636 
+openai/resources/beta/__pycache__/__init__.cpython-311.pyc,, +openai/resources/beta/__pycache__/assistants.cpython-311.pyc,, +openai/resources/beta/__pycache__/beta.cpython-311.pyc,, +openai/resources/beta/assistants.py,sha256=cUwAeYr-JfdWE6sehSve3n-YaqOJEAFpIimjbwp9sqg,39350 +openai/resources/beta/beta.py,sha256=xw_dfi9ZpyRG4ChwweQtirWwsWxhAA4mXSV46D7pS5M,4485 +openai/resources/beta/threads/__init__.py,sha256=fQ_qdUVSfouVS5h47DlTb5mamChT4K-v-siPuuAB6do,1177 +openai/resources/beta/threads/__pycache__/__init__.cpython-311.pyc,, +openai/resources/beta/threads/__pycache__/messages.cpython-311.pyc,, +openai/resources/beta/threads/__pycache__/threads.cpython-311.pyc,, +openai/resources/beta/threads/messages.py,sha256=CBUP4HGGWRwp4nf3NwI9QdDOWYxHG1M-lqVJLYnyaRI,26157 +openai/resources/beta/threads/runs/__init__.py,sha256=2FfDaqwmJJCd-IVpY_CrzWcFvw0KFyQ3cm5jnTfI-DQ,771 +openai/resources/beta/threads/runs/__pycache__/__init__.cpython-311.pyc,, +openai/resources/beta/threads/runs/__pycache__/runs.cpython-311.pyc,, +openai/resources/beta/threads/runs/__pycache__/steps.cpython-311.pyc,, +openai/resources/beta/threads/runs/runs.py,sha256=ihJYuUGzXK6fPdcEJpUIy7cWPDWzDIjuKPUQW4aXWdo,147289 +openai/resources/beta/threads/runs/steps.py,sha256=uRykb4JapSNZCF8OD54f5qOWtrp2GoU1k5uAZgA4kAk,12223 +openai/resources/beta/threads/threads.py,sha256=v4q822rU6vUelDK4GWuj74UGwbtcsB-xFIek5UOSd7g,99552 +openai/resources/beta/vector_stores/__init__.py,sha256=11Xn1vhgndWiI0defJHv31vmbtbDgh2GwZT3gX8GgHk,1296 +openai/resources/beta/vector_stores/__pycache__/__init__.cpython-311.pyc,, +openai/resources/beta/vector_stores/__pycache__/file_batches.cpython-311.pyc,, +openai/resources/beta/vector_stores/__pycache__/files.cpython-311.pyc,, +openai/resources/beta/vector_stores/__pycache__/vector_stores.cpython-311.pyc,, +openai/resources/beta/vector_stores/file_batches.py,sha256=7lM7BSh6iLm34InyB_kvsxlJCWS-GnpUT7T5e0qllM8,29533 
+openai/resources/beta/vector_stores/files.py,sha256=5yN7RmmRMmrTXpkORSllgfuPD8FoYpLHeSbUJEsqT7Q,27046 +openai/resources/beta/vector_stores/vector_stores.py,sha256=dlewvG7YwtCqXEXqaZuxU_xJEpViX2B-yN1WWaGGBKM,27141 +openai/resources/chat/__init__.py,sha256=8Q9ODRo1wIpFa34VaNwuaWFmxqFxagDtUhIAkQNvxEU,849 +openai/resources/chat/__pycache__/__init__.cpython-311.pyc,, +openai/resources/chat/__pycache__/chat.cpython-311.pyc,, +openai/resources/chat/__pycache__/completions.cpython-311.pyc,, +openai/resources/chat/chat.py,sha256=Edexhbq1anfSS_I0wNRQb7rx1OV6-rq4sxgVlYDGb6Y,2342 +openai/resources/chat/completions.py,sha256=bMo6tRzutfBcVE1FZV9U2xKBl6fJrQVq0u9S6d5eYO4,67217 +openai/resources/completions.py,sha256=RWih7eqk4LE3Q09e45fq1Kkm8FoKUI1QohAxm7Xi-uc,57169 +openai/resources/embeddings.py,sha256=cMSXtMc_7mBqlSiQ99B7qXYoRLGyoeIFazyYQ0jJ1O4,10755 +openai/resources/files.py,sha256=kXRhg8gjvjoksdJ47Y2VeKQ7Dlee09znexrGPFGR9bI,26152 +openai/resources/fine_tuning/__init__.py,sha256=s6uoq7gM4gwoywdOOZQkPeYiSbUl-OwpeuMhwJJk0lc,837 +openai/resources/fine_tuning/__pycache__/__init__.cpython-311.pyc,, +openai/resources/fine_tuning/__pycache__/fine_tuning.cpython-311.pyc,, +openai/resources/fine_tuning/fine_tuning.py,sha256=-2k4d5ZDlCIoqonSOMtGLVl4Kk9n2yJoKvVMG3PoWW8,2410 +openai/resources/fine_tuning/jobs/__init__.py,sha256=_smlrwijZOCcsDWqKnofLxQM2QLucZzXgboL9zJBPHw,849 +openai/resources/fine_tuning/jobs/__pycache__/__init__.cpython-311.pyc,, +openai/resources/fine_tuning/jobs/__pycache__/checkpoints.cpython-311.pyc,, +openai/resources/fine_tuning/jobs/__pycache__/jobs.cpython-311.pyc,, +openai/resources/fine_tuning/jobs/checkpoints.py,sha256=6uP1CCGkzE_n8FsVdTQ36eH_eiq24wOxQQ5zzOy0UEU,6456 +openai/resources/fine_tuning/jobs/jobs.py,sha256=MDluaeAYVfX0ky5Q8Nxy0Gx2DT05lXSBG8iDap53zds,26850 +openai/resources/images.py,sha256=vtVb0k94YWemgqwxq9XHDZazaxLm3S7PQDftzsXBlKk,24796 +openai/resources/models.py,sha256=XF3E56V62YZq-HrStUDDvfrT2RHj98P8Y-oOrPSPRX0,10222 
+openai/resources/moderations.py,sha256=WPMrXyYXxFXHFyyF_xzg_1Uj3Xtb3KbxsnJm3SQYgcA,6685 +openai/types/__init__.py,sha256=KWciGl8_OEKoOmVP3_wHo26Qs3FMwRUl91P8CZXcHZY,2051 +openai/types/__pycache__/__init__.cpython-311.pyc,, +openai/types/__pycache__/batch.cpython-311.pyc,, +openai/types/__pycache__/batch_create_params.cpython-311.pyc,, +openai/types/__pycache__/batch_error.cpython-311.pyc,, +openai/types/__pycache__/batch_list_params.cpython-311.pyc,, +openai/types/__pycache__/batch_request_counts.cpython-311.pyc,, +openai/types/__pycache__/chat_model.cpython-311.pyc,, +openai/types/__pycache__/completion.cpython-311.pyc,, +openai/types/__pycache__/completion_choice.cpython-311.pyc,, +openai/types/__pycache__/completion_create_params.cpython-311.pyc,, +openai/types/__pycache__/completion_usage.cpython-311.pyc,, +openai/types/__pycache__/create_embedding_response.cpython-311.pyc,, +openai/types/__pycache__/embedding.cpython-311.pyc,, +openai/types/__pycache__/embedding_create_params.cpython-311.pyc,, +openai/types/__pycache__/file_content.cpython-311.pyc,, +openai/types/__pycache__/file_create_params.cpython-311.pyc,, +openai/types/__pycache__/file_deleted.cpython-311.pyc,, +openai/types/__pycache__/file_list_params.cpython-311.pyc,, +openai/types/__pycache__/file_object.cpython-311.pyc,, +openai/types/__pycache__/image.cpython-311.pyc,, +openai/types/__pycache__/image_create_variation_params.cpython-311.pyc,, +openai/types/__pycache__/image_edit_params.cpython-311.pyc,, +openai/types/__pycache__/image_generate_params.cpython-311.pyc,, +openai/types/__pycache__/images_response.cpython-311.pyc,, +openai/types/__pycache__/model.cpython-311.pyc,, +openai/types/__pycache__/model_deleted.cpython-311.pyc,, +openai/types/__pycache__/moderation.cpython-311.pyc,, +openai/types/__pycache__/moderation_create_params.cpython-311.pyc,, +openai/types/__pycache__/moderation_create_response.cpython-311.pyc,, 
+openai/types/audio/__init__.py,sha256=slwR2gZwYMmTpPihbr1a2rryQuyfqeAGzgjluQwlmN4,494 +openai/types/audio/__pycache__/__init__.cpython-311.pyc,, +openai/types/audio/__pycache__/speech_create_params.cpython-311.pyc,, +openai/types/audio/__pycache__/transcription.cpython-311.pyc,, +openai/types/audio/__pycache__/transcription_create_params.cpython-311.pyc,, +openai/types/audio/__pycache__/translation.cpython-311.pyc,, +openai/types/audio/__pycache__/translation_create_params.cpython-311.pyc,, +openai/types/audio/speech_create_params.py,sha256=uae8hceXzm75E3QXBC9dRMunYA2Mj2m7lUiG_fbuN70,1278 +openai/types/audio/transcription.py,sha256=jP13KGV0ZSgK3FkIZueDLrH4Yhafp5FkXBEP85deBAo,231 +openai/types/audio/transcription_create_params.py,sha256=H7LOzb4VHwhF_cm0MXMIDgfglmbu-T-gcrp1i2HJBqI,2226 +openai/types/audio/translation.py,sha256=_PhTtQ-s1yc-4kAKlgc88FTqUpXnNYfM2ld5IuRRGkA,195 +openai/types/audio/translation_create_params.py,sha256=pynqbAozfcVwu1U6C6xvauZSFlQxIz1cswSXJLfRI30,1506 +openai/types/batch.py,sha256=eIOIaJnDuv93fdefTI0WRfTm7MZH8gLBdF0B12JCiZw,2787 +openai/types/batch_create_params.py,sha256=Kh4ZGVNBFpO3mHakKNSktaUPc-cLpBrlh9RqyLjsnqk,1183 +openai/types/batch_error.py,sha256=Xxl-gYm0jerpYyI-mKSSVxRMQRubkoLUiOP9U3v72EM,622 +openai/types/batch_list_params.py,sha256=X1_sfRspuIMSDyXWVh0YnJ9vJLeOOH66TrvgEHueC84,705 +openai/types/batch_request_counts.py,sha256=nOzdL84OlZRycVNW99EDkdjCFqqKh68emaWT4Lx7dBE,410 +openai/types/beta/__init__.py,sha256=z2VmuulluQs5lVF22u2-FvbTQLpVhtz6hEcM1iUAXZc,2919 +openai/types/beta/__pycache__/__init__.cpython-311.pyc,, +openai/types/beta/__pycache__/assistant.cpython-311.pyc,, +openai/types/beta/__pycache__/assistant_create_params.cpython-311.pyc,, +openai/types/beta/__pycache__/assistant_deleted.cpython-311.pyc,, +openai/types/beta/__pycache__/assistant_list_params.cpython-311.pyc,, +openai/types/beta/__pycache__/assistant_response_format.cpython-311.pyc,, 
+openai/types/beta/__pycache__/assistant_response_format_option.cpython-311.pyc,, +openai/types/beta/__pycache__/assistant_response_format_option_param.cpython-311.pyc,, +openai/types/beta/__pycache__/assistant_response_format_param.cpython-311.pyc,, +openai/types/beta/__pycache__/assistant_stream_event.cpython-311.pyc,, +openai/types/beta/__pycache__/assistant_tool.cpython-311.pyc,, +openai/types/beta/__pycache__/assistant_tool_choice.cpython-311.pyc,, +openai/types/beta/__pycache__/assistant_tool_choice_function.cpython-311.pyc,, +openai/types/beta/__pycache__/assistant_tool_choice_function_param.cpython-311.pyc,, +openai/types/beta/__pycache__/assistant_tool_choice_option.cpython-311.pyc,, +openai/types/beta/__pycache__/assistant_tool_choice_option_param.cpython-311.pyc,, +openai/types/beta/__pycache__/assistant_tool_choice_param.cpython-311.pyc,, +openai/types/beta/__pycache__/assistant_tool_param.cpython-311.pyc,, +openai/types/beta/__pycache__/assistant_update_params.cpython-311.pyc,, +openai/types/beta/__pycache__/code_interpreter_tool.cpython-311.pyc,, +openai/types/beta/__pycache__/code_interpreter_tool_param.cpython-311.pyc,, +openai/types/beta/__pycache__/file_search_tool.cpython-311.pyc,, +openai/types/beta/__pycache__/file_search_tool_param.cpython-311.pyc,, +openai/types/beta/__pycache__/function_tool.cpython-311.pyc,, +openai/types/beta/__pycache__/function_tool_param.cpython-311.pyc,, +openai/types/beta/__pycache__/thread.cpython-311.pyc,, +openai/types/beta/__pycache__/thread_create_and_run_params.cpython-311.pyc,, +openai/types/beta/__pycache__/thread_create_params.cpython-311.pyc,, +openai/types/beta/__pycache__/thread_deleted.cpython-311.pyc,, +openai/types/beta/__pycache__/thread_update_params.cpython-311.pyc,, +openai/types/beta/__pycache__/vector_store.cpython-311.pyc,, +openai/types/beta/__pycache__/vector_store_create_params.cpython-311.pyc,, +openai/types/beta/__pycache__/vector_store_deleted.cpython-311.pyc,, 
+openai/types/beta/__pycache__/vector_store_list_params.cpython-311.pyc,, +openai/types/beta/__pycache__/vector_store_update_params.cpython-311.pyc,, +openai/types/beta/assistant.py,sha256=9lrwz2SdGMf553qzYltklaVSKtdQIfR7WKBFJgUr_cg,4615 +openai/types/beta/assistant_create_params.py,sha256=bgrU6XrpJEkKF2v3gSkLQYYv-uHPXcNk4H01Ngrtzwg,6059 +openai/types/beta/assistant_deleted.py,sha256=bTTUl5FPHTBI5nRm7d0sGuR9VCSBDZ-IbOn9G_IpmJQ,301 +openai/types/beta/assistant_list_params.py,sha256=1-osjSX8tKieHSP0xaKBBU8j-J01fKrrxIJRHDudFHk,1220 +openai/types/beta/assistant_response_format.py,sha256=-JYxEihoHEHMak9E7KiyD5Zh_f3c-155j110mBDTFNE,378 +openai/types/beta/assistant_response_format_option.py,sha256=pDRz-lm-ASYhVIslXCulGAtO0c9Ulr6zVz-VltQQOh4,348 +openai/types/beta/assistant_response_format_option_param.py,sha256=JSH4wXdfgQBLMUagfVCn3clk9eErAUAiyZSQZ2XM-2w,410 +openai/types/beta/assistant_response_format_param.py,sha256=qtkwEg3hG3_ewmHH3E1hXsQqVqyMSTIOGFN9R1WTW0g,369 +openai/types/beta/assistant_stream_event.py,sha256=JprbttORwq5mJUpyziwCHH7vXBzuSqU-MbfNHWpeTEw,6529 +openai/types/beta/assistant_tool.py,sha256=ci9elhBtBQY3_0FefsDuKxyLLRrl5m9e_PSvShZqTSo,478 +openai/types/beta/assistant_tool_choice.py,sha256=Hy4HIfPQCkWD8VruHHicuTkomNwljGHviQHk36prKhg,544 +openai/types/beta/assistant_tool_choice_function.py,sha256=lMEPJrd2nIeNeTFTRKj8OTJmS--Zvu6kmzqjFR_iBlQ,271 +openai/types/beta/assistant_tool_choice_function_param.py,sha256=-O38277LhSaqOVhTp0haHP0ZnVTLpEBvcLJa5MRo7wE,355 +openai/types/beta/assistant_tool_choice_option.py,sha256=WaLj1FSgQyLrss5hoKbmb19C0hzD5_WP3bWgzNdZIMM,340 +openai/types/beta/assistant_tool_choice_option_param.py,sha256=ODCix7ElFxtyABiL09OhaYbQy9RjICCSmILeqBFWeLE,402 +openai/types/beta/assistant_tool_choice_param.py,sha256=NOWx9SzZEwYaHeAyFZTQlG3pmogMNXzjPJDGQUlbv7Q,572 +openai/types/beta/assistant_tool_param.py,sha256=xsB-Vq93uyS69m5zMoAc7keLXB_OSwEUH6XgB2g3ex4,450 
+openai/types/beta/assistant_update_params.py,sha256=8YGYglHCQhoBCleaaKsDmR13LijeDgrhIhQ5Lo8B1L0,4363 +openai/types/beta/chat/__init__.py,sha256=OKfJYcKb4NObdiRObqJV_dOyDQ8feXekDUge2o_4pXQ,122 +openai/types/beta/chat/__pycache__/__init__.cpython-311.pyc,, +openai/types/beta/code_interpreter_tool.py,sha256=7mgQc9OtD_ZUnZeNhoobMFcmmvtZPFCNYGB-PEnNnfs,333 +openai/types/beta/code_interpreter_tool_param.py,sha256=X6mwzFyZx1RCKEYbBCPs4kh_tZkxFxydPMK4yFNJkLs,389 +openai/types/beta/file_search_tool.py,sha256=u7Dw4G956UhhSF6zUvMU2Pyt9px3QEz8cno9Au_DofQ,313 +openai/types/beta/file_search_tool_param.py,sha256=nAON5EUoano9jVPYZMzMYMLCxde_43NBgtooPFpZcyU,369 +openai/types/beta/function_tool.py,sha256=oYGJfcfPpUohKw2ikgshDjOI1HXCK-5pAWyegYNezeU,397 +openai/types/beta/function_tool_param.py,sha256=T_k2OX1OULgkrHHXw0rY_J-O0y5qA0lM-B58C64YyfM,453 +openai/types/beta/thread.py,sha256=wd00j3ogUpOa_O0Sf1m6H4f8t1Nf05DKWiK_4m33O6s,2013 +openai/types/beta/thread_create_and_run_params.py,sha256=i-WBCNm1aCUKn-TC5xvv4p1W0DtyEPKvjoEq_ang9Ks,12517 +openai/types/beta/thread_create_params.py,sha256=A1JaofbmpOpA7gs8F-Olj5MS7ZwHD3xEIH_GQrWDD80,4389 +openai/types/beta/thread_deleted.py,sha256=MaYG_jZIjSiB9h_ZBiTtpMsRSwFKkCY83ziM5GO_oUk,292 +openai/types/beta/thread_update_params.py,sha256=RYsR88YHwReKLiLqnLlnWiReiVIGlEGvVV9-g_wptgM,1750 +openai/types/beta/threads/__init__.py,sha256=dneukcPQuYkP0N7UTmrbUXpjCl_9Uv1MpWJuAJCTE-A,2156 +openai/types/beta/threads/__pycache__/__init__.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/annotation.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/annotation_delta.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/file_citation_annotation.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/file_citation_delta_annotation.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/file_path_annotation.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/file_path_delta_annotation.cpython-311.pyc,, 
+openai/types/beta/threads/__pycache__/image_file.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/image_file_content_block.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/image_file_delta.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/image_file_delta_block.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/message.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/message_content.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/message_content_delta.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/message_create_params.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/message_deleted.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/message_delta.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/message_delta_event.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/message_list_params.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/message_update_params.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/required_action_function_tool_call.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/run.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/run_create_params.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/run_list_params.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/run_status.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/run_submit_tool_outputs_params.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/run_update_params.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/text.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/text_content_block.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/text_delta.cpython-311.pyc,, +openai/types/beta/threads/__pycache__/text_delta_block.cpython-311.pyc,, +openai/types/beta/threads/annotation.py,sha256=3VHiyDhcR2G-cQ48_itBsXDWlmfpUJ7rnjeMh_DsHgg,440 +openai/types/beta/threads/annotation_delta.py,sha256=aJ1A_paDRkRVivuCZrmOL4QRvVW3KmZxsGUgOJ7uzUU,488 
+openai/types/beta/threads/file_citation_annotation.py,sha256=0-0cZw65Xp3Wuq3zcaeK-we2jUchZX5dsxriIpXYH38,653 +openai/types/beta/threads/file_citation_delta_annotation.py,sha256=R87tcXkJ0RiH5UJo0Qknwk7X_c4qF1qvGsu2spOPx-I,873 +openai/types/beta/threads/file_path_annotation.py,sha256=hNc4ebprJynqMG1yk0gLvgzTpjtVzgEbXriMZftkgew,552 +openai/types/beta/threads/file_path_delta_annotation.py,sha256=RW9dgDF9Ggf357fPZ-vUu2ge3U-Hf11DVTr-ecklsBY,755 +openai/types/beta/threads/image_file.py,sha256=dLOJ9_oTVoJllJqS96NfLQQ7i0To0up25TLl5OdXRt4,324 +openai/types/beta/threads/image_file_content_block.py,sha256=31I5trSERP2qLZpJ4ugZtIyta4DDoBhBvxkM4LovL3w,363 +openai/types/beta/threads/image_file_delta.py,sha256=RXiEnWIItjoRKiwecJ0LLWzils8zNvb80R_j1ZWcqQM,378 +openai/types/beta/threads/image_file_delta_block.py,sha256=XJ2YVX_cq0OiNcGbNmXO0_dca1IvPockOvvoM7pDvbI,492 +openai/types/beta/threads/message.py,sha256=MgO0oD_sXfGpTPhsLDCgg_bXeNeGAyUGYaWlsbMN9rc,3056 +openai/types/beta/threads/message_content.py,sha256=iAQm3X-YXbbkLpob_S3J4PnqTEdN_V_qfZAR-yolpTY,440 +openai/types/beta/threads/message_content_delta.py,sha256=9OiciDh1vCUT6r0q2ta-QTlORr5ESALZUNO0BYOeQns,438 +openai/types/beta/threads/message_create_params.py,sha256=KoP-6M3riWV5n3oEbmBBoU2v9prx7CGr_hXk0jF-fGo,1689 +openai/types/beta/threads/message_deleted.py,sha256=DNnrSfGZ3kWEazmo4mVTdLhiKlIHxs-D8Ef5sNdHY1o,303 +openai/types/beta/threads/message_delta.py,sha256=-kaRyvnIA8Yr2QV5jKRn15BU2Ni068a_WtWJ4PqlLfE,570 +openai/types/beta/threads/message_delta_event.py,sha256=7SpE4Dd3Lrc_cm97SzBwZzGGhfLqiFViDeTRQz-5YmQ,579 +openai/types/beta/threads/message_list_params.py,sha256=LXqc3deSkKO6VN337OlQ4fzG7dfgBE7Iv_CLzZHhbhw,1294 +openai/types/beta/threads/message_update_params.py,sha256=bw6_U-vZA4c9_CDmeGOh7IEPIm8BU3BBOKtxnii0LKA,629 +openai/types/beta/threads/required_action_function_tool_call.py,sha256=XsR4OBbxI-RWteLvhcLEDBan6eUUGvhLORFRKjPbsLg,888 +openai/types/beta/threads/run.py,sha256=D6TDDeIGMS39jc2TVY4HrVw0mpBDXhro9VIzeH2ejdg,7656 
+openai/types/beta/threads/run_create_params.py,sha256=b37YY0_xLHNJjDJeKZoj38yJ-iGNrZFH1JcrkqDPqWI,8868 +openai/types/beta/threads/run_list_params.py,sha256=73poqeRcb5TEsIVn7OzJ_g9OajNokEzpCVLzVNKZmPk,1208 +openai/types/beta/threads/run_status.py,sha256=6KPJB7l0YfGSKzx4wuIP8SDiZSiaD2nb0KOf0uRPDP4,282 +openai/types/beta/threads/run_submit_tool_outputs_params.py,sha256=aDrg0FZZoJKaPVQzcFjUg4ZKaeW8KF6UJBxhJEIjC2I,1630 +openai/types/beta/threads/run_update_params.py,sha256=76dWMNa3zCUliemCdwWv6p07GNeMYCdZoJs9KNbdZSE,621 +openai/types/beta/threads/runs/__init__.py,sha256=uhxk5F1_5c5wg2_p70AjlOy9cE3Ga8-ILn4Ep-gcls4,1515 +openai/types/beta/threads/runs/__pycache__/__init__.cpython-311.pyc,, +openai/types/beta/threads/runs/__pycache__/code_interpreter_logs.cpython-311.pyc,, +openai/types/beta/threads/runs/__pycache__/code_interpreter_output_image.cpython-311.pyc,, +openai/types/beta/threads/runs/__pycache__/code_interpreter_tool_call.cpython-311.pyc,, +openai/types/beta/threads/runs/__pycache__/code_interpreter_tool_call_delta.cpython-311.pyc,, +openai/types/beta/threads/runs/__pycache__/file_search_tool_call.cpython-311.pyc,, +openai/types/beta/threads/runs/__pycache__/file_search_tool_call_delta.cpython-311.pyc,, +openai/types/beta/threads/runs/__pycache__/function_tool_call.cpython-311.pyc,, +openai/types/beta/threads/runs/__pycache__/function_tool_call_delta.cpython-311.pyc,, +openai/types/beta/threads/runs/__pycache__/message_creation_step_details.cpython-311.pyc,, +openai/types/beta/threads/runs/__pycache__/run_step.cpython-311.pyc,, +openai/types/beta/threads/runs/__pycache__/run_step_delta.cpython-311.pyc,, +openai/types/beta/threads/runs/__pycache__/run_step_delta_event.cpython-311.pyc,, +openai/types/beta/threads/runs/__pycache__/run_step_delta_message_delta.cpython-311.pyc,, +openai/types/beta/threads/runs/__pycache__/step_list_params.cpython-311.pyc,, +openai/types/beta/threads/runs/__pycache__/tool_call.cpython-311.pyc,, 
+openai/types/beta/threads/runs/__pycache__/tool_call_delta.cpython-311.pyc,, +openai/types/beta/threads/runs/__pycache__/tool_call_delta_object.cpython-311.pyc,, +openai/types/beta/threads/runs/__pycache__/tool_calls_step_details.cpython-311.pyc,, +openai/types/beta/threads/runs/code_interpreter_logs.py,sha256=7wXZpUE9I-oZJ0K3mFG0Nwmfm2bKGiSpWJyBeo7txwo,482 +openai/types/beta/threads/runs/code_interpreter_output_image.py,sha256=8o99k0ZHMHpqH0taXkOkYR9WaDUpCN-G0Ifd5XsJpb8,613 +openai/types/beta/threads/runs/code_interpreter_tool_call.py,sha256=Ydsi3ob7fyv1MqPY6tlZCD254Cc5XNLO-ddEGtKdqj4,1788 +openai/types/beta/threads/runs/code_interpreter_tool_call_delta.py,sha256=eD-tvfFD7arq4w7dzQJFkmHrvLguVrDjpAJRNH6EwIE,1457 +openai/types/beta/threads/runs/file_search_tool_call.py,sha256=PPxrJP3r4RWFTeE5mU-9SbFz37JmKHOGfsxlZGydyW0,522 +openai/types/beta/threads/runs/file_search_tool_call_delta.py,sha256=Gx8c7GSgGYuOvGadcAr3ZIspEFMZS3e2OY7vBo_MYnM,655 +openai/types/beta/threads/runs/function_tool_call.py,sha256=aOq5yOtKOi6C5Q1FIQRxqtJJR1AcSW_K5PvRiKISNCI,920 +openai/types/beta/threads/runs/function_tool_call_delta.py,sha256=VFRtCJkj4PHX97upM1cXpJAk9-JvJSgyngie06fBIjQ,1076 +openai/types/beta/threads/runs/message_creation_step_details.py,sha256=tRFMNF2Rf4DekVliUKkoujItiOjjAE9EG9bbxJvpVPA,506 +openai/types/beta/threads/runs/run_step.py,sha256=UvPakztDIofP8K80Q1gfQSXF18xxp2w9KWRwrcHhjnE,3440 +openai/types/beta/threads/runs/run_step_delta.py,sha256=lNPH43tdQMHHEiaxaS0FtLXsqtH5xOJpYJlAroj7PHg,635 +openai/types/beta/threads/runs/run_step_delta_event.py,sha256=rkDyvHSXt-hc1LngB41f9vglkn6t03kS62bsn0iGaxU,585 +openai/types/beta/threads/runs/run_step_delta_message_delta.py,sha256=UIo6oPH8STLjPHiWL-A4CtKfYe49uptvIAHWNnZ3Ums,564 +openai/types/beta/threads/runs/step_list_params.py,sha256=2vMPFMElvK135ncP9ch6kUnzPGOSIPT3Eio18jJhAqk,1250 +openai/types/beta/threads/runs/tool_call.py,sha256=zyck1JNKBPCIGCMrJN6P850D10Y36FO6LwrX2WM_YR8,515 
+openai/types/beta/threads/runs/tool_call_delta.py,sha256=OZeU5fF-77_oG87xNVn_wZo4SpDfjJ5ND9rIQQYKPoE,578 +openai/types/beta/threads/runs/tool_call_delta_object.py,sha256=eK20VsIswEyT48XbkGu60HUrE7OD3fhpn1fbXrVauM4,615 +openai/types/beta/threads/runs/tool_calls_step_details.py,sha256=bDa-yybVF3a8H6VqhDGmFZMkpn-0gtPQM2jWWsmUvYo,574 +openai/types/beta/threads/text.py,sha256=9gjmDCqoptnxQ8Jhym87pECyd6m1lB3daCxKNzSFp4Y,319 +openai/types/beta/threads/text_content_block.py,sha256=pdGlKYM1IF9PjTvxjxo1oDg1XeGCFdJdl0kJVpZ7jIs,319 +openai/types/beta/threads/text_delta.py,sha256=2EFeQCkg_cc8nYEJ6BtYAA3_TqgMTbmEXoMvLjzaB34,389 +openai/types/beta/threads/text_delta_block.py,sha256=pkHkVBgNsmHi9JURzs5ayPqxQXSkex3F0jH0MqJXik0,448 +openai/types/beta/vector_store.py,sha256=zaSaSUpStD3iuyas9f7VQCNF1byxnXRz_5q36eizNGE,2353 +openai/types/beta/vector_store_create_params.py,sha256=tddpQ1KDswqOqzy-ijmSEN5_A-QL8-RbjD8uUf1w5XY,1321 +openai/types/beta/vector_store_deleted.py,sha256=Yq0E1orRLShseLwZ1deiBdDEUgEw_tcYVxGYa5gbIrM,308 +openai/types/beta/vector_store_list_params.py,sha256=8iUgSgs_TeehprKjtTLWOGeH_R8LbDdLkdwMq9xVpSA,1224 +openai/types/beta/vector_store_update_params.py,sha256=AHlOV4f36UWAH4k7XKlGa51Mfao2f7339qI3fskWbIk,1114 +openai/types/beta/vector_stores/__init__.py,sha256=gXfm8V5Ad0iueaC_VoHDUQvSdwSfBzk2cQNwZldvY0s,671 +openai/types/beta/vector_stores/__pycache__/__init__.cpython-311.pyc,, +openai/types/beta/vector_stores/__pycache__/file_batch_create_params.cpython-311.pyc,, +openai/types/beta/vector_stores/__pycache__/file_batch_list_files_params.cpython-311.pyc,, +openai/types/beta/vector_stores/__pycache__/file_create_params.cpython-311.pyc,, +openai/types/beta/vector_stores/__pycache__/file_list_params.cpython-311.pyc,, +openai/types/beta/vector_stores/__pycache__/vector_store_file.cpython-311.pyc,, +openai/types/beta/vector_stores/__pycache__/vector_store_file_batch.cpython-311.pyc,, +openai/types/beta/vector_stores/__pycache__/vector_store_file_deleted.cpython-311.pyc,, 
+openai/types/beta/vector_stores/file_batch_create_params.py,sha256=XohBafDsiYkBXDWE32W62UGKBL4jRyyItjIZBIzqQmo,519 +openai/types/beta/vector_stores/file_batch_list_files_params.py,sha256=6c_KvnlFV0vkFid_thhyEK6HC6F1ixbDh2roExL_-qk,1449 +openai/types/beta/vector_stores/file_create_params.py,sha256=0LOEMzQYWoGW6HFrDNhXu1YF_rPwDv28C0yPA5WXyoU,469 +openai/types/beta/vector_stores/file_list_params.py,sha256=UC6NzZQ79tInL8xV3pMm66IFWsIT9PW_BhSbQLm4ar4,1383 +openai/types/beta/vector_stores/vector_store_file.py,sha256=_08rc2lNwXI8keTI6DBGa55DJ12JvxlS2qHwE6iqptQ,1645 +openai/types/beta/vector_stores/vector_store_file_batch.py,sha256=ubvj8z95EOdRGAp0rgI94g5uFQx0ob8hLgwOWHKda4E,1457 +openai/types/beta/vector_stores/vector_store_file_deleted.py,sha256=37J7oL2WYCgOd7Rhg2jX6IavaZT63vgUf3u6LC6C3Hs,322 +openai/types/chat/__init__.py,sha256=waUSt926WgCuTOyde_4XXkn36Hd6GhiPZr2KWYMZVy0,2464 +openai/types/chat/__pycache__/__init__.cpython-311.pyc,, +openai/types/chat/__pycache__/chat_completion.cpython-311.pyc,, +openai/types/chat/__pycache__/chat_completion_assistant_message_param.cpython-311.pyc,, +openai/types/chat/__pycache__/chat_completion_chunk.cpython-311.pyc,, +openai/types/chat/__pycache__/chat_completion_content_part_image_param.cpython-311.pyc,, +openai/types/chat/__pycache__/chat_completion_content_part_param.cpython-311.pyc,, +openai/types/chat/__pycache__/chat_completion_content_part_text_param.cpython-311.pyc,, +openai/types/chat/__pycache__/chat_completion_function_call_option_param.cpython-311.pyc,, +openai/types/chat/__pycache__/chat_completion_function_message_param.cpython-311.pyc,, +openai/types/chat/__pycache__/chat_completion_message.cpython-311.pyc,, +openai/types/chat/__pycache__/chat_completion_message_param.cpython-311.pyc,, +openai/types/chat/__pycache__/chat_completion_message_tool_call.cpython-311.pyc,, +openai/types/chat/__pycache__/chat_completion_message_tool_call_param.cpython-311.pyc,, 
+openai/types/chat/__pycache__/chat_completion_named_tool_choice_param.cpython-311.pyc,, +openai/types/chat/__pycache__/chat_completion_role.cpython-311.pyc,, +openai/types/chat/__pycache__/chat_completion_system_message_param.cpython-311.pyc,, +openai/types/chat/__pycache__/chat_completion_token_logprob.cpython-311.pyc,, +openai/types/chat/__pycache__/chat_completion_tool_choice_option_param.cpython-311.pyc,, +openai/types/chat/__pycache__/chat_completion_tool_message_param.cpython-311.pyc,, +openai/types/chat/__pycache__/chat_completion_tool_param.cpython-311.pyc,, +openai/types/chat/__pycache__/chat_completion_user_message_param.cpython-311.pyc,, +openai/types/chat/__pycache__/completion_create_params.cpython-311.pyc,, +openai/types/chat/chat_completion.py,sha256=DKbYEGcHPzNZyr8tSSbH1CsZJmRmS48SQOvolMByRx4,2321 +openai/types/chat/chat_completion_assistant_message_param.py,sha256=D2wua_9eZnKZEKu-0OC3o5w6nThu7f4HndthRVN6VsQ,1638 +openai/types/chat/chat_completion_chunk.py,sha256=Nmmaai-u3dc6lSA0abDte-WltRpayzOxO-X87vEJVU4,4095 +openai/types/chat/chat_completion_content_part_image_param.py,sha256=ODHcWpe8TIXZQHXHhEEacrRHm_TCaFWZnml-bD85XiU,797 +openai/types/chat/chat_completion_content_part_param.py,sha256=XGzw9ocldPg6Ke3ykNRuoxfORAAPtWXe4_SP1iURTDc,486 +openai/types/chat/chat_completion_content_part_text_param.py,sha256=4IpiXMKM9AuTyop5PRptPBbBhh9s93xy2vjg4Yw6NIw,429 +openai/types/chat/chat_completion_function_call_option_param.py,sha256=M-IqWHyBLkvYBcwFxxp4ydCIxbPDaMlNl4bik9UoFd4,365 +openai/types/chat/chat_completion_function_message_param.py,sha256=jIaZbBHHbt4v4xHCIyvYtYLst_X4jOznRjYNcTf0MF0,591 +openai/types/chat/chat_completion_message.py,sha256=19e2EL6cHZA6EeOVPgI_LbN3UwNLKizhtxuXnxLzhX0,1282 +openai/types/chat/chat_completion_message_param.py,sha256=RGdT7OjJPQTd2M0drDVNxBkUB-9DHMkQjNolaOY9nw0,838 +openai/types/chat/chat_completion_message_tool_call.py,sha256=XlIe2vhSYvrt8o8Yol5AQqnacI1xHqpEIV26G4oNrZY,900 
+openai/types/chat/chat_completion_message_tool_call_param.py,sha256=XNhuUpGr5qwVTo0K8YavJwleHYSdwN_urK51eKlqC24,1009 +openai/types/chat/chat_completion_named_tool_choice_param.py,sha256=JsxfSJYpOmF7zIreQ0JrXRSLp07OGCBSycRRcF6OZmg,569 +openai/types/chat/chat_completion_role.py,sha256=F5BlM6FMrJmqtCx3-W-KjhXXrVYAWv87_alwF7fOTSM,240 +openai/types/chat/chat_completion_system_message_param.py,sha256=qWEJupmzMuUa82V7OoLeQF92SKE1QoU4cXfX2o43x9E,638 +openai/types/chat/chat_completion_token_logprob.py,sha256=6-ipUFfsXMf5L7FDFi127NaVkDtmEooVgGBF6Ts965A,1769 +openai/types/chat/chat_completion_tool_choice_option_param.py,sha256=cGMIgf6e5KG1xbP1_dg-S_ktD78ECkDAPFekFBHH0PU,444 +openai/types/chat/chat_completion_tool_message_param.py,sha256=B-PST-J1VwPjaKLpzpmqfEsHlr5Owb54dnQoIhbvuY4,553 +openai/types/chat/chat_completion_tool_param.py,sha256=sve2G1DayUs-1CMzXK1x104r8KTa5K62CZdxoyLmFlk,485 +openai/types/chat/chat_completion_user_message_param.py,sha256=mik-MRkwb543C5FSJ52LtTkeA2E_HdLUgtoHEdO73XQ,792 +openai/types/chat/completion_create_params.py,sha256=4ma76CdDBlA8hoJYRerzGCZ2VsW7i-P0wRr4vgh0nwU,10008 +openai/types/chat_model.py,sha256=svOsfQBsRI9d4Fb9e2TUBi5e5VE2CHCN5bFMVABhKBY,620 +openai/types/completion.py,sha256=yuYVEVkJcMVUINNLglkxOJqCx097HKCYFeJun3Js73A,1172 +openai/types/completion_choice.py,sha256=PUk77T3Cp34UJSXoMfSzTKGWDK0rQQwq84X_PSlOUJo,965 +openai/types/completion_create_params.py,sha256=wc4u6Rk1ogmEQ9h44GNReENq8xlqYGs_YyV6PgmLNDM,7329 +openai/types/completion_usage.py,sha256=MIa0LipVCM88I9h71aXF_aVkNVt47iTa74gDtWvDDTA,436 +openai/types/create_embedding_response.py,sha256=lTAu_Pym76kFljDnnDRoDB2GNQSzWmwwlqf5ff7FNPM,798 +openai/types/embedding.py,sha256=2pV6RTSf5UV6E86Xeud5ZwmjQjMS93m_4LrQ0GN3fho,637 +openai/types/embedding_create_params.py,sha256=3p7U8i2uG1SCpELbn_IeDMLkFe-vv7cyB5dx-_4U8iU,1885 +openai/types/file_content.py,sha256=E2CsQejO19KSjdShjg5nsCtS4BbBwhPVDSfFEUA8ZNM,133 
+openai/types/file_create_params.py,sha256=gpZJLxy2Q7zPrfYY_fFEF19P5BDldzHx7v0sCPLgCMw,873 +openai/types/file_deleted.py,sha256=H_r9U7XthT5xHAo_4ay1EGGkc21eURt8MkkIBRYiQcw,277 +openai/types/file_list_params.py,sha256=VhZbSrCO0fYnUTgPE_nuBy-3A5MjpXiBtI-BahAc5SY,310 +openai/types/file_object.py,sha256=9AHXLSU2ntSagFzh96i0qDYxeQOzDeMkIUPU9hmeEFI,1226 +openai/types/fine_tuning/__init__.py,sha256=SZvjq_22oY9E4zcnrvVd0ul9U4sk_IBeOd0MsNALu5s,806 +openai/types/fine_tuning/__pycache__/__init__.cpython-311.pyc,, +openai/types/fine_tuning/__pycache__/fine_tuning_job.cpython-311.pyc,, +openai/types/fine_tuning/__pycache__/fine_tuning_job_event.cpython-311.pyc,, +openai/types/fine_tuning/__pycache__/fine_tuning_job_integration.cpython-311.pyc,, +openai/types/fine_tuning/__pycache__/fine_tuning_job_wandb_integration.cpython-311.pyc,, +openai/types/fine_tuning/__pycache__/fine_tuning_job_wandb_integration_object.cpython-311.pyc,, +openai/types/fine_tuning/__pycache__/job_create_params.cpython-311.pyc,, +openai/types/fine_tuning/__pycache__/job_list_events_params.cpython-311.pyc,, +openai/types/fine_tuning/__pycache__/job_list_params.cpython-311.pyc,, +openai/types/fine_tuning/fine_tuning_job.py,sha256=YOcsIJZPPAqOnQudOkS_Am-peQuHyyvcMWVDxFvJdEA,3861 +openai/types/fine_tuning/fine_tuning_job_event.py,sha256=oCkO0yImLZnZQLeU4GH6YyUlDG25pzs41SCWWB-sd_o,374 +openai/types/fine_tuning/fine_tuning_job_integration.py,sha256=YZI3gQSE9zhfAcghYryzoug_IPfdog_fsjf2eCIMzD8,243 +openai/types/fine_tuning/fine_tuning_job_wandb_integration.py,sha256=YnBeiz14UuhUSpnD0KBj5V143qLvJbDIMcUVWOCBLXY,1026 +openai/types/fine_tuning/fine_tuning_job_wandb_integration_object.py,sha256=7vEc2uEV2c_DENBjhq0Qy5X8B-rzxsKvGECjnvF1Wdw,804 +openai/types/fine_tuning/job_create_params.py,sha256=3wbT1U-LnSHyTa2AnRaCHeQhA_I4GP0oyoBx-2D54N4,4407 +openai/types/fine_tuning/job_list_events_params.py,sha256=4xOED4H2ky2mI9sIDytjmfJz5bNAdNWb70WIb_0bBWs,400 
+openai/types/fine_tuning/job_list_params.py,sha256=yjxaEnESVTRpJ9ItvjKq30KcD_xz_trqKMIxG2eAriE,396 +openai/types/fine_tuning/jobs/__init__.py,sha256=nuWhOUsmsoVKTKMU35kknmr8sfpTF-kkIzyuOlRbJj0,295 +openai/types/fine_tuning/jobs/__pycache__/__init__.cpython-311.pyc,, +openai/types/fine_tuning/jobs/__pycache__/checkpoint_list_params.cpython-311.pyc,, +openai/types/fine_tuning/jobs/__pycache__/fine_tuning_job_checkpoint.cpython-311.pyc,, +openai/types/fine_tuning/jobs/checkpoint_list_params.py,sha256=XoDLkkKCWmf5an5rnoVEpNK8mtQHq1fHw9EqmezfrXM,415 +openai/types/fine_tuning/jobs/fine_tuning_job_checkpoint.py,sha256=Z_sUhebJY9nWSssZU7QoOJwe5sez76sCAuVeSO63XhY,1347 +openai/types/image.py,sha256=9No-8GHesOUbjchemY1jqtMwh_s22oBmLVFlLn2KoQo,607 +openai/types/image_create_variation_params.py,sha256=3f0qYfKrSuYA2gv7lyCq0FsRM36QctZ_Ki2YPLeNNj4,1450 +openai/types/image_edit_params.py,sha256=oQIiKqlU_59H1f0HtBlQw_BJ7mBEXRispfoGuDnfXHI,1810 +openai/types/image_generate_params.py,sha256=YztuD1oHepGqmP-m78Uhay67IgwGk7CspdAn2YWihlw,2116 +openai/types/images_response.py,sha256=EJ4qxYZ8CPGh2SZdRsyw6I0FnUvlgwxwc4NgPovJrvk,274 +openai/types/model.py,sha256=DMw8KwQx8B6S6sAI038D0xdzkmYdY5-r0oMhCUG4l6w,532 +openai/types/model_deleted.py,sha256=rDGU-Ul4lMfNf5XxKNxZKo9CQPGsrkrzqnhl00GLMi4,230 +openai/types/moderation.py,sha256=ihR2jzld_BfOaHW1_6A2csTInEaJvAl5nPxuh_jegY4,3933 +openai/types/moderation_create_params.py,sha256=Rz8kzoisqPihOLdPjrSchM0uml5VPHV8DqcrE56rwUs,954 +openai/types/moderation_create_response.py,sha256=e6SVfWX2_JX25Za0C6KojcnbMTtDB2A7cjUm6cFMKcs,484 +openai/types/shared/__init__.py,sha256=eoiCHGKeY1_YjOn41M8QxvIUI_M68Ltsr1d67g_Pr-I,288 +openai/types/shared/__pycache__/__init__.cpython-311.pyc,, +openai/types/shared/__pycache__/error_object.cpython-311.pyc,, +openai/types/shared/__pycache__/function_definition.cpython-311.pyc,, +openai/types/shared/__pycache__/function_parameters.cpython-311.pyc,, 
+openai/types/shared/error_object.py,sha256=G7SGPZ9Qw3gewTKbi3fK69eM6L2Ur0C2D57N8iEapJA,305 +openai/types/shared/function_definition.py,sha256=n505SpWCIf_ntWZZ8liz0rcLhLxUsdnULsM5IA0fBUk,1067 +openai/types/shared/function_parameters.py,sha256=jhabBaJFMgWfFduqmKQ0dkKfK5DWlwgde30SlZVcCYc,185 +openai/types/shared_params/__init__.py,sha256=Jaw3mmmUB3Ky7vL1fzsh-8kAJEbeYxcQ0JOy7p765Xo,235 +openai/types/shared_params/__pycache__/__init__.cpython-311.pyc,, +openai/types/shared_params/__pycache__/function_definition.cpython-311.pyc,, +openai/types/shared_params/__pycache__/function_parameters.cpython-311.pyc,, +openai/types/shared_params/function_definition.py,sha256=zq61IKY91bRJ346qkrS3_5w3R-xKgEEIdkXdN-Zj9Uc,1078 +openai/types/shared_params/function_parameters.py,sha256=vqZAZwPBh14Ykp84NFTXF_j0eoDyqF9V_d8-_n-KF9w,221 +openai/version.py,sha256=cjbXKO8Ut3aiv4YlQnugff7AdC48MpSndcx96q88Yb8,62 diff --git a/.venv/Lib/site-packages/openai-1.25.2.dist-info/REQUESTED b/.venv/Lib/site-packages/openai-1.25.2.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/openai-1.25.2.dist-info/WHEEL b/.venv/Lib/site-packages/openai-1.25.2.dist-info/WHEEL new file mode 100644 index 00000000..516596c7 --- /dev/null +++ b/.venv/Lib/site-packages/openai-1.25.2.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.24.2 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.venv/Lib/site-packages/openai-1.25.2.dist-info/entry_points.txt b/.venv/Lib/site-packages/openai-1.25.2.dist-info/entry_points.txt new file mode 100644 index 00000000..98999396 --- /dev/null +++ b/.venv/Lib/site-packages/openai-1.25.2.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +openai = openai.cli:main diff --git a/.venv/Lib/site-packages/openai-1.25.2.dist-info/licenses/LICENSE b/.venv/Lib/site-packages/openai-1.25.2.dist-info/licenses/LICENSE new file mode 100644 index 00000000..621a6bec --- /dev/null +++ 
b/.venv/Lib/site-packages/openai-1.25.2.dist-info/licenses/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2024 OpenAI + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/.venv/Lib/site-packages/openai/__init__.py b/.venv/Lib/site-packages/openai/__init__.py new file mode 100644 index 00000000..0e87ae92 --- /dev/null +++ b/.venv/Lib/site-packages/openai/__init__.py @@ -0,0 +1,357 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os as _os +from typing_extensions import override + +from . 
import types +from ._types import NOT_GIVEN, NoneType, NotGiven, Transport, ProxiesTypes +from ._utils import file_from_path +from ._client import Client, OpenAI, Stream, Timeout, Transport, AsyncClient, AsyncOpenAI, AsyncStream, RequestOptions +from ._models import BaseModel +from ._version import __title__, __version__ +from ._response import APIResponse as APIResponse, AsyncAPIResponse as AsyncAPIResponse +from ._constants import DEFAULT_TIMEOUT, DEFAULT_MAX_RETRIES, DEFAULT_CONNECTION_LIMITS +from ._exceptions import ( + APIError, + OpenAIError, + ConflictError, + NotFoundError, + APIStatusError, + RateLimitError, + APITimeoutError, + BadRequestError, + APIConnectionError, + AuthenticationError, + InternalServerError, + PermissionDeniedError, + UnprocessableEntityError, + APIResponseValidationError, +) +from ._base_client import DefaultHttpxClient, DefaultAsyncHttpxClient +from ._utils._logs import setup_logging as _setup_logging + +__all__ = [ + "types", + "__version__", + "__title__", + "NoneType", + "Transport", + "ProxiesTypes", + "NotGiven", + "NOT_GIVEN", + "OpenAIError", + "APIError", + "APIStatusError", + "APITimeoutError", + "APIConnectionError", + "APIResponseValidationError", + "BadRequestError", + "AuthenticationError", + "PermissionDeniedError", + "NotFoundError", + "ConflictError", + "UnprocessableEntityError", + "RateLimitError", + "InternalServerError", + "Timeout", + "RequestOptions", + "Client", + "AsyncClient", + "Stream", + "AsyncStream", + "OpenAI", + "AsyncOpenAI", + "file_from_path", + "BaseModel", + "DEFAULT_TIMEOUT", + "DEFAULT_MAX_RETRIES", + "DEFAULT_CONNECTION_LIMITS", + "DefaultHttpxClient", + "DefaultAsyncHttpxClient", +] + +from .lib import azure as _azure +from .version import VERSION as VERSION +from .lib.azure import AzureOpenAI as AzureOpenAI, AsyncAzureOpenAI as AsyncAzureOpenAI +from .lib._old_api import * +from .lib.streaming import ( + AssistantEventHandler as AssistantEventHandler, + AsyncAssistantEventHandler as 
AsyncAssistantEventHandler, +) + +_setup_logging() + +# Update the __module__ attribute for exported symbols so that +# error messages point to this module instead of the module +# it was originally defined in, e.g. +# openai._exceptions.NotFoundError -> openai.NotFoundError +__locals = locals() +for __name in __all__: + if not __name.startswith("__"): + try: + __locals[__name].__module__ = "openai" + except (TypeError, AttributeError): + # Some of our exported symbols are builtins which we can't set attributes for. + pass + +# ------ Module level client ------ +import typing as _t +import typing_extensions as _te + +import httpx as _httpx + +from ._base_client import DEFAULT_TIMEOUT, DEFAULT_MAX_RETRIES + +api_key: str | None = None + +organization: str | None = None + +project: str | None = None + +base_url: str | _httpx.URL | None = None + +timeout: float | Timeout | None = DEFAULT_TIMEOUT + +max_retries: int = DEFAULT_MAX_RETRIES + +default_headers: _t.Mapping[str, str] | None = None + +default_query: _t.Mapping[str, object] | None = None + +http_client: _httpx.Client | None = None + +_ApiType = _te.Literal["openai", "azure"] + +api_type: _ApiType | None = _t.cast(_ApiType, _os.environ.get("OPENAI_API_TYPE")) + +api_version: str | None = _os.environ.get("OPENAI_API_VERSION") + +azure_endpoint: str | None = _os.environ.get("AZURE_OPENAI_ENDPOINT") + +azure_ad_token: str | None = _os.environ.get("AZURE_OPENAI_AD_TOKEN") + +azure_ad_token_provider: _azure.AzureADTokenProvider | None = None + + +class _ModuleClient(OpenAI): + # Note: we have to use type: ignores here as overriding class members + # with properties is technically unsafe but it is fine for our use case + + @property # type: ignore + @override + def api_key(self) -> str | None: + return api_key + + @api_key.setter # type: ignore + def api_key(self, value: str | None) -> None: # type: ignore + global api_key + + api_key = value + + @property # type: ignore + @override + def organization(self) -> str | 
None: + return organization + + @organization.setter # type: ignore + def organization(self, value: str | None) -> None: # type: ignore + global organization + + organization = value + + @property # type: ignore + @override + def project(self) -> str | None: + return project + + @project.setter # type: ignore + def project(self, value: str | None) -> None: # type: ignore + global project + + project = value + + @property + @override + def base_url(self) -> _httpx.URL: + if base_url is not None: + return _httpx.URL(base_url) + + return super().base_url + + @base_url.setter + def base_url(self, url: _httpx.URL | str) -> None: + super().base_url = url # type: ignore[misc] + + @property # type: ignore + @override + def timeout(self) -> float | Timeout | None: + return timeout + + @timeout.setter # type: ignore + def timeout(self, value: float | Timeout | None) -> None: # type: ignore + global timeout + + timeout = value + + @property # type: ignore + @override + def max_retries(self) -> int: + return max_retries + + @max_retries.setter # type: ignore + def max_retries(self, value: int) -> None: # type: ignore + global max_retries + + max_retries = value + + @property # type: ignore + @override + def _custom_headers(self) -> _t.Mapping[str, str] | None: + return default_headers + + @_custom_headers.setter # type: ignore + def _custom_headers(self, value: _t.Mapping[str, str] | None) -> None: # type: ignore + global default_headers + + default_headers = value + + @property # type: ignore + @override + def _custom_query(self) -> _t.Mapping[str, object] | None: + return default_query + + @_custom_query.setter # type: ignore + def _custom_query(self, value: _t.Mapping[str, object] | None) -> None: # type: ignore + global default_query + + default_query = value + + @property # type: ignore + @override + def _client(self) -> _httpx.Client: + return http_client or super()._client + + @_client.setter # type: ignore + def _client(self, value: _httpx.Client) -> None: # type: 
ignore + global http_client + + http_client = value + + +class _AzureModuleClient(_ModuleClient, AzureOpenAI): # type: ignore + ... + + +class _AmbiguousModuleClientUsageError(OpenAIError): + def __init__(self) -> None: + super().__init__( + "Ambiguous use of module client; please set `openai.api_type` or the `OPENAI_API_TYPE` environment variable to `openai` or `azure`" + ) + + +def _has_openai_credentials() -> bool: + return _os.environ.get("OPENAI_API_KEY") is not None + + +def _has_azure_credentials() -> bool: + return azure_endpoint is not None or _os.environ.get("AZURE_OPENAI_API_KEY") is not None + + +def _has_azure_ad_credentials() -> bool: + return ( + _os.environ.get("AZURE_OPENAI_AD_TOKEN") is not None + or azure_ad_token is not None + or azure_ad_token_provider is not None + ) + + +_client: OpenAI | None = None + + +def _load_client() -> OpenAI: # type: ignore[reportUnusedFunction] + global _client + + if _client is None: + global api_type, azure_endpoint, azure_ad_token, api_version + + if azure_endpoint is None: + azure_endpoint = _os.environ.get("AZURE_OPENAI_ENDPOINT") + + if azure_ad_token is None: + azure_ad_token = _os.environ.get("AZURE_OPENAI_AD_TOKEN") + + if api_version is None: + api_version = _os.environ.get("OPENAI_API_VERSION") + + if api_type is None: + has_openai = _has_openai_credentials() + has_azure = _has_azure_credentials() + has_azure_ad = _has_azure_ad_credentials() + + if has_openai and (has_azure or has_azure_ad): + raise _AmbiguousModuleClientUsageError() + + if (azure_ad_token is not None or azure_ad_token_provider is not None) and _os.environ.get( + "AZURE_OPENAI_API_KEY" + ) is not None: + raise _AmbiguousModuleClientUsageError() + + if has_azure or has_azure_ad: + api_type = "azure" + else: + api_type = "openai" + + if api_type == "azure": + _client = _AzureModuleClient( # type: ignore + api_version=api_version, + azure_endpoint=azure_endpoint, + api_key=api_key, + azure_ad_token=azure_ad_token, + 
azure_ad_token_provider=azure_ad_token_provider, + organization=organization, + base_url=base_url, + timeout=timeout, + max_retries=max_retries, + default_headers=default_headers, + default_query=default_query, + http_client=http_client, + ) + return _client + + _client = _ModuleClient( + api_key=api_key, + organization=organization, + project=project, + base_url=base_url, + timeout=timeout, + max_retries=max_retries, + default_headers=default_headers, + default_query=default_query, + http_client=http_client, + ) + return _client + + return _client + + +def _reset_client() -> None: # type: ignore[reportUnusedFunction] + global _client + + _client = None + + +from ._module_client import ( + beta as beta, + chat as chat, + audio as audio, + files as files, + images as images, + models as models, + batches as batches, + embeddings as embeddings, + completions as completions, + fine_tuning as fine_tuning, + moderations as moderations, +) diff --git a/.venv/Lib/site-packages/openai/__main__.py b/.venv/Lib/site-packages/openai/__main__.py new file mode 100644 index 00000000..4e28416e --- /dev/null +++ b/.venv/Lib/site-packages/openai/__main__.py @@ -0,0 +1,3 @@ +from .cli import main + +main() diff --git a/.venv/Lib/site-packages/openai/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..a836ffff Binary files /dev/null and b/.venv/Lib/site-packages/openai/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/__pycache__/__main__.cpython-311.pyc b/.venv/Lib/site-packages/openai/__pycache__/__main__.cpython-311.pyc new file mode 100644 index 00000000..5934f64b Binary files /dev/null and b/.venv/Lib/site-packages/openai/__pycache__/__main__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/__pycache__/_base_client.cpython-311.pyc b/.venv/Lib/site-packages/openai/__pycache__/_base_client.cpython-311.pyc new file mode 100644 index 
00000000..8d93c2da Binary files /dev/null and b/.venv/Lib/site-packages/openai/__pycache__/_base_client.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/__pycache__/_client.cpython-311.pyc b/.venv/Lib/site-packages/openai/__pycache__/_client.cpython-311.pyc new file mode 100644 index 00000000..d3a357af Binary files /dev/null and b/.venv/Lib/site-packages/openai/__pycache__/_client.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/__pycache__/_compat.cpython-311.pyc b/.venv/Lib/site-packages/openai/__pycache__/_compat.cpython-311.pyc new file mode 100644 index 00000000..a7f9bbf3 Binary files /dev/null and b/.venv/Lib/site-packages/openai/__pycache__/_compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/__pycache__/_constants.cpython-311.pyc b/.venv/Lib/site-packages/openai/__pycache__/_constants.cpython-311.pyc new file mode 100644 index 00000000..939a7dbf Binary files /dev/null and b/.venv/Lib/site-packages/openai/__pycache__/_constants.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/__pycache__/_exceptions.cpython-311.pyc b/.venv/Lib/site-packages/openai/__pycache__/_exceptions.cpython-311.pyc new file mode 100644 index 00000000..4575746d Binary files /dev/null and b/.venv/Lib/site-packages/openai/__pycache__/_exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/__pycache__/_files.cpython-311.pyc b/.venv/Lib/site-packages/openai/__pycache__/_files.cpython-311.pyc new file mode 100644 index 00000000..f02cb51a Binary files /dev/null and b/.venv/Lib/site-packages/openai/__pycache__/_files.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/__pycache__/_legacy_response.cpython-311.pyc b/.venv/Lib/site-packages/openai/__pycache__/_legacy_response.cpython-311.pyc new file mode 100644 index 00000000..e8e32abf Binary files /dev/null and b/.venv/Lib/site-packages/openai/__pycache__/_legacy_response.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/openai/__pycache__/_models.cpython-311.pyc b/.venv/Lib/site-packages/openai/__pycache__/_models.cpython-311.pyc new file mode 100644 index 00000000..41a10f6b Binary files /dev/null and b/.venv/Lib/site-packages/openai/__pycache__/_models.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/__pycache__/_module_client.cpython-311.pyc b/.venv/Lib/site-packages/openai/__pycache__/_module_client.cpython-311.pyc new file mode 100644 index 00000000..4136443d Binary files /dev/null and b/.venv/Lib/site-packages/openai/__pycache__/_module_client.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/__pycache__/_qs.cpython-311.pyc b/.venv/Lib/site-packages/openai/__pycache__/_qs.cpython-311.pyc new file mode 100644 index 00000000..a5a99e8f Binary files /dev/null and b/.venv/Lib/site-packages/openai/__pycache__/_qs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/__pycache__/_resource.cpython-311.pyc b/.venv/Lib/site-packages/openai/__pycache__/_resource.cpython-311.pyc new file mode 100644 index 00000000..2aa056d0 Binary files /dev/null and b/.venv/Lib/site-packages/openai/__pycache__/_resource.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/__pycache__/_response.cpython-311.pyc b/.venv/Lib/site-packages/openai/__pycache__/_response.cpython-311.pyc new file mode 100644 index 00000000..18479d8e Binary files /dev/null and b/.venv/Lib/site-packages/openai/__pycache__/_response.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/__pycache__/_streaming.cpython-311.pyc b/.venv/Lib/site-packages/openai/__pycache__/_streaming.cpython-311.pyc new file mode 100644 index 00000000..428eed35 Binary files /dev/null and b/.venv/Lib/site-packages/openai/__pycache__/_streaming.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/__pycache__/_types.cpython-311.pyc b/.venv/Lib/site-packages/openai/__pycache__/_types.cpython-311.pyc new file mode 100644 index 00000000..05627a5b 
Binary files /dev/null and b/.venv/Lib/site-packages/openai/__pycache__/_types.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/__pycache__/_version.cpython-311.pyc b/.venv/Lib/site-packages/openai/__pycache__/_version.cpython-311.pyc new file mode 100644 index 00000000..106808b0 Binary files /dev/null and b/.venv/Lib/site-packages/openai/__pycache__/_version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/__pycache__/pagination.cpython-311.pyc b/.venv/Lib/site-packages/openai/__pycache__/pagination.cpython-311.pyc new file mode 100644 index 00000000..c6400ff5 Binary files /dev/null and b/.venv/Lib/site-packages/openai/__pycache__/pagination.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/openai/__pycache__/version.cpython-311.pyc new file mode 100644 index 00000000..39ac5883 Binary files /dev/null and b/.venv/Lib/site-packages/openai/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/_base_client.py b/.venv/Lib/site-packages/openai/_base_client.py new file mode 100644 index 00000000..5d5d25fc --- /dev/null +++ b/.venv/Lib/site-packages/openai/_base_client.py @@ -0,0 +1,2019 @@ +from __future__ import annotations + +import json +import time +import uuid +import email +import asyncio +import inspect +import logging +import platform +import warnings +import email.utils +from types import TracebackType +from random import random +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Type, + Union, + Generic, + Mapping, + TypeVar, + Iterable, + Iterator, + Optional, + Generator, + AsyncIterator, + cast, + overload, +) +from typing_extensions import Literal, override, get_origin + +import anyio +import httpx +import distro +import pydantic +from httpx import URL, Limits +from pydantic import PrivateAttr + +from . 
import _exceptions +from ._qs import Querystring +from ._files import to_httpx_files, async_to_httpx_files +from ._types import ( + NOT_GIVEN, + Body, + Omit, + Query, + Headers, + Timeout, + NotGiven, + ResponseT, + Transport, + AnyMapping, + PostParser, + ProxiesTypes, + RequestFiles, + HttpxSendArgs, + AsyncTransport, + RequestOptions, + ModelBuilderProtocol, +) +from ._utils import is_dict, is_list, is_given, lru_cache, is_mapping +from ._compat import model_copy, model_dump +from ._models import GenericModel, FinalRequestOptions, validate_type, construct_type +from ._response import ( + APIResponse, + BaseAPIResponse, + AsyncAPIResponse, + extract_response_type, +) +from ._constants import ( + DEFAULT_TIMEOUT, + MAX_RETRY_DELAY, + DEFAULT_MAX_RETRIES, + INITIAL_RETRY_DELAY, + RAW_RESPONSE_HEADER, + OVERRIDE_CAST_TO_HEADER, + DEFAULT_CONNECTION_LIMITS, +) +from ._streaming import Stream, SSEDecoder, AsyncStream, SSEBytesDecoder +from ._exceptions import ( + APIStatusError, + APITimeoutError, + APIConnectionError, + APIResponseValidationError, +) +from ._legacy_response import LegacyAPIResponse + +log: logging.Logger = logging.getLogger(__name__) + +# TODO: make base page type vars covariant +SyncPageT = TypeVar("SyncPageT", bound="BaseSyncPage[Any]") +AsyncPageT = TypeVar("AsyncPageT", bound="BaseAsyncPage[Any]") + + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) + +_StreamT = TypeVar("_StreamT", bound=Stream[Any]) +_AsyncStreamT = TypeVar("_AsyncStreamT", bound=AsyncStream[Any]) + +if TYPE_CHECKING: + from httpx._config import DEFAULT_TIMEOUT_CONFIG as HTTPX_DEFAULT_TIMEOUT +else: + try: + from httpx._config import DEFAULT_TIMEOUT_CONFIG as HTTPX_DEFAULT_TIMEOUT + except ImportError: + # taken from https://github.com/encode/httpx/blob/3ba5fe0d7ac70222590e759c31442b1cab263791/httpx/_config.py#L366 + HTTPX_DEFAULT_TIMEOUT = Timeout(5.0) + + +class PageInfo: + """Stores the necessary information to build the request to retrieve the next page. 
+ + Either `url` or `params` must be set. + """ + + url: URL | NotGiven + params: Query | NotGiven + + @overload + def __init__( + self, + *, + url: URL, + ) -> None: + ... + + @overload + def __init__( + self, + *, + params: Query, + ) -> None: + ... + + def __init__( + self, + *, + url: URL | NotGiven = NOT_GIVEN, + params: Query | NotGiven = NOT_GIVEN, + ) -> None: + self.url = url + self.params = params + + +class BasePage(GenericModel, Generic[_T]): + """ + Defines the core interface for pagination. + + Type Args: + ModelT: The pydantic model that represents an item in the response. + + Methods: + has_next_page(): Check if there is another page available + next_page_info(): Get the necessary information to make a request for the next page + """ + + _options: FinalRequestOptions = PrivateAttr() + _model: Type[_T] = PrivateAttr() + + def has_next_page(self) -> bool: + items = self._get_page_items() + if not items: + return False + return self.next_page_info() is not None + + def next_page_info(self) -> Optional[PageInfo]: + ... + + def _get_page_items(self) -> Iterable[_T]: # type: ignore[empty-body] + ... + + def _params_from_url(self, url: URL) -> httpx.QueryParams: + # TODO: do we have to preprocess params here? 
+ return httpx.QueryParams(cast(Any, self._options.params)).merge(url.params) + + def _info_to_options(self, info: PageInfo) -> FinalRequestOptions: + options = model_copy(self._options) + options._strip_raw_response_header() + + if not isinstance(info.params, NotGiven): + options.params = {**options.params, **info.params} + return options + + if not isinstance(info.url, NotGiven): + params = self._params_from_url(info.url) + url = info.url.copy_with(params=params) + options.params = dict(url.params) + options.url = str(url) + return options + + raise ValueError("Unexpected PageInfo state") + + +class BaseSyncPage(BasePage[_T], Generic[_T]): + _client: SyncAPIClient = pydantic.PrivateAttr() + + def _set_private_attributes( + self, + client: SyncAPIClient, + model: Type[_T], + options: FinalRequestOptions, + ) -> None: + self._model = model + self._client = client + self._options = options + + # Pydantic uses a custom `__iter__` method to support casting BaseModels + # to dictionaries. e.g. dict(model). + # As we want to support `for item in page`, this is inherently incompatible + # with the default pydantic behaviour. It is not possible to support both + # use cases at once. Fortunately, this is not a big deal as all other pydantic + # methods should continue to work as expected as there is an alternative method + # to cast a model to a dictionary, model.dict(), which is used internally + # by pydantic. + def __iter__(self) -> Iterator[_T]: # type: ignore + for page in self.iter_pages(): + for item in page._get_page_items(): + yield item + + def iter_pages(self: SyncPageT) -> Iterator[SyncPageT]: + page = self + while True: + yield page + if page.has_next_page(): + page = page.get_next_page() + else: + return + + def get_next_page(self: SyncPageT) -> SyncPageT: + info = self.next_page_info() + if not info: + raise RuntimeError( + "No next page expected; please check `.has_next_page()` before calling `.get_next_page()`." 
+ ) + + options = self._info_to_options(info) + return self._client._request_api_list(self._model, page=self.__class__, options=options) + + +class AsyncPaginator(Generic[_T, AsyncPageT]): + def __init__( + self, + client: AsyncAPIClient, + options: FinalRequestOptions, + page_cls: Type[AsyncPageT], + model: Type[_T], + ) -> None: + self._model = model + self._client = client + self._options = options + self._page_cls = page_cls + + def __await__(self) -> Generator[Any, None, AsyncPageT]: + return self._get_page().__await__() + + async def _get_page(self) -> AsyncPageT: + def _parser(resp: AsyncPageT) -> AsyncPageT: + resp._set_private_attributes( + model=self._model, + options=self._options, + client=self._client, + ) + return resp + + self._options.post_parser = _parser + + return await self._client.request(self._page_cls, self._options) + + async def __aiter__(self) -> AsyncIterator[_T]: + # https://github.com/microsoft/pyright/issues/3464 + page = cast( + AsyncPageT, + await self, # type: ignore + ) + async for item in page: + yield item + + +class BaseAsyncPage(BasePage[_T], Generic[_T]): + _client: AsyncAPIClient = pydantic.PrivateAttr() + + def _set_private_attributes( + self, + model: Type[_T], + client: AsyncAPIClient, + options: FinalRequestOptions, + ) -> None: + self._model = model + self._client = client + self._options = options + + async def __aiter__(self) -> AsyncIterator[_T]: + async for page in self.iter_pages(): + for item in page._get_page_items(): + yield item + + async def iter_pages(self: AsyncPageT) -> AsyncIterator[AsyncPageT]: + page = self + while True: + yield page + if page.has_next_page(): + page = await page.get_next_page() + else: + return + + async def get_next_page(self: AsyncPageT) -> AsyncPageT: + info = self.next_page_info() + if not info: + raise RuntimeError( + "No next page expected; please check `.has_next_page()` before calling `.get_next_page()`." 
+ ) + + options = self._info_to_options(info) + return await self._client._request_api_list(self._model, page=self.__class__, options=options) + + +_HttpxClientT = TypeVar("_HttpxClientT", bound=Union[httpx.Client, httpx.AsyncClient]) +_DefaultStreamT = TypeVar("_DefaultStreamT", bound=Union[Stream[Any], AsyncStream[Any]]) + + +class BaseClient(Generic[_HttpxClientT, _DefaultStreamT]): + _client: _HttpxClientT + _version: str + _base_url: URL + max_retries: int + timeout: Union[float, Timeout, None] + _limits: httpx.Limits + _proxies: ProxiesTypes | None + _transport: Transport | AsyncTransport | None + _strict_response_validation: bool + _idempotency_header: str | None + _default_stream_cls: type[_DefaultStreamT] | None = None + + def __init__( + self, + *, + version: str, + base_url: str | URL, + _strict_response_validation: bool, + max_retries: int = DEFAULT_MAX_RETRIES, + timeout: float | Timeout | None = DEFAULT_TIMEOUT, + limits: httpx.Limits, + transport: Transport | AsyncTransport | None, + proxies: ProxiesTypes | None, + custom_headers: Mapping[str, str] | None = None, + custom_query: Mapping[str, object] | None = None, + ) -> None: + self._version = version + self._base_url = self._enforce_trailing_slash(URL(base_url)) + self.max_retries = max_retries + self.timeout = timeout + self._limits = limits + self._proxies = proxies + self._transport = transport + self._custom_headers = custom_headers or {} + self._custom_query = custom_query or {} + self._strict_response_validation = _strict_response_validation + self._idempotency_header = None + + if max_retries is None: # pyright: ignore[reportUnnecessaryComparison] + raise TypeError( + "max_retries cannot be None. 
If you want to disable retries, pass `0`; if you want unlimited retries, pass `math.inf` or a very high number; if you want the default behavior, pass `openai.DEFAULT_MAX_RETRIES`" + ) + + def _enforce_trailing_slash(self, url: URL) -> URL: + if url.raw_path.endswith(b"/"): + return url + return url.copy_with(raw_path=url.raw_path + b"/") + + def _make_status_error_from_response( + self, + response: httpx.Response, + ) -> APIStatusError: + if response.is_closed and not response.is_stream_consumed: + # We can't read the response body as it has been closed + # before it was read. This can happen if an event hook + # raises a status error. + body = None + err_msg = f"Error code: {response.status_code}" + else: + err_text = response.text.strip() + body = err_text + + try: + body = json.loads(err_text) + err_msg = f"Error code: {response.status_code} - {body}" + except Exception: + err_msg = err_text or f"Error code: {response.status_code}" + + return self._make_status_error(err_msg, body=body, response=response) + + def _make_status_error( + self, + err_msg: str, + *, + body: object, + response: httpx.Response, + ) -> _exceptions.APIStatusError: + raise NotImplementedError() + + def _remaining_retries( + self, + remaining_retries: Optional[int], + options: FinalRequestOptions, + ) -> int: + return remaining_retries if remaining_retries is not None else options.get_max_retries(self.max_retries) + + def _build_headers(self, options: FinalRequestOptions) -> httpx.Headers: + custom_headers = options.headers or {} + headers_dict = _merge_mappings(self.default_headers, custom_headers) + self._validate_headers(headers_dict, custom_headers) + + # headers are case-insensitive while dictionaries are not. 
+ headers = httpx.Headers(headers_dict) + + idempotency_header = self._idempotency_header + if idempotency_header and options.method.lower() != "get" and idempotency_header not in headers: + headers[idempotency_header] = options.idempotency_key or self._idempotency_key() + + return headers + + def _prepare_url(self, url: str) -> URL: + """ + Merge a URL argument together with any 'base_url' on the client, + to create the URL used for the outgoing request. + """ + # Copied from httpx's `_merge_url` method. + merge_url = URL(url) + if merge_url.is_relative_url: + merge_raw_path = self.base_url.raw_path + merge_url.raw_path.lstrip(b"/") + return self.base_url.copy_with(raw_path=merge_raw_path) + + return merge_url + + def _make_sse_decoder(self) -> SSEDecoder | SSEBytesDecoder: + return SSEDecoder() + + def _build_request( + self, + options: FinalRequestOptions, + ) -> httpx.Request: + if log.isEnabledFor(logging.DEBUG): + log.debug("Request options: %s", model_dump(options, exclude_unset=True)) + + kwargs: dict[str, Any] = {} + + json_data = options.json_data + if options.extra_json is not None: + if json_data is None: + json_data = cast(Body, options.extra_json) + elif is_mapping(json_data): + json_data = _merge_mappings(json_data, options.extra_json) + else: + raise RuntimeError(f"Unexpected JSON data type, {type(json_data)}, cannot merge with `extra_body`") + + headers = self._build_headers(options) + params = _merge_mappings(self._custom_query, options.params) + content_type = headers.get("Content-Type") + + # If the given Content-Type header is multipart/form-data then it + # has to be removed so that httpx can generate the header with + # additional information for us as it has to be in this form + # for the server to be able to correctly parse the request: + # multipart/form-data; boundary=---abc-- + if content_type is not None and content_type.startswith("multipart/form-data"): + if "boundary" not in content_type: + # only remove the header if the boundary 
hasn't been explicitly set + # as the caller doesn't want httpx to come up with their own boundary + headers.pop("Content-Type") + + # As we are now sending multipart/form-data instead of application/json + # we need to tell httpx to use it, https://www.python-httpx.org/advanced/#multipart-file-encoding + if json_data: + if not is_dict(json_data): + raise TypeError( + f"Expected query input to be a dictionary for multipart requests but got {type(json_data)} instead." + ) + kwargs["data"] = self._serialize_multipartform(json_data) + + # TODO: report this error to httpx + return self._client.build_request( # pyright: ignore[reportUnknownMemberType] + headers=headers, + timeout=self.timeout if isinstance(options.timeout, NotGiven) else options.timeout, + method=options.method, + url=self._prepare_url(options.url), + # the `Query` type that we use is incompatible with qs' + # `Params` type as it needs to be typed as `Mapping[str, object]` + # so that passing a `TypedDict` doesn't cause an error. + # https://github.com/microsoft/pyright/issues/3526#event-6715453066 + params=self.qs.stringify(cast(Mapping[str, Any], params)) if params else None, + json=json_data, + files=options.files, + **kwargs, + ) + + def _serialize_multipartform(self, data: Mapping[object, object]) -> dict[str, object]: + items = self.qs.stringify_items( + # TODO: type ignore is required as stringify_items is well typed but we can't be + # well typed without heavy validation. + data, # type: ignore + array_format="brackets", + ) + serialized: dict[str, object] = {} + for key, value in items: + existing = serialized.get(key) + + if not existing: + serialized[key] = value + continue + + # If a value has already been set for this key then that + # means we're sending data like `array[]=[1, 2, 3]` and we + # need to tell httpx that we want to send multiple values with + # the same key which is done by using a list or a tuple. 
+ # + # Note: 2d arrays should never result in the same key at both + # levels so it's safe to assume that if the value is a list, + # it was because we changed it to be a list. + if is_list(existing): + existing.append(value) + else: + serialized[key] = [existing, value] + + return serialized + + def _maybe_override_cast_to(self, cast_to: type[ResponseT], options: FinalRequestOptions) -> type[ResponseT]: + if not is_given(options.headers): + return cast_to + + # make a copy of the headers so we don't mutate user-input + headers = dict(options.headers) + + # we internally support defining a temporary header to override the + # default `cast_to` type for use with `.with_raw_response` and `.with_streaming_response` + # see _response.py for implementation details + override_cast_to = headers.pop(OVERRIDE_CAST_TO_HEADER, NOT_GIVEN) + if is_given(override_cast_to): + options.headers = headers + return cast(Type[ResponseT], override_cast_to) + + return cast_to + + def _should_stream_response_body(self, request: httpx.Request) -> bool: + return request.headers.get(RAW_RESPONSE_HEADER) == "stream" # type: ignore[no-any-return] + + def _process_response_data( + self, + *, + data: object, + cast_to: type[ResponseT], + response: httpx.Response, + ) -> ResponseT: + if data is None: + return cast(ResponseT, None) + + if cast_to is object: + return cast(ResponseT, data) + + try: + if inspect.isclass(cast_to) and issubclass(cast_to, ModelBuilderProtocol): + return cast(ResponseT, cast_to.build(response=response, data=data)) + + if self._strict_response_validation: + return cast(ResponseT, validate_type(type_=cast_to, value=data)) + + return cast(ResponseT, construct_type(type_=cast_to, value=data)) + except pydantic.ValidationError as err: + raise APIResponseValidationError(response=response, body=data) from err + + @property + def qs(self) -> Querystring: + return Querystring() + + @property + def custom_auth(self) -> httpx.Auth | None: + return None + + @property + def 
auth_headers(self) -> dict[str, str]: + return {} + + @property + def default_headers(self) -> dict[str, str | Omit]: + return { + "Accept": "application/json", + "Content-Type": "application/json", + "User-Agent": self.user_agent, + **self.platform_headers(), + **self.auth_headers, + **self._custom_headers, + } + + def _validate_headers( + self, + headers: Headers, # noqa: ARG002 + custom_headers: Headers, # noqa: ARG002 + ) -> None: + """Validate the given default headers and custom headers. + + Does nothing by default. + """ + return + + @property + def user_agent(self) -> str: + return f"{self.__class__.__name__}/Python {self._version}" + + @property + def base_url(self) -> URL: + return self._base_url + + @base_url.setter + def base_url(self, url: URL | str) -> None: + self._base_url = self._enforce_trailing_slash(url if isinstance(url, URL) else URL(url)) + + def platform_headers(self) -> Dict[str, str]: + return platform_headers(self._version) + + def _parse_retry_after_header(self, response_headers: Optional[httpx.Headers] = None) -> float | None: + """Returns a float of the number of seconds (not milliseconds) to wait after retrying, or None if unspecified. + + About the Retry-After header: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After + See also https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After#syntax + """ + if response_headers is None: + return None + + # First, try the non-standard `retry-after-ms` header for milliseconds, + # which is more precise than integer-seconds `retry-after` + try: + retry_ms_header = response_headers.get("retry-after-ms", None) + return float(retry_ms_header) / 1000 + except (TypeError, ValueError): + pass + + # Next, try parsing `retry-after` header as seconds (allowing nonstandard floats). 
+ retry_header = response_headers.get("retry-after") + try: + # note: the spec indicates that this should only ever be an integer + # but if someone sends a float there's no reason for us to not respect it + return float(retry_header) + except (TypeError, ValueError): + pass + + # Last, try parsing `retry-after` as a date. + retry_date_tuple = email.utils.parsedate_tz(retry_header) + if retry_date_tuple is None: + return None + + retry_date = email.utils.mktime_tz(retry_date_tuple) + return float(retry_date - time.time()) + + def _calculate_retry_timeout( + self, + remaining_retries: int, + options: FinalRequestOptions, + response_headers: Optional[httpx.Headers] = None, + ) -> float: + max_retries = options.get_max_retries(self.max_retries) + + # If the API asks us to wait a certain amount of time (and it's a reasonable amount), just do what it says. + retry_after = self._parse_retry_after_header(response_headers) + if retry_after is not None and 0 < retry_after <= 60: + return retry_after + + nb_retries = max_retries - remaining_retries + + # Apply exponential backoff, but not more than the max. + sleep_seconds = min(INITIAL_RETRY_DELAY * pow(2.0, nb_retries), MAX_RETRY_DELAY) + + # Apply some jitter, plus-or-minus half a second. + jitter = 1 - 0.25 * random() + timeout = sleep_seconds * jitter + return timeout if timeout >= 0 else 0 + + def _should_retry(self, response: httpx.Response) -> bool: + # Note: this is not a standard header + should_retry_header = response.headers.get("x-should-retry") + + # If the server explicitly says whether or not to retry, obey. + if should_retry_header == "true": + log.debug("Retrying as header `x-should-retry` is set to `true`") + return True + if should_retry_header == "false": + log.debug("Not retrying as header `x-should-retry` is set to `false`") + return False + + # Retry on request timeouts. 
+ if response.status_code == 408: + log.debug("Retrying due to status code %i", response.status_code) + return True + + # Retry on lock timeouts. + if response.status_code == 409: + log.debug("Retrying due to status code %i", response.status_code) + return True + + # Retry on rate limits. + if response.status_code == 429: + log.debug("Retrying due to status code %i", response.status_code) + return True + + # Retry internal errors. + if response.status_code >= 500: + log.debug("Retrying due to status code %i", response.status_code) + return True + + log.debug("Not retrying") + return False + + def _idempotency_key(self) -> str: + return f"stainless-python-retry-{uuid.uuid4()}" + + +class _DefaultHttpxClient(httpx.Client): + def __init__(self, **kwargs: Any) -> None: + kwargs.setdefault("timeout", DEFAULT_TIMEOUT) + kwargs.setdefault("limits", DEFAULT_CONNECTION_LIMITS) + kwargs.setdefault("follow_redirects", True) + super().__init__(**kwargs) + + +if TYPE_CHECKING: + DefaultHttpxClient = httpx.Client + """An alias to `httpx.Client` that provides the same defaults that this SDK + uses internally. + + This is useful because overriding the `http_client` with your own instance of + `httpx.Client` will result in httpx's defaults being used, not ours. 
+ """ +else: + DefaultHttpxClient = _DefaultHttpxClient + + +class SyncHttpxClientWrapper(DefaultHttpxClient): + def __del__(self) -> None: + try: + self.close() + except Exception: + pass + + +class SyncAPIClient(BaseClient[httpx.Client, Stream[Any]]): + _client: httpx.Client + _default_stream_cls: type[Stream[Any]] | None = None + + def __init__( + self, + *, + version: str, + base_url: str | URL, + max_retries: int = DEFAULT_MAX_RETRIES, + timeout: float | Timeout | None | NotGiven = NOT_GIVEN, + transport: Transport | None = None, + proxies: ProxiesTypes | None = None, + limits: Limits | None = None, + http_client: httpx.Client | None = None, + custom_headers: Mapping[str, str] | None = None, + custom_query: Mapping[str, object] | None = None, + _strict_response_validation: bool, + ) -> None: + if limits is not None: + warnings.warn( + "The `connection_pool_limits` argument is deprecated. The `http_client` argument should be passed instead", + category=DeprecationWarning, + stacklevel=3, + ) + if http_client is not None: + raise ValueError("The `http_client` argument is mutually exclusive with `connection_pool_limits`") + else: + limits = DEFAULT_CONNECTION_LIMITS + + if transport is not None: + warnings.warn( + "The `transport` argument is deprecated. The `http_client` argument should be passed instead", + category=DeprecationWarning, + stacklevel=3, + ) + if http_client is not None: + raise ValueError("The `http_client` argument is mutually exclusive with `transport`") + + if proxies is not None: + warnings.warn( + "The `proxies` argument is deprecated. The `http_client` argument should be passed instead", + category=DeprecationWarning, + stacklevel=3, + ) + if http_client is not None: + raise ValueError("The `http_client` argument is mutually exclusive with `proxies`") + + if not is_given(timeout): + # if the user passed in a custom http client with a non-default + # timeout set then we use that timeout. 
+ # + # note: there is an edge case here where the user passes in a client + # where they've explicitly set the timeout to match the default timeout + # as this check is structural, meaning that we'll think they didn't + # pass in a timeout and will ignore it + if http_client and http_client.timeout != HTTPX_DEFAULT_TIMEOUT: + timeout = http_client.timeout + else: + timeout = DEFAULT_TIMEOUT + + if http_client is not None and not isinstance(http_client, httpx.Client): # pyright: ignore[reportUnnecessaryIsInstance] + raise TypeError( + f"Invalid `http_client` argument; Expected an instance of `httpx.Client` but got {type(http_client)}" + ) + + super().__init__( + version=version, + limits=limits, + # cast to a valid type because mypy doesn't understand our type narrowing + timeout=cast(Timeout, timeout), + proxies=proxies, + base_url=base_url, + transport=transport, + max_retries=max_retries, + custom_query=custom_query, + custom_headers=custom_headers, + _strict_response_validation=_strict_response_validation, + ) + self._client = http_client or SyncHttpxClientWrapper( + base_url=base_url, + # cast to a valid type because mypy doesn't understand our type narrowing + timeout=cast(Timeout, timeout), + proxies=proxies, + transport=transport, + limits=limits, + follow_redirects=True, + ) + + def is_closed(self) -> bool: + return self._client.is_closed + + def close(self) -> None: + """Close the underlying HTTPX client. + + The client will *not* be usable after this. 
+ """ + # If an error is thrown while constructing a client, self._client + # may not be present + if hasattr(self, "_client"): + self._client.close() + + def __enter__(self: _T) -> _T: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + def _prepare_options( + self, + options: FinalRequestOptions, # noqa: ARG002 + ) -> None: + """Hook for mutating the given options""" + return None + + def _prepare_request( + self, + request: httpx.Request, # noqa: ARG002 + ) -> None: + """This method is used as a callback for mutating the `Request` object + after it has been constructed. + This is useful for cases where you want to add certain headers based off of + the request properties, e.g. `url`, `method` etc. + """ + return None + + @overload + def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + remaining_retries: Optional[int] = None, + *, + stream: Literal[True], + stream_cls: Type[_StreamT], + ) -> _StreamT: + ... + + @overload + def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + remaining_retries: Optional[int] = None, + *, + stream: Literal[False] = False, + ) -> ResponseT: + ... + + @overload + def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + remaining_retries: Optional[int] = None, + *, + stream: bool = False, + stream_cls: Type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: + ... 
+ + def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + remaining_retries: Optional[int] = None, + *, + stream: bool = False, + stream_cls: type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: + return self._request( + cast_to=cast_to, + options=options, + stream=stream, + stream_cls=stream_cls, + remaining_retries=remaining_retries, + ) + + def _request( + self, + *, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + remaining_retries: int | None, + stream: bool, + stream_cls: type[_StreamT] | None, + ) -> ResponseT | _StreamT: + cast_to = self._maybe_override_cast_to(cast_to, options) + self._prepare_options(options) + + retries = self._remaining_retries(remaining_retries, options) + request = self._build_request(options) + self._prepare_request(request) + + kwargs: HttpxSendArgs = {} + if self.custom_auth is not None: + kwargs["auth"] = self.custom_auth + + log.debug("Sending HTTP Request: %s %s", request.method, request.url) + + try: + response = self._client.send( + request, + stream=stream or self._should_stream_response_body(request=request), + **kwargs, + ) + except httpx.TimeoutException as err: + log.debug("Encountered httpx.TimeoutException", exc_info=True) + + if retries > 0: + return self._retry_request( + options, + cast_to, + retries, + stream=stream, + stream_cls=stream_cls, + response_headers=None, + ) + + log.debug("Raising timeout error") + raise APITimeoutError(request=request) from err + except Exception as err: + log.debug("Encountered Exception", exc_info=True) + + if retries > 0: + return self._retry_request( + options, + cast_to, + retries, + stream=stream, + stream_cls=stream_cls, + response_headers=None, + ) + + log.debug("Raising connection error") + raise APIConnectionError(request=request) from err + + log.debug( + 'HTTP Response: %s %s "%i %s" %s', + request.method, + request.url, + response.status_code, + response.reason_phrase, + response.headers, + ) + log.debug("request_id: %s", 
response.headers.get("x-request-id")) + + try: + response.raise_for_status() + except httpx.HTTPStatusError as err: # thrown on 4xx and 5xx status code + log.debug("Encountered httpx.HTTPStatusError", exc_info=True) + + if retries > 0 and self._should_retry(err.response): + err.response.close() + return self._retry_request( + options, + cast_to, + retries, + err.response.headers, + stream=stream, + stream_cls=stream_cls, + ) + + # If the response is streamed then we need to explicitly read the response + # to completion before attempting to access the response text. + if not err.response.is_closed: + err.response.read() + + log.debug("Re-raising status error") + raise self._make_status_error_from_response(err.response) from None + + return self._process_response( + cast_to=cast_to, + options=options, + response=response, + stream=stream, + stream_cls=stream_cls, + ) + + def _retry_request( + self, + options: FinalRequestOptions, + cast_to: Type[ResponseT], + remaining_retries: int, + response_headers: httpx.Headers | None, + *, + stream: bool, + stream_cls: type[_StreamT] | None, + ) -> ResponseT | _StreamT: + remaining = remaining_retries - 1 + if remaining == 1: + log.debug("1 retry left") + else: + log.debug("%i retries left", remaining) + + timeout = self._calculate_retry_timeout(remaining, options, response_headers) + log.info("Retrying request to %s in %f seconds", options.url, timeout) + + # In a synchronous context we are blocking the entire thread. Up to the library user to run the client in a + # different thread if necessary. 
+ time.sleep(timeout) + + return self._request( + options=options, + cast_to=cast_to, + remaining_retries=remaining, + stream=stream, + stream_cls=stream_cls, + ) + + def _process_response( + self, + *, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + response: httpx.Response, + stream: bool, + stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None, + ) -> ResponseT: + if response.request.headers.get(RAW_RESPONSE_HEADER) == "true": + return cast( + ResponseT, + LegacyAPIResponse( + raw=response, + client=self, + cast_to=cast_to, + stream=stream, + stream_cls=stream_cls, + options=options, + ), + ) + + origin = get_origin(cast_to) or cast_to + + if inspect.isclass(origin) and issubclass(origin, BaseAPIResponse): + if not issubclass(origin, APIResponse): + raise TypeError(f"API Response types must subclass {APIResponse}; Received {origin}") + + response_cls = cast("type[BaseAPIResponse[Any]]", cast_to) + return cast( + ResponseT, + response_cls( + raw=response, + client=self, + cast_to=extract_response_type(response_cls), + stream=stream, + stream_cls=stream_cls, + options=options, + ), + ) + + if cast_to == httpx.Response: + return cast(ResponseT, response) + + api_response = APIResponse( + raw=response, + client=self, + cast_to=cast("type[ResponseT]", cast_to), # pyright: ignore[reportUnnecessaryCast] + stream=stream, + stream_cls=stream_cls, + options=options, + ) + if bool(response.request.headers.get(RAW_RESPONSE_HEADER)): + return cast(ResponseT, api_response) + + return api_response.parse() + + def _request_api_list( + self, + model: Type[object], + page: Type[SyncPageT], + options: FinalRequestOptions, + ) -> SyncPageT: + def _parser(resp: SyncPageT) -> SyncPageT: + resp._set_private_attributes( + client=self, + model=model, + options=options, + ) + return resp + + options.post_parser = _parser + + return self.request(page, options, stream=False) + + @overload + def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: 
RequestOptions = {}, + stream: Literal[False] = False, + ) -> ResponseT: + ... + + @overload + def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: Literal[True], + stream_cls: type[_StreamT], + ) -> _StreamT: + ... + + @overload + def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: bool, + stream_cls: type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: + ... + + def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: bool = False, + stream_cls: type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: + opts = FinalRequestOptions.construct(method="get", url=path, **options) + # cast is required because mypy complains about returning Any even though + # it understands the type variables + return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)) + + @overload + def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + files: RequestFiles | None = None, + stream: Literal[False] = False, + ) -> ResponseT: + ... + + @overload + def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + files: RequestFiles | None = None, + stream: Literal[True], + stream_cls: type[_StreamT], + ) -> _StreamT: + ... + + @overload + def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + files: RequestFiles | None = None, + stream: bool, + stream_cls: type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: + ... 
+ + def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + files: RequestFiles | None = None, + stream: bool = False, + stream_cls: type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: + opts = FinalRequestOptions.construct( + method="post", url=path, json_data=body, files=to_httpx_files(files), **options + ) + return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)) + + def patch( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct(method="patch", url=path, json_data=body, **options) + return self.request(cast_to, opts) + + def put( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct( + method="put", url=path, json_data=body, files=to_httpx_files(files), **options + ) + return self.request(cast_to, opts) + + def delete( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, **options) + return self.request(cast_to, opts) + + def get_api_list( + self, + path: str, + *, + model: Type[object], + page: Type[SyncPageT], + body: Body | None = None, + options: RequestOptions = {}, + method: str = "get", + ) -> SyncPageT: + opts = FinalRequestOptions.construct(method=method, url=path, json_data=body, **options) + return self._request_api_list(model, page, opts) + + +class _DefaultAsyncHttpxClient(httpx.AsyncClient): + def __init__(self, **kwargs: Any) -> None: + kwargs.setdefault("timeout", DEFAULT_TIMEOUT) + kwargs.setdefault("limits", DEFAULT_CONNECTION_LIMITS) + kwargs.setdefault("follow_redirects", True) + super().__init__(**kwargs) + 
+ +if TYPE_CHECKING: + DefaultAsyncHttpxClient = httpx.AsyncClient + """An alias to `httpx.AsyncClient` that provides the same defaults that this SDK + uses internally. + + This is useful because overriding the `http_client` with your own instance of + `httpx.AsyncClient` will result in httpx's defaults being used, not ours. + """ +else: + DefaultAsyncHttpxClient = _DefaultAsyncHttpxClient + + +class AsyncHttpxClientWrapper(DefaultAsyncHttpxClient): + def __del__(self) -> None: + try: + # TODO(someday): support non asyncio runtimes here + asyncio.get_running_loop().create_task(self.aclose()) + except Exception: + pass + + +class AsyncAPIClient(BaseClient[httpx.AsyncClient, AsyncStream[Any]]): + _client: httpx.AsyncClient + _default_stream_cls: type[AsyncStream[Any]] | None = None + + def __init__( + self, + *, + version: str, + base_url: str | URL, + _strict_response_validation: bool, + max_retries: int = DEFAULT_MAX_RETRIES, + timeout: float | Timeout | None | NotGiven = NOT_GIVEN, + transport: AsyncTransport | None = None, + proxies: ProxiesTypes | None = None, + limits: Limits | None = None, + http_client: httpx.AsyncClient | None = None, + custom_headers: Mapping[str, str] | None = None, + custom_query: Mapping[str, object] | None = None, + ) -> None: + if limits is not None: + warnings.warn( + "The `connection_pool_limits` argument is deprecated. The `http_client` argument should be passed instead", + category=DeprecationWarning, + stacklevel=3, + ) + if http_client is not None: + raise ValueError("The `http_client` argument is mutually exclusive with `connection_pool_limits`") + else: + limits = DEFAULT_CONNECTION_LIMITS + + if transport is not None: + warnings.warn( + "The `transport` argument is deprecated. 
The `http_client` argument should be passed instead", + category=DeprecationWarning, + stacklevel=3, + ) + if http_client is not None: + raise ValueError("The `http_client` argument is mutually exclusive with `transport`") + + if proxies is not None: + warnings.warn( + "The `proxies` argument is deprecated. The `http_client` argument should be passed instead", + category=DeprecationWarning, + stacklevel=3, + ) + if http_client is not None: + raise ValueError("The `http_client` argument is mutually exclusive with `proxies`") + + if not is_given(timeout): + # if the user passed in a custom http client with a non-default + # timeout set then we use that timeout. + # + # note: there is an edge case here where the user passes in a client + # where they've explicitly set the timeout to match the default timeout + # as this check is structural, meaning that we'll think they didn't + # pass in a timeout and will ignore it + if http_client and http_client.timeout != HTTPX_DEFAULT_TIMEOUT: + timeout = http_client.timeout + else: + timeout = DEFAULT_TIMEOUT + + if http_client is not None and not isinstance(http_client, httpx.AsyncClient): # pyright: ignore[reportUnnecessaryIsInstance] + raise TypeError( + f"Invalid `http_client` argument; Expected an instance of `httpx.AsyncClient` but got {type(http_client)}" + ) + + super().__init__( + version=version, + base_url=base_url, + limits=limits, + # cast to a valid type because mypy doesn't understand our type narrowing + timeout=cast(Timeout, timeout), + proxies=proxies, + transport=transport, + max_retries=max_retries, + custom_query=custom_query, + custom_headers=custom_headers, + _strict_response_validation=_strict_response_validation, + ) + self._client = http_client or AsyncHttpxClientWrapper( + base_url=base_url, + # cast to a valid type because mypy doesn't understand our type narrowing + timeout=cast(Timeout, timeout), + proxies=proxies, + transport=transport, + limits=limits, + follow_redirects=True, + ) + + def 
is_closed(self) -> bool: + return self._client.is_closed + + async def close(self) -> None: + """Close the underlying HTTPX client. + + The client will *not* be usable after this. + """ + await self._client.aclose() + + async def __aenter__(self: _T) -> _T: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + await self.close() + + async def _prepare_options( + self, + options: FinalRequestOptions, # noqa: ARG002 + ) -> None: + """Hook for mutating the given options""" + return None + + async def _prepare_request( + self, + request: httpx.Request, # noqa: ARG002 + ) -> None: + """This method is used as a callback for mutating the `Request` object + after it has been constructed. + This is useful for cases where you want to add certain headers based off of + the request properties, e.g. `url`, `method` etc. + """ + return None + + @overload + async def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: Literal[False] = False, + remaining_retries: Optional[int] = None, + ) -> ResponseT: + ... + + @overload + async def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: Literal[True], + stream_cls: type[_AsyncStreamT], + remaining_retries: Optional[int] = None, + ) -> _AsyncStreamT: + ... + + @overload + async def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: bool, + stream_cls: type[_AsyncStreamT] | None = None, + remaining_retries: Optional[int] = None, + ) -> ResponseT | _AsyncStreamT: + ... 
+ + async def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: bool = False, + stream_cls: type[_AsyncStreamT] | None = None, + remaining_retries: Optional[int] = None, + ) -> ResponseT | _AsyncStreamT: + return await self._request( + cast_to=cast_to, + options=options, + stream=stream, + stream_cls=stream_cls, + remaining_retries=remaining_retries, + ) + + async def _request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: bool, + stream_cls: type[_AsyncStreamT] | None, + remaining_retries: int | None, + ) -> ResponseT | _AsyncStreamT: + cast_to = self._maybe_override_cast_to(cast_to, options) + await self._prepare_options(options) + + retries = self._remaining_retries(remaining_retries, options) + request = self._build_request(options) + await self._prepare_request(request) + + kwargs: HttpxSendArgs = {} + if self.custom_auth is not None: + kwargs["auth"] = self.custom_auth + + try: + response = await self._client.send( + request, + stream=stream or self._should_stream_response_body(request=request), + **kwargs, + ) + except httpx.TimeoutException as err: + log.debug("Encountered httpx.TimeoutException", exc_info=True) + + if retries > 0: + return await self._retry_request( + options, + cast_to, + retries, + stream=stream, + stream_cls=stream_cls, + response_headers=None, + ) + + log.debug("Raising timeout error") + raise APITimeoutError(request=request) from err + except Exception as err: + log.debug("Encountered Exception", exc_info=True) + + if retries > 0: + return await self._retry_request( + options, + cast_to, + retries, + stream=stream, + stream_cls=stream_cls, + response_headers=None, + ) + + log.debug("Raising connection error") + raise APIConnectionError(request=request) from err + + log.debug( + 'HTTP Request: %s %s "%i %s"', request.method, request.url, response.status_code, response.reason_phrase + ) + + try: + response.raise_for_status() + except httpx.HTTPStatusError as 
err: # thrown on 4xx and 5xx status code + log.debug("Encountered httpx.HTTPStatusError", exc_info=True) + + if retries > 0 and self._should_retry(err.response): + await err.response.aclose() + return await self._retry_request( + options, + cast_to, + retries, + err.response.headers, + stream=stream, + stream_cls=stream_cls, + ) + + # If the response is streamed then we need to explicitly read the response + # to completion before attempting to access the response text. + if not err.response.is_closed: + await err.response.aread() + + log.debug("Re-raising status error") + raise self._make_status_error_from_response(err.response) from None + + return await self._process_response( + cast_to=cast_to, + options=options, + response=response, + stream=stream, + stream_cls=stream_cls, + ) + + async def _retry_request( + self, + options: FinalRequestOptions, + cast_to: Type[ResponseT], + remaining_retries: int, + response_headers: httpx.Headers | None, + *, + stream: bool, + stream_cls: type[_AsyncStreamT] | None, + ) -> ResponseT | _AsyncStreamT: + remaining = remaining_retries - 1 + if remaining == 1: + log.debug("1 retry left") + else: + log.debug("%i retries left", remaining) + + timeout = self._calculate_retry_timeout(remaining, options, response_headers) + log.info("Retrying request to %s in %f seconds", options.url, timeout) + + await anyio.sleep(timeout) + + return await self._request( + options=options, + cast_to=cast_to, + remaining_retries=remaining, + stream=stream, + stream_cls=stream_cls, + ) + + async def _process_response( + self, + *, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + response: httpx.Response, + stream: bool, + stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None, + ) -> ResponseT: + if response.request.headers.get(RAW_RESPONSE_HEADER) == "true": + return cast( + ResponseT, + LegacyAPIResponse( + raw=response, + client=self, + cast_to=cast_to, + stream=stream, + stream_cls=stream_cls, + options=options, + ), + ) + + 
origin = get_origin(cast_to) or cast_to + + if inspect.isclass(origin) and issubclass(origin, BaseAPIResponse): + if not issubclass(origin, AsyncAPIResponse): + raise TypeError(f"API Response types must subclass {AsyncAPIResponse}; Received {origin}") + + response_cls = cast("type[BaseAPIResponse[Any]]", cast_to) + return cast( + "ResponseT", + response_cls( + raw=response, + client=self, + cast_to=extract_response_type(response_cls), + stream=stream, + stream_cls=stream_cls, + options=options, + ), + ) + + if cast_to == httpx.Response: + return cast(ResponseT, response) + + api_response = AsyncAPIResponse( + raw=response, + client=self, + cast_to=cast("type[ResponseT]", cast_to), # pyright: ignore[reportUnnecessaryCast] + stream=stream, + stream_cls=stream_cls, + options=options, + ) + if bool(response.request.headers.get(RAW_RESPONSE_HEADER)): + return cast(ResponseT, api_response) + + return await api_response.parse() + + def _request_api_list( + self, + model: Type[_T], + page: Type[AsyncPageT], + options: FinalRequestOptions, + ) -> AsyncPaginator[_T, AsyncPageT]: + return AsyncPaginator(client=self, options=options, page_cls=page, model=model) + + @overload + async def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: Literal[False] = False, + ) -> ResponseT: + ... + + @overload + async def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: Literal[True], + stream_cls: type[_AsyncStreamT], + ) -> _AsyncStreamT: + ... + + @overload + async def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: bool, + stream_cls: type[_AsyncStreamT] | None = None, + ) -> ResponseT | _AsyncStreamT: + ... 
+ + async def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: bool = False, + stream_cls: type[_AsyncStreamT] | None = None, + ) -> ResponseT | _AsyncStreamT: + opts = FinalRequestOptions.construct(method="get", url=path, **options) + return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls) + + @overload + async def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + stream: Literal[False] = False, + ) -> ResponseT: + ... + + @overload + async def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + stream: Literal[True], + stream_cls: type[_AsyncStreamT], + ) -> _AsyncStreamT: + ... + + @overload + async def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + stream: bool, + stream_cls: type[_AsyncStreamT] | None = None, + ) -> ResponseT | _AsyncStreamT: + ... 
+ + async def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + stream: bool = False, + stream_cls: type[_AsyncStreamT] | None = None, + ) -> ResponseT | _AsyncStreamT: + opts = FinalRequestOptions.construct( + method="post", url=path, json_data=body, files=await async_to_httpx_files(files), **options + ) + return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls) + + async def patch( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct(method="patch", url=path, json_data=body, **options) + return await self.request(cast_to, opts) + + async def put( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct( + method="put", url=path, json_data=body, files=await async_to_httpx_files(files), **options + ) + return await self.request(cast_to, opts) + + async def delete( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, **options) + return await self.request(cast_to, opts) + + def get_api_list( + self, + path: str, + *, + model: Type[_T], + page: Type[AsyncPageT], + body: Body | None = None, + options: RequestOptions = {}, + method: str = "get", + ) -> AsyncPaginator[_T, AsyncPageT]: + opts = FinalRequestOptions.construct(method=method, url=path, json_data=body, **options) + return self._request_api_list(model, page, opts) + + +def make_request_options( + *, + query: Query | None = None, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + idempotency_key: str 
| None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + post_parser: PostParser | NotGiven = NOT_GIVEN, +) -> RequestOptions: + """Create a dict of type RequestOptions without keys of NotGiven values.""" + options: RequestOptions = {} + if extra_headers is not None: + options["headers"] = extra_headers + + if extra_body is not None: + options["extra_json"] = cast(AnyMapping, extra_body) + + if query is not None: + options["params"] = query + + if extra_query is not None: + options["params"] = {**options.get("params", {}), **extra_query} + + if not isinstance(timeout, NotGiven): + options["timeout"] = timeout + + if idempotency_key is not None: + options["idempotency_key"] = idempotency_key + + if is_given(post_parser): + # internal + options["post_parser"] = post_parser # type: ignore + + return options + + +class OtherPlatform: + def __init__(self, name: str) -> None: + self.name = name + + @override + def __str__(self) -> str: + return f"Other:{self.name}" + + +Platform = Union[ + OtherPlatform, + Literal[ + "MacOS", + "Linux", + "Windows", + "FreeBSD", + "OpenBSD", + "iOS", + "Android", + "Unknown", + ], +] + + +def get_platform() -> Platform: + try: + system = platform.system().lower() + platform_name = platform.platform().lower() + except Exception: + return "Unknown" + + if "iphone" in platform_name or "ipad" in platform_name: + # Tested using Python3IDE on an iPhone 11 and Pythonista on an iPad 7 + # system is Darwin and platform_name is a string like: + # - Darwin-21.6.0-iPhone12,1-64bit + # - Darwin-21.6.0-iPad7,11-64bit + return "iOS" + + if system == "darwin": + return "MacOS" + + if system == "windows": + return "Windows" + + if "android" in platform_name: + # Tested using Pydroid 3 + # system is Linux and platform_name is a string like 'Linux-5.10.81-android12-9-00001-geba40aecb3b7-ab8534902-aarch64-with-libc' + return "Android" + + if system == "linux": + # https://distro.readthedocs.io/en/latest/#distro.id + distro_id = 
distro.id() + if distro_id == "freebsd": + return "FreeBSD" + + if distro_id == "openbsd": + return "OpenBSD" + + return "Linux" + + if platform_name: + return OtherPlatform(platform_name) + + return "Unknown" + + +@lru_cache(maxsize=None) +def platform_headers(version: str) -> Dict[str, str]: + return { + "X-Stainless-Lang": "python", + "X-Stainless-Package-Version": version, + "X-Stainless-OS": str(get_platform()), + "X-Stainless-Arch": str(get_architecture()), + "X-Stainless-Runtime": get_python_runtime(), + "X-Stainless-Runtime-Version": get_python_version(), + } + + +class OtherArch: + def __init__(self, name: str) -> None: + self.name = name + + @override + def __str__(self) -> str: + return f"other:{self.name}" + + +Arch = Union[OtherArch, Literal["x32", "x64", "arm", "arm64", "unknown"]] + + +def get_python_runtime() -> str: + try: + return platform.python_implementation() + except Exception: + return "unknown" + + +def get_python_version() -> str: + try: + return platform.python_version() + except Exception: + return "unknown" + + +def get_architecture() -> Arch: + try: + python_bitness, _ = platform.architecture() + machine = platform.machine().lower() + except Exception: + return "unknown" + + if machine in ("arm64", "aarch64"): + return "arm64" + + # TODO: untested + if machine == "arm": + return "arm" + + if machine == "x86_64": + return "x64" + + # TODO: untested + if python_bitness == "32bit": + return "x32" + + if machine: + return OtherArch(machine) + + return "unknown" + + +def _merge_mappings( + obj1: Mapping[_T_co, Union[_T, Omit]], + obj2: Mapping[_T_co, Union[_T, Omit]], +) -> Dict[_T_co, _T]: + """Merge two mappings of the same type, removing any values that are instances of `Omit`. + + In cases with duplicate keys the second mapping takes precedence. 
+ """ + merged = {**obj1, **obj2} + return {key: value for key, value in merged.items() if not isinstance(value, Omit)} diff --git a/.venv/Lib/site-packages/openai/_client.py b/.venv/Lib/site-packages/openai/_client.py new file mode 100644 index 00000000..8f3060c6 --- /dev/null +++ b/.venv/Lib/site-packages/openai/_client.py @@ -0,0 +1,535 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, Union, Mapping +from typing_extensions import Self, override + +import httpx + +from . import resources, _exceptions +from ._qs import Querystring +from ._types import ( + NOT_GIVEN, + Omit, + Timeout, + NotGiven, + Transport, + ProxiesTypes, + RequestOptions, +) +from ._utils import ( + is_given, + is_mapping, + get_async_library, +) +from ._version import __version__ +from ._streaming import Stream as Stream, AsyncStream as AsyncStream +from ._exceptions import OpenAIError, APIStatusError +from ._base_client import ( + DEFAULT_MAX_RETRIES, + SyncAPIClient, + AsyncAPIClient, +) + +__all__ = [ + "Timeout", + "Transport", + "ProxiesTypes", + "RequestOptions", + "resources", + "OpenAI", + "AsyncOpenAI", + "Client", + "AsyncClient", +] + + +class OpenAI(SyncAPIClient): + completions: resources.Completions + chat: resources.Chat + embeddings: resources.Embeddings + files: resources.Files + images: resources.Images + audio: resources.Audio + moderations: resources.Moderations + models: resources.Models + fine_tuning: resources.FineTuning + beta: resources.Beta + batches: resources.Batches + with_raw_response: OpenAIWithRawResponse + with_streaming_response: OpenAIWithStreamedResponse + + # client options + api_key: str + organization: str | None + project: str | None + + def __init__( + self, + *, + api_key: str | None = None, + organization: str | None = None, + project: str | None = None, + base_url: str | httpx.URL | None = None, + timeout: Union[float, Timeout, None, 
NotGiven] = NOT_GIVEN, + max_retries: int = DEFAULT_MAX_RETRIES, + default_headers: Mapping[str, str] | None = None, + default_query: Mapping[str, object] | None = None, + # Configure a custom httpx client. + # We provide a `DefaultHttpxClient` class that you can pass to retain the default values we use for `limits`, `timeout` & `follow_redirects`. + # See the [httpx documentation](https://www.python-httpx.org/api/#client) for more details. + http_client: httpx.Client | None = None, + # Enable or disable schema validation for data returned by the API. + # When enabled an error APIResponseValidationError is raised + # if the API responds with invalid data for the expected schema. + # + # This parameter may be removed or changed in the future. + # If you rely on this feature, please open a GitHub issue + # outlining your use-case to help us decide if it should be + # part of our public interface in the future. + _strict_response_validation: bool = False, + ) -> None: + """Construct a new synchronous openai client instance. 
+ + This automatically infers the following arguments from their corresponding environment variables if they are not provided: + - `api_key` from `OPENAI_API_KEY` + - `organization` from `OPENAI_ORG_ID` + - `project` from `OPENAI_PROJECT_ID` + """ + if api_key is None: + api_key = os.environ.get("OPENAI_API_KEY") + if api_key is None: + raise OpenAIError( + "The api_key client option must be set either by passing api_key to the client or by setting the OPENAI_API_KEY environment variable" + ) + self.api_key = api_key + + if organization is None: + organization = os.environ.get("OPENAI_ORG_ID") + self.organization = organization + + if project is None: + project = os.environ.get("OPENAI_PROJECT_ID") + self.project = project + + if base_url is None: + base_url = os.environ.get("OPENAI_BASE_URL") + if base_url is None: + base_url = f"https://api.openai.com/v1" + + super().__init__( + version=__version__, + base_url=base_url, + max_retries=max_retries, + timeout=timeout, + http_client=http_client, + custom_headers=default_headers, + custom_query=default_query, + _strict_response_validation=_strict_response_validation, + ) + + self._default_stream_cls = Stream + + self.completions = resources.Completions(self) + self.chat = resources.Chat(self) + self.embeddings = resources.Embeddings(self) + self.files = resources.Files(self) + self.images = resources.Images(self) + self.audio = resources.Audio(self) + self.moderations = resources.Moderations(self) + self.models = resources.Models(self) + self.fine_tuning = resources.FineTuning(self) + self.beta = resources.Beta(self) + self.batches = resources.Batches(self) + self.with_raw_response = OpenAIWithRawResponse(self) + self.with_streaming_response = OpenAIWithStreamedResponse(self) + + @property + @override + def qs(self) -> Querystring: + return Querystring(array_format="comma") + + @property + @override + def auth_headers(self) -> dict[str, str]: + api_key = self.api_key + return {"Authorization": f"Bearer {api_key}"} + + 
@property + @override + def default_headers(self) -> dict[str, str | Omit]: + return { + **super().default_headers, + "X-Stainless-Async": "false", + "OpenAI-Organization": self.organization if self.organization is not None else Omit(), + "OpenAI-Project": self.project if self.project is not None else Omit(), + **self._custom_headers, + } + + def copy( + self, + *, + api_key: str | None = None, + organization: str | None = None, + project: str | None = None, + base_url: str | httpx.URL | None = None, + timeout: float | Timeout | None | NotGiven = NOT_GIVEN, + http_client: httpx.Client | None = None, + max_retries: int | NotGiven = NOT_GIVEN, + default_headers: Mapping[str, str] | None = None, + set_default_headers: Mapping[str, str] | None = None, + default_query: Mapping[str, object] | None = None, + set_default_query: Mapping[str, object] | None = None, + _extra_kwargs: Mapping[str, Any] = {}, + ) -> Self: + """ + Create a new client instance re-using the same options given to the current client with optional overriding. 
+ """ + if default_headers is not None and set_default_headers is not None: + raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive") + + if default_query is not None and set_default_query is not None: + raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive") + + headers = self._custom_headers + if default_headers is not None: + headers = {**headers, **default_headers} + elif set_default_headers is not None: + headers = set_default_headers + + params = self._custom_query + if default_query is not None: + params = {**params, **default_query} + elif set_default_query is not None: + params = set_default_query + + http_client = http_client or self._client + return self.__class__( + api_key=api_key or self.api_key, + organization=organization or self.organization, + project=project or self.project, + base_url=base_url or self.base_url, + timeout=self.timeout if isinstance(timeout, NotGiven) else timeout, + http_client=http_client, + max_retries=max_retries if is_given(max_retries) else self.max_retries, + default_headers=headers, + default_query=params, + **_extra_kwargs, + ) + + # Alias for `copy` for nicer inline usage, e.g. + # client.with_options(timeout=10).foo.create(...) 
+ with_options = copy + + @override + def _make_status_error( + self, + err_msg: str, + *, + body: object, + response: httpx.Response, + ) -> APIStatusError: + data = body.get("error", body) if is_mapping(body) else body + if response.status_code == 400: + return _exceptions.BadRequestError(err_msg, response=response, body=data) + + if response.status_code == 401: + return _exceptions.AuthenticationError(err_msg, response=response, body=data) + + if response.status_code == 403: + return _exceptions.PermissionDeniedError(err_msg, response=response, body=data) + + if response.status_code == 404: + return _exceptions.NotFoundError(err_msg, response=response, body=data) + + if response.status_code == 409: + return _exceptions.ConflictError(err_msg, response=response, body=data) + + if response.status_code == 422: + return _exceptions.UnprocessableEntityError(err_msg, response=response, body=data) + + if response.status_code == 429: + return _exceptions.RateLimitError(err_msg, response=response, body=data) + + if response.status_code >= 500: + return _exceptions.InternalServerError(err_msg, response=response, body=data) + return APIStatusError(err_msg, response=response, body=data) + + +class AsyncOpenAI(AsyncAPIClient): + completions: resources.AsyncCompletions + chat: resources.AsyncChat + embeddings: resources.AsyncEmbeddings + files: resources.AsyncFiles + images: resources.AsyncImages + audio: resources.AsyncAudio + moderations: resources.AsyncModerations + models: resources.AsyncModels + fine_tuning: resources.AsyncFineTuning + beta: resources.AsyncBeta + batches: resources.AsyncBatches + with_raw_response: AsyncOpenAIWithRawResponse + with_streaming_response: AsyncOpenAIWithStreamedResponse + + # client options + api_key: str + organization: str | None + project: str | None + + def __init__( + self, + *, + api_key: str | None = None, + organization: str | None = None, + project: str | None = None, + base_url: str | httpx.URL | None = None, + timeout: Union[float, 
Timeout, None, NotGiven] = NOT_GIVEN, + max_retries: int = DEFAULT_MAX_RETRIES, + default_headers: Mapping[str, str] | None = None, + default_query: Mapping[str, object] | None = None, + # Configure a custom httpx client. + # We provide a `DefaultAsyncHttpxClient` class that you can pass to retain the default values we use for `limits`, `timeout` & `follow_redirects`. + # See the [httpx documentation](https://www.python-httpx.org/api/#asyncclient) for more details. + http_client: httpx.AsyncClient | None = None, + # Enable or disable schema validation for data returned by the API. + # When enabled an error APIResponseValidationError is raised + # if the API responds with invalid data for the expected schema. + # + # This parameter may be removed or changed in the future. + # If you rely on this feature, please open a GitHub issue + # outlining your use-case to help us decide if it should be + # part of our public interface in the future. + _strict_response_validation: bool = False, + ) -> None: + """Construct a new async openai client instance. 
+ + This automatically infers the following arguments from their corresponding environment variables if they are not provided: + - `api_key` from `OPENAI_API_KEY` + - `organization` from `OPENAI_ORG_ID` + - `project` from `OPENAI_PROJECT_ID` + """ + if api_key is None: + api_key = os.environ.get("OPENAI_API_KEY") + if api_key is None: + raise OpenAIError( + "The api_key client option must be set either by passing api_key to the client or by setting the OPENAI_API_KEY environment variable" + ) + self.api_key = api_key + + if organization is None: + organization = os.environ.get("OPENAI_ORG_ID") + self.organization = organization + + if project is None: + project = os.environ.get("OPENAI_PROJECT_ID") + self.project = project + + if base_url is None: + base_url = os.environ.get("OPENAI_BASE_URL") + if base_url is None: + base_url = f"https://api.openai.com/v1" + + super().__init__( + version=__version__, + base_url=base_url, + max_retries=max_retries, + timeout=timeout, + http_client=http_client, + custom_headers=default_headers, + custom_query=default_query, + _strict_response_validation=_strict_response_validation, + ) + + self._default_stream_cls = AsyncStream + + self.completions = resources.AsyncCompletions(self) + self.chat = resources.AsyncChat(self) + self.embeddings = resources.AsyncEmbeddings(self) + self.files = resources.AsyncFiles(self) + self.images = resources.AsyncImages(self) + self.audio = resources.AsyncAudio(self) + self.moderations = resources.AsyncModerations(self) + self.models = resources.AsyncModels(self) + self.fine_tuning = resources.AsyncFineTuning(self) + self.beta = resources.AsyncBeta(self) + self.batches = resources.AsyncBatches(self) + self.with_raw_response = AsyncOpenAIWithRawResponse(self) + self.with_streaming_response = AsyncOpenAIWithStreamedResponse(self) + + @property + @override + def qs(self) -> Querystring: + return Querystring(array_format="comma") + + @property + @override + def auth_headers(self) -> dict[str, str]: + 
api_key = self.api_key + return {"Authorization": f"Bearer {api_key}"} + + @property + @override + def default_headers(self) -> dict[str, str | Omit]: + return { + **super().default_headers, + "X-Stainless-Async": f"async:{get_async_library()}", + "OpenAI-Organization": self.organization if self.organization is not None else Omit(), + "OpenAI-Project": self.project if self.project is not None else Omit(), + **self._custom_headers, + } + + def copy( + self, + *, + api_key: str | None = None, + organization: str | None = None, + project: str | None = None, + base_url: str | httpx.URL | None = None, + timeout: float | Timeout | None | NotGiven = NOT_GIVEN, + http_client: httpx.AsyncClient | None = None, + max_retries: int | NotGiven = NOT_GIVEN, + default_headers: Mapping[str, str] | None = None, + set_default_headers: Mapping[str, str] | None = None, + default_query: Mapping[str, object] | None = None, + set_default_query: Mapping[str, object] | None = None, + _extra_kwargs: Mapping[str, Any] = {}, + ) -> Self: + """ + Create a new client instance re-using the same options given to the current client with optional overriding. 
+ """ + if default_headers is not None and set_default_headers is not None: + raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive") + + if default_query is not None and set_default_query is not None: + raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive") + + headers = self._custom_headers + if default_headers is not None: + headers = {**headers, **default_headers} + elif set_default_headers is not None: + headers = set_default_headers + + params = self._custom_query + if default_query is not None: + params = {**params, **default_query} + elif set_default_query is not None: + params = set_default_query + + http_client = http_client or self._client + return self.__class__( + api_key=api_key or self.api_key, + organization=organization or self.organization, + project=project or self.project, + base_url=base_url or self.base_url, + timeout=self.timeout if isinstance(timeout, NotGiven) else timeout, + http_client=http_client, + max_retries=max_retries if is_given(max_retries) else self.max_retries, + default_headers=headers, + default_query=params, + **_extra_kwargs, + ) + + # Alias for `copy` for nicer inline usage, e.g. + # client.with_options(timeout=10).foo.create(...) 
+ with_options = copy + + @override + def _make_status_error( + self, + err_msg: str, + *, + body: object, + response: httpx.Response, + ) -> APIStatusError: + data = body.get("error", body) if is_mapping(body) else body + if response.status_code == 400: + return _exceptions.BadRequestError(err_msg, response=response, body=data) + + if response.status_code == 401: + return _exceptions.AuthenticationError(err_msg, response=response, body=data) + + if response.status_code == 403: + return _exceptions.PermissionDeniedError(err_msg, response=response, body=data) + + if response.status_code == 404: + return _exceptions.NotFoundError(err_msg, response=response, body=data) + + if response.status_code == 409: + return _exceptions.ConflictError(err_msg, response=response, body=data) + + if response.status_code == 422: + return _exceptions.UnprocessableEntityError(err_msg, response=response, body=data) + + if response.status_code == 429: + return _exceptions.RateLimitError(err_msg, response=response, body=data) + + if response.status_code >= 500: + return _exceptions.InternalServerError(err_msg, response=response, body=data) + return APIStatusError(err_msg, response=response, body=data) + + +class OpenAIWithRawResponse: + def __init__(self, client: OpenAI) -> None: + self.completions = resources.CompletionsWithRawResponse(client.completions) + self.chat = resources.ChatWithRawResponse(client.chat) + self.embeddings = resources.EmbeddingsWithRawResponse(client.embeddings) + self.files = resources.FilesWithRawResponse(client.files) + self.images = resources.ImagesWithRawResponse(client.images) + self.audio = resources.AudioWithRawResponse(client.audio) + self.moderations = resources.ModerationsWithRawResponse(client.moderations) + self.models = resources.ModelsWithRawResponse(client.models) + self.fine_tuning = resources.FineTuningWithRawResponse(client.fine_tuning) + self.beta = resources.BetaWithRawResponse(client.beta) + self.batches = 
resources.BatchesWithRawResponse(client.batches) + + +class AsyncOpenAIWithRawResponse: + def __init__(self, client: AsyncOpenAI) -> None: + self.completions = resources.AsyncCompletionsWithRawResponse(client.completions) + self.chat = resources.AsyncChatWithRawResponse(client.chat) + self.embeddings = resources.AsyncEmbeddingsWithRawResponse(client.embeddings) + self.files = resources.AsyncFilesWithRawResponse(client.files) + self.images = resources.AsyncImagesWithRawResponse(client.images) + self.audio = resources.AsyncAudioWithRawResponse(client.audio) + self.moderations = resources.AsyncModerationsWithRawResponse(client.moderations) + self.models = resources.AsyncModelsWithRawResponse(client.models) + self.fine_tuning = resources.AsyncFineTuningWithRawResponse(client.fine_tuning) + self.beta = resources.AsyncBetaWithRawResponse(client.beta) + self.batches = resources.AsyncBatchesWithRawResponse(client.batches) + + +class OpenAIWithStreamedResponse: + def __init__(self, client: OpenAI) -> None: + self.completions = resources.CompletionsWithStreamingResponse(client.completions) + self.chat = resources.ChatWithStreamingResponse(client.chat) + self.embeddings = resources.EmbeddingsWithStreamingResponse(client.embeddings) + self.files = resources.FilesWithStreamingResponse(client.files) + self.images = resources.ImagesWithStreamingResponse(client.images) + self.audio = resources.AudioWithStreamingResponse(client.audio) + self.moderations = resources.ModerationsWithStreamingResponse(client.moderations) + self.models = resources.ModelsWithStreamingResponse(client.models) + self.fine_tuning = resources.FineTuningWithStreamingResponse(client.fine_tuning) + self.beta = resources.BetaWithStreamingResponse(client.beta) + self.batches = resources.BatchesWithStreamingResponse(client.batches) + + +class AsyncOpenAIWithStreamedResponse: + def __init__(self, client: AsyncOpenAI) -> None: + self.completions = resources.AsyncCompletionsWithStreamingResponse(client.completions) + 
self.chat = resources.AsyncChatWithStreamingResponse(client.chat) + self.embeddings = resources.AsyncEmbeddingsWithStreamingResponse(client.embeddings) + self.files = resources.AsyncFilesWithStreamingResponse(client.files) + self.images = resources.AsyncImagesWithStreamingResponse(client.images) + self.audio = resources.AsyncAudioWithStreamingResponse(client.audio) + self.moderations = resources.AsyncModerationsWithStreamingResponse(client.moderations) + self.models = resources.AsyncModelsWithStreamingResponse(client.models) + self.fine_tuning = resources.AsyncFineTuningWithStreamingResponse(client.fine_tuning) + self.beta = resources.AsyncBetaWithStreamingResponse(client.beta) + self.batches = resources.AsyncBatchesWithStreamingResponse(client.batches) + + +Client = OpenAI + +AsyncClient = AsyncOpenAI diff --git a/.venv/Lib/site-packages/openai/_compat.py b/.venv/Lib/site-packages/openai/_compat.py new file mode 100644 index 00000000..74c7639b --- /dev/null +++ b/.venv/Lib/site-packages/openai/_compat.py @@ -0,0 +1,222 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Union, Generic, TypeVar, Callable, cast, overload +from datetime import date, datetime +from typing_extensions import Self + +import pydantic +from pydantic.fields import FieldInfo + +from ._types import StrBytesIntFloat + +_T = TypeVar("_T") +_ModelT = TypeVar("_ModelT", bound=pydantic.BaseModel) + +# --------------- Pydantic v2 compatibility --------------- + +# Pyright incorrectly reports some of our functions as overriding a method when they don't +# pyright: reportIncompatibleMethodOverride=false + +PYDANTIC_V2 = pydantic.VERSION.startswith("2.") + +# v1 re-exports +if TYPE_CHECKING: + + def parse_date(value: date | StrBytesIntFloat) -> date: # noqa: ARG001 + ... + + def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime: # noqa: ARG001 + ... + + def get_args(t: type[Any]) -> tuple[Any, ...]: # noqa: ARG001 + ... 
+ + def is_union(tp: type[Any] | None) -> bool: # noqa: ARG001 + ... + + def get_origin(t: type[Any]) -> type[Any] | None: # noqa: ARG001 + ... + + def is_literal_type(type_: type[Any]) -> bool: # noqa: ARG001 + ... + + def is_typeddict(type_: type[Any]) -> bool: # noqa: ARG001 + ... + +else: + if PYDANTIC_V2: + from pydantic.v1.typing import ( + get_args as get_args, + is_union as is_union, + get_origin as get_origin, + is_typeddict as is_typeddict, + is_literal_type as is_literal_type, + ) + from pydantic.v1.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime + else: + from pydantic.typing import ( + get_args as get_args, + is_union as is_union, + get_origin as get_origin, + is_typeddict as is_typeddict, + is_literal_type as is_literal_type, + ) + from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime + + +# refactored config +if TYPE_CHECKING: + from pydantic import ConfigDict as ConfigDict +else: + if PYDANTIC_V2: + from pydantic import ConfigDict + else: + # TODO: provide an error message here? 
+ ConfigDict = None + + +# renamed methods / properties +def parse_obj(model: type[_ModelT], value: object) -> _ModelT: + if PYDANTIC_V2: + return model.model_validate(value) + else: + return cast(_ModelT, model.parse_obj(value)) # pyright: ignore[reportDeprecated, reportUnnecessaryCast] + + +def field_is_required(field: FieldInfo) -> bool: + if PYDANTIC_V2: + return field.is_required() + return field.required # type: ignore + + +def field_get_default(field: FieldInfo) -> Any: + value = field.get_default() + if PYDANTIC_V2: + from pydantic_core import PydanticUndefined + + if value == PydanticUndefined: + return None + return value + return value + + +def field_outer_type(field: FieldInfo) -> Any: + if PYDANTIC_V2: + return field.annotation + return field.outer_type_ # type: ignore + + +def get_model_config(model: type[pydantic.BaseModel]) -> Any: + if PYDANTIC_V2: + return model.model_config + return model.__config__ # type: ignore + + +def get_model_fields(model: type[pydantic.BaseModel]) -> dict[str, FieldInfo]: + if PYDANTIC_V2: + return model.model_fields + return model.__fields__ # type: ignore + + +def model_copy(model: _ModelT) -> _ModelT: + if PYDANTIC_V2: + return model.model_copy() + return model.copy() # type: ignore + + +def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str: + if PYDANTIC_V2: + return model.model_dump_json(indent=indent) + return model.json(indent=indent) # type: ignore + + +def model_dump( + model: pydantic.BaseModel, + *, + exclude_unset: bool = False, + exclude_defaults: bool = False, +) -> dict[str, Any]: + if PYDANTIC_V2: + return model.model_dump( + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + ) + return cast( + "dict[str, Any]", + model.dict( # pyright: ignore[reportDeprecated, reportUnnecessaryCast] + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + ), + ) + + +def model_parse(model: type[_ModelT], data: Any) -> _ModelT: + if PYDANTIC_V2: + return 
model.model_validate(data) + return model.parse_obj(data) # pyright: ignore[reportDeprecated] + + +# generic models +if TYPE_CHECKING: + + class GenericModel(pydantic.BaseModel): + ... + +else: + if PYDANTIC_V2: + # there no longer needs to be a distinction in v2 but + # we still have to create our own subclass to avoid + # inconsistent MRO ordering errors + class GenericModel(pydantic.BaseModel): + ... + + else: + import pydantic.generics + + class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): + ... + + +# cached properties +if TYPE_CHECKING: + cached_property = property + + # we define a separate type (copied from typeshed) + # that represents that `cached_property` is `set`able + # at runtime, which differs from `@property`. + # + # this is a separate type as editors likely special case + # `@property` and we don't want to cause issues just to have + # more helpful internal types. + + class typed_cached_property(Generic[_T]): + func: Callable[[Any], _T] + attrname: str | None + + def __init__(self, func: Callable[[Any], _T]) -> None: + ... + + @overload + def __get__(self, instance: None, owner: type[Any] | None = None) -> Self: + ... + + @overload + def __get__(self, instance: object, owner: type[Any] | None = None) -> _T: + ... + + def __get__(self, instance: object, owner: type[Any] | None = None) -> _T | Self: + raise NotImplementedError() + + def __set_name__(self, owner: type[Any], name: str) -> None: + ... + + # __set__ is not defined at runtime, but @cached_property is designed to be settable + def __set__(self, instance: object, value: _T) -> None: + ... 
+else: + try: + from functools import cached_property as cached_property + except ImportError: + from cached_property import cached_property as cached_property + + typed_cached_property = cached_property diff --git a/.venv/Lib/site-packages/openai/_constants.py b/.venv/Lib/site-packages/openai/_constants.py new file mode 100644 index 00000000..3f82bed0 --- /dev/null +++ b/.venv/Lib/site-packages/openai/_constants.py @@ -0,0 +1,14 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import httpx + +RAW_RESPONSE_HEADER = "X-Stainless-Raw-Response" +OVERRIDE_CAST_TO_HEADER = "____stainless_override_cast_to" + +# default timeout is 10 minutes +DEFAULT_TIMEOUT = httpx.Timeout(timeout=600.0, connect=5.0) +DEFAULT_MAX_RETRIES = 2 +DEFAULT_CONNECTION_LIMITS = httpx.Limits(max_connections=1000, max_keepalive_connections=100) + +INITIAL_RETRY_DELAY = 0.5 +MAX_RETRY_DELAY = 8.0 diff --git a/.venv/Lib/site-packages/openai/_exceptions.py b/.venv/Lib/site-packages/openai/_exceptions.py new file mode 100644 index 00000000..f6731cfa --- /dev/null +++ b/.venv/Lib/site-packages/openai/_exceptions.py @@ -0,0 +1,127 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Any, Optional, cast +from typing_extensions import Literal + +import httpx + +from ._utils import is_dict +from ._models import construct_type + +__all__ = [ + "BadRequestError", + "AuthenticationError", + "PermissionDeniedError", + "NotFoundError", + "ConflictError", + "UnprocessableEntityError", + "RateLimitError", + "InternalServerError", +] + + +class OpenAIError(Exception): + pass + + +class APIError(OpenAIError): + message: str + request: httpx.Request + + body: object | None + """The API response body. + + If the API responded with a valid JSON structure then this property will be the + decoded result. + + If it isn't a valid JSON structure then this will be the raw response. 
+ + If there was no response associated with this error then it will be `None`. + """ + + code: Optional[str] = None + param: Optional[str] = None + type: Optional[str] + + def __init__(self, message: str, request: httpx.Request, *, body: object | None) -> None: + super().__init__(message) + self.request = request + self.message = message + self.body = body + + if is_dict(body): + self.code = cast(Any, construct_type(type_=Optional[str], value=body.get("code"))) + self.param = cast(Any, construct_type(type_=Optional[str], value=body.get("param"))) + self.type = cast(Any, construct_type(type_=str, value=body.get("type"))) + else: + self.code = None + self.param = None + self.type = None + + +class APIResponseValidationError(APIError): + response: httpx.Response + status_code: int + + def __init__(self, response: httpx.Response, body: object | None, *, message: str | None = None) -> None: + super().__init__(message or "Data returned by API invalid for expected schema.", response.request, body=body) + self.response = response + self.status_code = response.status_code + + +class APIStatusError(APIError): + """Raised when an API response has a status code of 4xx or 5xx.""" + + response: httpx.Response + status_code: int + request_id: str | None + + def __init__(self, message: str, *, response: httpx.Response, body: object | None) -> None: + super().__init__(message, response.request, body=body) + self.response = response + self.status_code = response.status_code + self.request_id = response.headers.get("x-request-id") + + +class APIConnectionError(APIError): + def __init__(self, *, message: str = "Connection error.", request: httpx.Request) -> None: + super().__init__(message, request, body=None) + + +class APITimeoutError(APIConnectionError): + def __init__(self, request: httpx.Request) -> None: + super().__init__(message="Request timed out.", request=request) + + +class BadRequestError(APIStatusError): + status_code: Literal[400] = 400 # pyright: 
ignore[reportIncompatibleVariableOverride] + + +class AuthenticationError(APIStatusError): + status_code: Literal[401] = 401 # pyright: ignore[reportIncompatibleVariableOverride] + + +class PermissionDeniedError(APIStatusError): + status_code: Literal[403] = 403 # pyright: ignore[reportIncompatibleVariableOverride] + + +class NotFoundError(APIStatusError): + status_code: Literal[404] = 404 # pyright: ignore[reportIncompatibleVariableOverride] + + +class ConflictError(APIStatusError): + status_code: Literal[409] = 409 # pyright: ignore[reportIncompatibleVariableOverride] + + +class UnprocessableEntityError(APIStatusError): + status_code: Literal[422] = 422 # pyright: ignore[reportIncompatibleVariableOverride] + + +class RateLimitError(APIStatusError): + status_code: Literal[429] = 429 # pyright: ignore[reportIncompatibleVariableOverride] + + +class InternalServerError(APIStatusError): + pass diff --git a/.venv/Lib/site-packages/openai/_extras/__init__.py b/.venv/Lib/site-packages/openai/_extras/__init__.py new file mode 100644 index 00000000..864dac41 --- /dev/null +++ b/.venv/Lib/site-packages/openai/_extras/__init__.py @@ -0,0 +1,2 @@ +from .numpy_proxy import numpy as numpy, has_numpy as has_numpy +from .pandas_proxy import pandas as pandas diff --git a/.venv/Lib/site-packages/openai/_extras/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/_extras/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..63122aa3 Binary files /dev/null and b/.venv/Lib/site-packages/openai/_extras/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/_extras/__pycache__/_common.cpython-311.pyc b/.venv/Lib/site-packages/openai/_extras/__pycache__/_common.cpython-311.pyc new file mode 100644 index 00000000..f00e2d04 Binary files /dev/null and b/.venv/Lib/site-packages/openai/_extras/__pycache__/_common.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/openai/_extras/__pycache__/numpy_proxy.cpython-311.pyc b/.venv/Lib/site-packages/openai/_extras/__pycache__/numpy_proxy.cpython-311.pyc new file mode 100644 index 00000000..98778cef Binary files /dev/null and b/.venv/Lib/site-packages/openai/_extras/__pycache__/numpy_proxy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/_extras/__pycache__/pandas_proxy.cpython-311.pyc b/.venv/Lib/site-packages/openai/_extras/__pycache__/pandas_proxy.cpython-311.pyc new file mode 100644 index 00000000..256716d8 Binary files /dev/null and b/.venv/Lib/site-packages/openai/_extras/__pycache__/pandas_proxy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/_extras/_common.py b/.venv/Lib/site-packages/openai/_extras/_common.py new file mode 100644 index 00000000..6e71720e --- /dev/null +++ b/.venv/Lib/site-packages/openai/_extras/_common.py @@ -0,0 +1,21 @@ +from .._exceptions import OpenAIError + +INSTRUCTIONS = """ + +OpenAI error: + + missing `{library}` + +This feature requires additional dependencies: + + $ pip install openai[{extra}] + +""" + + +def format_instructions(*, library: str, extra: str) -> str: + return INSTRUCTIONS.format(library=library, extra=extra) + + +class MissingDependencyError(OpenAIError): + pass diff --git a/.venv/Lib/site-packages/openai/_extras/numpy_proxy.py b/.venv/Lib/site-packages/openai/_extras/numpy_proxy.py new file mode 100644 index 00000000..27880bf1 --- /dev/null +++ b/.venv/Lib/site-packages/openai/_extras/numpy_proxy.py @@ -0,0 +1,37 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any +from typing_extensions import override + +from .._utils import LazyProxy +from ._common import MissingDependencyError, format_instructions + +if TYPE_CHECKING: + import numpy as numpy + + +NUMPY_INSTRUCTIONS = format_instructions(library="numpy", extra="datalib") + + +class NumpyProxy(LazyProxy[Any]): + @override + def __load__(self) -> Any: + try: + import numpy + except 
ImportError as err: + raise MissingDependencyError(NUMPY_INSTRUCTIONS) from err + + return numpy + + +if not TYPE_CHECKING: + numpy = NumpyProxy() + + +def has_numpy() -> bool: + try: + import numpy # noqa: F401 # pyright: ignore[reportUnusedImport] + except ImportError: + return False + + return True diff --git a/.venv/Lib/site-packages/openai/_extras/pandas_proxy.py b/.venv/Lib/site-packages/openai/_extras/pandas_proxy.py new file mode 100644 index 00000000..686377ba --- /dev/null +++ b/.venv/Lib/site-packages/openai/_extras/pandas_proxy.py @@ -0,0 +1,28 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any +from typing_extensions import override + +from .._utils import LazyProxy +from ._common import MissingDependencyError, format_instructions + +if TYPE_CHECKING: + import pandas as pandas + + +PANDAS_INSTRUCTIONS = format_instructions(library="pandas", extra="datalib") + + +class PandasProxy(LazyProxy[Any]): + @override + def __load__(self) -> Any: + try: + import pandas + except ImportError as err: + raise MissingDependencyError(PANDAS_INSTRUCTIONS) from err + + return pandas + + +if not TYPE_CHECKING: + pandas = PandasProxy() diff --git a/.venv/Lib/site-packages/openai/_files.py b/.venv/Lib/site-packages/openai/_files.py new file mode 100644 index 00000000..ad7b668b --- /dev/null +++ b/.venv/Lib/site-packages/openai/_files.py @@ -0,0 +1,127 @@ +from __future__ import annotations + +import io +import os +import pathlib +from typing import overload +from typing_extensions import TypeGuard + +import anyio + +from ._types import ( + FileTypes, + FileContent, + RequestFiles, + HttpxFileTypes, + Base64FileInput, + HttpxFileContent, + HttpxRequestFiles, +) +from ._utils import is_tuple_t, is_mapping_t, is_sequence_t + + +def is_base64_file_input(obj: object) -> TypeGuard[Base64FileInput]: + return isinstance(obj, io.IOBase) or isinstance(obj, os.PathLike) + + +def is_file_content(obj: object) -> TypeGuard[FileContent]: + return ( + 
isinstance(obj, bytes) or isinstance(obj, tuple) or isinstance(obj, io.IOBase) or isinstance(obj, os.PathLike) + ) + + +def assert_is_file_content(obj: object, *, key: str | None = None) -> None: + if not is_file_content(obj): + prefix = f"Expected entry at `{key}`" if key is not None else f"Expected file input `{obj!r}`" + raise RuntimeError( + f"{prefix} to be bytes, an io.IOBase instance, PathLike or a tuple but received {type(obj)} instead. See https://github.com/openai/openai-python/tree/main#file-uploads" + ) from None + + +@overload +def to_httpx_files(files: None) -> None: + ... + + +@overload +def to_httpx_files(files: RequestFiles) -> HttpxRequestFiles: + ... + + +def to_httpx_files(files: RequestFiles | None) -> HttpxRequestFiles | None: + if files is None: + return None + + if is_mapping_t(files): + files = {key: _transform_file(file) for key, file in files.items()} + elif is_sequence_t(files): + files = [(key, _transform_file(file)) for key, file in files] + else: + raise TypeError(f"Unexpected file type input {type(files)}, expected mapping or sequence") + + return files + + +def _transform_file(file: FileTypes) -> HttpxFileTypes: + if is_file_content(file): + if isinstance(file, os.PathLike): + path = pathlib.Path(file) + return (path.name, path.read_bytes()) + + return file + + if is_tuple_t(file): + return (file[0], _read_file_content(file[1]), *file[2:]) + + raise TypeError(f"Expected file types input to be a FileContent type or to be a tuple") + + +def _read_file_content(file: FileContent) -> HttpxFileContent: + if isinstance(file, os.PathLike): + return pathlib.Path(file).read_bytes() + return file + + +@overload +async def async_to_httpx_files(files: None) -> None: + ... + + +@overload +async def async_to_httpx_files(files: RequestFiles) -> HttpxRequestFiles: + ... 
+ + +async def async_to_httpx_files(files: RequestFiles | None) -> HttpxRequestFiles | None: + if files is None: + return None + + if is_mapping_t(files): + files = {key: await _async_transform_file(file) for key, file in files.items()} + elif is_sequence_t(files): + files = [(key, await _async_transform_file(file)) for key, file in files] + else: + raise TypeError("Unexpected file type input {type(files)}, expected mapping or sequence") + + return files + + +async def _async_transform_file(file: FileTypes) -> HttpxFileTypes: + if is_file_content(file): + if isinstance(file, os.PathLike): + path = anyio.Path(file) + return (path.name, await path.read_bytes()) + + return file + + if is_tuple_t(file): + return (file[0], await _async_read_file_content(file[1]), *file[2:]) + + raise TypeError(f"Expected file types input to be a FileContent type or to be a tuple") + + +async def _async_read_file_content(file: FileContent) -> HttpxFileContent: + if isinstance(file, os.PathLike): + return await anyio.Path(file).read_bytes() + + return file diff --git a/.venv/Lib/site-packages/openai/_legacy_response.py b/.venv/Lib/site-packages/openai/_legacy_response.py new file mode 100644 index 00000000..1de906b1 --- /dev/null +++ b/.venv/Lib/site-packages/openai/_legacy_response.py @@ -0,0 +1,460 @@ +from __future__ import annotations + +import os +import inspect +import logging +import datetime +import functools +from typing import TYPE_CHECKING, Any, Union, Generic, TypeVar, Callable, Iterator, AsyncIterator, cast, overload +from typing_extensions import Awaitable, ParamSpec, override, deprecated, get_origin + +import anyio +import httpx +import pydantic + +from ._types import NoneType +from ._utils import is_given, extract_type_arg, is_annotated_type +from ._models import BaseModel, is_basemodel +from ._constants import RAW_RESPONSE_HEADER +from ._streaming import Stream, AsyncStream, is_stream_class_type, extract_stream_chunk_type +from ._exceptions import 
APIResponseValidationError + +if TYPE_CHECKING: + from ._models import FinalRequestOptions + from ._base_client import BaseClient + + +P = ParamSpec("P") +R = TypeVar("R") +_T = TypeVar("_T") + +log: logging.Logger = logging.getLogger(__name__) + + +class LegacyAPIResponse(Generic[R]): + """This is a legacy class as it will be replaced by `APIResponse` + and `AsyncAPIResponse` in the `_response.py` file in the next major + release. + + For the sync client this will mostly be the same with the exception + of `content` & `text` will be methods instead of properties. In the + async client, all methods will be async. + + A migration script will be provided & the migration in general should + be smooth. + """ + + _cast_to: type[R] + _client: BaseClient[Any, Any] + _parsed_by_type: dict[type[Any], Any] + _stream: bool + _stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None + _options: FinalRequestOptions + + http_response: httpx.Response + + def __init__( + self, + *, + raw: httpx.Response, + cast_to: type[R], + client: BaseClient[Any, Any], + stream: bool, + stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None, + options: FinalRequestOptions, + ) -> None: + self._cast_to = cast_to + self._client = client + self._parsed_by_type = {} + self._stream = stream + self._stream_cls = stream_cls + self._options = options + self.http_response = raw + + @property + def request_id(self) -> str | None: + return self.http_response.headers.get("x-request-id") # type: ignore[no-any-return] + + @overload + def parse(self, *, to: type[_T]) -> _T: + ... + + @overload + def parse(self) -> R: + ... + + def parse(self, *, to: type[_T] | None = None) -> R | _T: + """Returns the rich python representation of this response's data. + + NOTE: For the async client: this will become a coroutine in the next major version. + + For lower-level control, see `.read()`, `.json()`, `.iter_bytes()`. 
+ + You can customise the type that the response is parsed into through + the `to` argument, e.g. + + ```py + from openai import BaseModel + + + class MyModel(BaseModel): + foo: str + + + obj = response.parse(to=MyModel) + print(obj.foo) + ``` + + We support parsing: + - `BaseModel` + - `dict` + - `list` + - `Union` + - `str` + - `int` + - `float` + - `httpx.Response` + """ + cache_key = to if to is not None else self._cast_to + cached = self._parsed_by_type.get(cache_key) + if cached is not None: + return cached # type: ignore[no-any-return] + + parsed = self._parse(to=to) + if is_given(self._options.post_parser): + parsed = self._options.post_parser(parsed) + + self._parsed_by_type[cache_key] = parsed + return parsed + + @property + def headers(self) -> httpx.Headers: + return self.http_response.headers + + @property + def http_request(self) -> httpx.Request: + return self.http_response.request + + @property + def status_code(self) -> int: + return self.http_response.status_code + + @property + def url(self) -> httpx.URL: + return self.http_response.url + + @property + def method(self) -> str: + return self.http_request.method + + @property + def content(self) -> bytes: + """Return the binary response content. + + NOTE: this will be removed in favour of `.read()` in the + next major version. + """ + return self.http_response.content + + @property + def text(self) -> str: + """Return the decoded response content. + + NOTE: this will be turned into a method in the next major version. 
+ """ + return self.http_response.text + + @property + def http_version(self) -> str: + return self.http_response.http_version + + @property + def is_closed(self) -> bool: + return self.http_response.is_closed + + @property + def elapsed(self) -> datetime.timedelta: + """The time taken for the complete request/response cycle to complete.""" + return self.http_response.elapsed + + def _parse(self, *, to: type[_T] | None = None) -> R | _T: + # unwrap `Annotated[T, ...]` -> `T` + if to and is_annotated_type(to): + to = extract_type_arg(to, 0) + + if self._stream: + if to: + if not is_stream_class_type(to): + raise TypeError(f"Expected custom parse type to be a subclass of {Stream} or {AsyncStream}") + + return cast( + _T, + to( + cast_to=extract_stream_chunk_type( + to, + failure_message="Expected custom stream type to be passed with a type argument, e.g. Stream[ChunkType]", + ), + response=self.http_response, + client=cast(Any, self._client), + ), + ) + + if self._stream_cls: + return cast( + R, + self._stream_cls( + cast_to=extract_stream_chunk_type(self._stream_cls), + response=self.http_response, + client=cast(Any, self._client), + ), + ) + + stream_cls = cast("type[Stream[Any]] | type[AsyncStream[Any]] | None", self._client._default_stream_cls) + if stream_cls is None: + raise MissingStreamClassError() + + return cast( + R, + stream_cls( + cast_to=self._cast_to, + response=self.http_response, + client=cast(Any, self._client), + ), + ) + + cast_to = to if to is not None else self._cast_to + + # unwrap `Annotated[T, ...]` -> `T` + if is_annotated_type(cast_to): + cast_to = extract_type_arg(cast_to, 0) + + if cast_to is NoneType: + return cast(R, None) + + response = self.http_response + if cast_to == str: + return cast(R, response.text) + + if cast_to == int: + return cast(R, int(response.text)) + + if cast_to == float: + return cast(R, float(response.text)) + + origin = get_origin(cast_to) or cast_to + + if inspect.isclass(origin) and issubclass(origin, 
HttpxBinaryResponseContent): + return cast(R, cast_to(response)) # type: ignore + + if origin == LegacyAPIResponse: + raise RuntimeError("Unexpected state - cast_to is `APIResponse`") + + if inspect.isclass(origin) and issubclass(origin, httpx.Response): + # Because of the invariance of our ResponseT TypeVar, users can subclass httpx.Response + # and pass that class to our request functions. We cannot change the variance to be either + # covariant or contravariant as that makes our usage of ResponseT illegal. We could construct + # the response class ourselves but that is something that should be supported directly in httpx + # as it would be easy to incorrectly construct the Response object due to the multitude of arguments. + if cast_to != httpx.Response: + raise ValueError(f"Subclasses of httpx.Response cannot be passed to `cast_to`") + return cast(R, response) + + if inspect.isclass(origin) and not issubclass(origin, BaseModel) and issubclass(origin, pydantic.BaseModel): + raise TypeError("Pydantic models must subclass our base model type, e.g. `from openai import BaseModel`") + + if ( + cast_to is not object + and not origin is list + and not origin is dict + and not origin is Union + and not issubclass(origin, BaseModel) + ): + raise RuntimeError( + f"Unsupported type, expected {cast_to} to be a subclass of {BaseModel}, {dict}, {list}, {Union}, {NoneType}, {str} or {httpx.Response}." + ) + + # split is required to handle cases where additional information is included + # in the response, e.g. 
application/json; charset=utf-8 + content_type, *_ = response.headers.get("content-type", "*").split(";") + if content_type != "application/json": + if is_basemodel(cast_to): + try: + data = response.json() + except Exception as exc: + log.debug("Could not read JSON from response data due to %s - %s", type(exc), exc) + else: + return self._client._process_response_data( + data=data, + cast_to=cast_to, # type: ignore + response=response, + ) + + if self._client._strict_response_validation: + raise APIResponseValidationError( + response=response, + message=f"Expected Content-Type response header to be `application/json` but received `{content_type}` instead.", + body=response.text, + ) + + # If the API responds with content that isn't JSON then we just return + # the (decoded) text without performing any parsing so that you can still + # handle the response however you need to. + return response.text # type: ignore + + data = response.json() + + return self._client._process_response_data( + data=data, + cast_to=cast_to, # type: ignore + response=response, + ) + + @override + def __repr__(self) -> str: + return f"" + + +class MissingStreamClassError(TypeError): + def __init__(self) -> None: + super().__init__( + "The `stream` argument was set to `True` but the `stream_cls` argument was not given. See `openai._streaming` for reference", + ) + + +def to_raw_response_wrapper(func: Callable[P, R]) -> Callable[P, LegacyAPIResponse[R]]: + """Higher order function that takes one of our bound API methods and wraps it + to support returning the raw `APIResponse` object directly. 
+ """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> LegacyAPIResponse[R]: + extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "true" + + kwargs["extra_headers"] = extra_headers + + return cast(LegacyAPIResponse[R], func(*args, **kwargs)) + + return wrapped + + +def async_to_raw_response_wrapper(func: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[LegacyAPIResponse[R]]]: + """Higher order function that takes one of our bound API methods and wraps it + to support returning the raw `APIResponse` object directly. + """ + + @functools.wraps(func) + async def wrapped(*args: P.args, **kwargs: P.kwargs) -> LegacyAPIResponse[R]: + extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "true" + + kwargs["extra_headers"] = extra_headers + + return cast(LegacyAPIResponse[R], await func(*args, **kwargs)) + + return wrapped + + +class HttpxBinaryResponseContent: + response: httpx.Response + + def __init__(self, response: httpx.Response) -> None: + self.response = response + + @property + def content(self) -> bytes: + return self.response.content + + @property + def text(self) -> str: + return self.response.text + + @property + def encoding(self) -> str | None: + return self.response.encoding + + @property + def charset_encoding(self) -> str | None: + return self.response.charset_encoding + + def json(self, **kwargs: Any) -> Any: + return self.response.json(**kwargs) + + def read(self) -> bytes: + return self.response.read() + + def iter_bytes(self, chunk_size: int | None = None) -> Iterator[bytes]: + return self.response.iter_bytes(chunk_size) + + def iter_text(self, chunk_size: int | None = None) -> Iterator[str]: + return self.response.iter_text(chunk_size) + + def iter_lines(self) -> Iterator[str]: + return self.response.iter_lines() + + def iter_raw(self, chunk_size: int | None = None) -> 
Iterator[bytes]: + return self.response.iter_raw(chunk_size) + + def write_to_file( + self, + file: str | os.PathLike[str], + ) -> None: + """Write the output to the given file. + + Accepts a filename or any path-like object, e.g. pathlib.Path + + Note: if you want to stream the data to the file instead of writing + all at once then you should use `.with_streaming_response` when making + the API request, e.g. `client.with_streaming_response.foo().stream_to_file('my_filename.txt')` + """ + with open(file, mode="wb") as f: + for data in self.response.iter_bytes(): + f.write(data) + + @deprecated( + "Due to a bug, this method doesn't actually stream the response content, `.with_streaming_response.method()` should be used instead" + ) + def stream_to_file( + self, + file: str | os.PathLike[str], + *, + chunk_size: int | None = None, + ) -> None: + with open(file, mode="wb") as f: + for data in self.response.iter_bytes(chunk_size): + f.write(data) + + def close(self) -> None: + return self.response.close() + + async def aread(self) -> bytes: + return await self.response.aread() + + async def aiter_bytes(self, chunk_size: int | None = None) -> AsyncIterator[bytes]: + return self.response.aiter_bytes(chunk_size) + + async def aiter_text(self, chunk_size: int | None = None) -> AsyncIterator[str]: + return self.response.aiter_text(chunk_size) + + async def aiter_lines(self) -> AsyncIterator[str]: + return self.response.aiter_lines() + + async def aiter_raw(self, chunk_size: int | None = None) -> AsyncIterator[bytes]: + return self.response.aiter_raw(chunk_size) + + @deprecated( + "Due to a bug, this method doesn't actually stream the response content, `.with_streaming_response.method()` should be used instead" + ) + async def astream_to_file( + self, + file: str | os.PathLike[str], + *, + chunk_size: int | None = None, + ) -> None: + path = anyio.Path(file) + async with await path.open(mode="wb") as f: + async for data in self.response.aiter_bytes(chunk_size): + await 
f.write(data) + + async def aclose(self) -> None: + return await self.response.aclose() diff --git a/.venv/Lib/site-packages/openai/_models.py b/.venv/Lib/site-packages/openai/_models.py new file mode 100644 index 00000000..ff3f54e2 --- /dev/null +++ b/.venv/Lib/site-packages/openai/_models.py @@ -0,0 +1,727 @@ +from __future__ import annotations + +import os +import inspect +from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, cast +from datetime import date, datetime +from typing_extensions import ( + Unpack, + Literal, + ClassVar, + Protocol, + Required, + TypedDict, + TypeGuard, + final, + override, + runtime_checkable, +) + +import pydantic +import pydantic.generics +from pydantic.fields import FieldInfo + +from ._types import ( + Body, + IncEx, + Query, + ModelT, + Headers, + Timeout, + NotGiven, + AnyMapping, + HttpxRequestFiles, +) +from ._utils import ( + PropertyInfo, + is_list, + is_given, + lru_cache, + is_mapping, + parse_date, + coerce_boolean, + parse_datetime, + strip_not_given, + extract_type_arg, + is_annotated_type, + strip_annotated_type, +) +from ._compat import ( + PYDANTIC_V2, + ConfigDict, + GenericModel as BaseGenericModel, + get_args, + is_union, + parse_obj, + get_origin, + is_literal_type, + get_model_config, + get_model_fields, + field_get_default, +) +from ._constants import RAW_RESPONSE_HEADER + +if TYPE_CHECKING: + from pydantic_core.core_schema import ModelField, ModelFieldsSchema + +__all__ = ["BaseModel", "GenericModel"] + +_T = TypeVar("_T") + + +@runtime_checkable +class _ConfigProtocol(Protocol): + allow_population_by_field_name: bool + + +class BaseModel(pydantic.BaseModel): + if PYDANTIC_V2: + model_config: ClassVar[ConfigDict] = ConfigDict( + extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true")) + ) + else: + + @property + @override + def model_fields_set(self) -> set[str]: + # a forwards-compat shim for pydantic v2 + return self.__fields_set__ # type: ignore + + 
class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated] + extra: Any = pydantic.Extra.allow # type: ignore + + def to_dict( + self, + *, + mode: Literal["json", "python"] = "python", + use_api_names: bool = True, + exclude_unset: bool = True, + exclude_defaults: bool = False, + exclude_none: bool = False, + warnings: bool = True, + ) -> dict[str, object]: + """Recursively generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + + By default, fields that were not set by the API will not be included, + and keys will match the API response, *not* the property names from the model. + + For example, if the API responds with `"fooBar": true` but we've defined a `foo_bar: bool` property, + the output will use the `"fooBar"` key (unless `use_api_names=False` is passed). + + Args: + mode: + If mode is 'json', the dictionary will only contain JSON serializable types. e.g. `datetime` will be turned into a string, `"2024-3-22T18:11:19.117000Z"`. + If mode is 'python', the dictionary may contain any Python objects. e.g. `datetime(2024, 3, 22)` + + use_api_names: Whether to use the key that the API responded with or the property name. Defaults to `True`. + exclude_unset: Whether to exclude fields that have not been explicitly set. + exclude_defaults: Whether to exclude fields that are set to their default value from the output. + exclude_none: Whether to exclude fields that have a value of `None` from the output. + warnings: Whether to log warnings when invalid fields are encountered. This is only supported in Pydantic v2. 
+ """ + return self.model_dump( + mode=mode, + by_alias=use_api_names, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + warnings=warnings, + ) + + def to_json( + self, + *, + indent: int | None = 2, + use_api_names: bool = True, + exclude_unset: bool = True, + exclude_defaults: bool = False, + exclude_none: bool = False, + warnings: bool = True, + ) -> str: + """Generates a JSON string representing this model as it would be received from or sent to the API (but with indentation). + + By default, fields that were not set by the API will not be included, + and keys will match the API response, *not* the property names from the model. + + For example, if the API responds with `"fooBar": true` but we've defined a `foo_bar: bool` property, + the output will use the `"fooBar"` key (unless `use_api_names=False` is passed). + + Args: + indent: Indentation to use in the JSON output. If `None` is passed, the output will be compact. Defaults to `2` + use_api_names: Whether to use the key that the API responded with or the property name. Defaults to `True`. + exclude_unset: Whether to exclude fields that have not been explicitly set. + exclude_defaults: Whether to exclude fields that have the default value. + exclude_none: Whether to exclude fields that have a value of `None`. + warnings: Whether to show any warnings that occurred during serialization. This is only supported in Pydantic v2. + """ + return self.model_dump_json( + indent=indent, + by_alias=use_api_names, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + warnings=warnings, + ) + + @override + def __str__(self) -> str: + # mypy complains about an invalid self arg + return f'{self.__repr_name__()}({self.__repr_str__(", ")})' # type: ignore[misc] + + # Override the 'construct' method in a way that supports recursive parsing without validation. 
+ # Based on https://github.com/samuelcolvin/pydantic/issues/1168#issuecomment-817742836. + @classmethod + @override + def construct( + cls: Type[ModelT], + _fields_set: set[str] | None = None, + **values: object, + ) -> ModelT: + m = cls.__new__(cls) + fields_values: dict[str, object] = {} + + config = get_model_config(cls) + populate_by_name = ( + config.allow_population_by_field_name + if isinstance(config, _ConfigProtocol) + else config.get("populate_by_name") + ) + + if _fields_set is None: + _fields_set = set() + + model_fields = get_model_fields(cls) + for name, field in model_fields.items(): + key = field.alias + if key is None or (key not in values and populate_by_name): + key = name + + if key in values: + fields_values[name] = _construct_field(value=values[key], field=field, key=key) + _fields_set.add(name) + else: + fields_values[name] = field_get_default(field) + + _extra = {} + for key, value in values.items(): + if key not in model_fields: + if PYDANTIC_V2: + _extra[key] = value + else: + _fields_set.add(key) + fields_values[key] = value + + object.__setattr__(m, "__dict__", fields_values) + + if PYDANTIC_V2: + # these properties are copied from Pydantic's `model_construct()` method + object.__setattr__(m, "__pydantic_private__", None) + object.__setattr__(m, "__pydantic_extra__", _extra) + object.__setattr__(m, "__pydantic_fields_set__", _fields_set) + else: + # init_private_attributes() does not exist in v2 + m._init_private_attributes() # type: ignore + + # copied from Pydantic v1's `construct()` method + object.__setattr__(m, "__fields_set__", _fields_set) + + return m + + if not TYPE_CHECKING: + # type checkers incorrectly complain about this assignment + # because the type signatures are technically different + # although not in practice + model_construct = construct + + if not PYDANTIC_V2: + # we define aliases for some of the new pydantic v2 methods so + # that we can just document these methods without having to specify + # a specific 
pydantic version as some users may not know which + # pydantic version they are currently using + + @override + def model_dump( + self, + *, + mode: Literal["json", "python"] | str = "python", + include: IncEx = None, + exclude: IncEx = None, + by_alias: bool = False, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + round_trip: bool = False, + warnings: bool = True, + ) -> dict[str, Any]: + """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump + + Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + + Args: + mode: The mode in which `to_python` should run. + If mode is 'json', the dictionary will only contain JSON serializable types. + If mode is 'python', the dictionary may contain any Python objects. + include: A list of fields to include in the output. + exclude: A list of fields to exclude from the output. + by_alias: Whether to use the field's alias in the dictionary key if defined. + exclude_unset: Whether to exclude fields that are unset or None from the output. + exclude_defaults: Whether to exclude fields that are set to their default value from the output. + exclude_none: Whether to exclude fields that have a value of `None` from the output. + round_trip: Whether to enable serialization and deserialization round-trip support. + warnings: Whether to log warnings when invalid fields are encountered. + + Returns: + A dictionary representation of the model. 
+ """ + if mode != "python": + raise ValueError("mode is only supported in Pydantic v2") + if round_trip != False: + raise ValueError("round_trip is only supported in Pydantic v2") + if warnings != True: + raise ValueError("warnings is only supported in Pydantic v2") + return super().dict( # pyright: ignore[reportDeprecated] + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + + @override + def model_dump_json( + self, + *, + indent: int | None = None, + include: IncEx = None, + exclude: IncEx = None, + by_alias: bool = False, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + round_trip: bool = False, + warnings: bool = True, + ) -> str: + """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump_json + + Generates a JSON representation of the model using Pydantic's `to_json` method. + + Args: + indent: Indentation to use in the JSON output. If None is passed, the output will be compact. + include: Field(s) to include in the JSON output. Can take either a string or set of strings. + exclude: Field(s) to exclude from the JSON output. Can take either a string or set of strings. + by_alias: Whether to serialize using field aliases. + exclude_unset: Whether to exclude fields that have not been explicitly set. + exclude_defaults: Whether to exclude fields that have the default value. + exclude_none: Whether to exclude fields that have a value of `None`. + round_trip: Whether to use serialization/deserialization between JSON and class instance. + warnings: Whether to show any warnings that occurred during serialization. + + Returns: + A JSON string representation of the model. 
+ """ + if round_trip != False: + raise ValueError("round_trip is only supported in Pydantic v2") + if warnings != True: + raise ValueError("warnings is only supported in Pydantic v2") + return super().json( # type: ignore[reportDeprecated] + indent=indent, + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + + +def _construct_field(value: object, field: FieldInfo, key: str) -> object: + if value is None: + return field_get_default(field) + + if PYDANTIC_V2: + type_ = field.annotation + else: + type_ = cast(type, field.outer_type_) # type: ignore + + if type_ is None: + raise RuntimeError(f"Unexpected field type is None for {key}") + + return construct_type(value=value, type_=type_) + + +def is_basemodel(type_: type) -> bool: + """Returns whether or not the given type is either a `BaseModel` or a union of `BaseModel`""" + if is_union(type_): + for variant in get_args(type_): + if is_basemodel(variant): + return True + + return False + + return is_basemodel_type(type_) + + +def is_basemodel_type(type_: type) -> TypeGuard[type[BaseModel] | type[GenericModel]]: + origin = get_origin(type_) or type_ + return issubclass(origin, BaseModel) or issubclass(origin, GenericModel) + + +def construct_type(*, value: object, type_: object) -> object: + """Loose coercion to the expected type with construction of nested values. + + If the given value does not match the expected type then it is returned as-is. + """ + # we allow `object` as the input type because otherwise, passing things like + # `Literal['value']` will be reported as a type error by type checkers + type_ = cast("type[object]", type_) + + # unwrap `Annotated[T, ...]` -> `T` + if is_annotated_type(type_): + meta: tuple[Any, ...] = get_args(type_)[1:] + type_ = extract_type_arg(type_, 0) + else: + meta = tuple() + + # we need to use the origin class for any types that are subscripted generics + # e.g. 
Dict[str, object] + origin = get_origin(type_) or type_ + args = get_args(type_) + + if is_union(origin): + try: + return validate_type(type_=cast("type[object]", type_), value=value) + except Exception: + pass + + # if the type is a discriminated union then we want to construct the right variant + # in the union, even if the data doesn't match exactly, otherwise we'd break code + # that relies on the constructed class types, e.g. + # + # class FooType: + # kind: Literal['foo'] + # value: str + # + # class BarType: + # kind: Literal['bar'] + # value: int + # + # without this block, if the data we get is something like `{'kind': 'bar', 'value': 'foo'}` then + # we'd end up constructing `FooType` when it should be `BarType`. + discriminator = _build_discriminated_union_meta(union=type_, meta_annotations=meta) + if discriminator and is_mapping(value): + variant_value = value.get(discriminator.field_alias_from or discriminator.field_name) + if variant_value and isinstance(variant_value, str): + variant_type = discriminator.mapping.get(variant_value) + if variant_type: + return construct_type(type_=variant_type, value=value) + + # if the data is not valid, use the first variant that doesn't fail while deserializing + for variant in args: + try: + return construct_type(value=value, type_=variant) + except Exception: + continue + + raise RuntimeError(f"Could not convert data into a valid instance of {type_}") + + if origin == dict: + if not is_mapping(value): + return value + + _, items_type = get_args(type_) # Dict[_, items_type] + return {key: construct_type(value=item, type_=items_type) for key, item in value.items()} + + if not is_literal_type(type_) and (issubclass(origin, BaseModel) or issubclass(origin, GenericModel)): + if is_list(value): + return [cast(Any, type_).construct(**entry) if is_mapping(entry) else entry for entry in value] + + if is_mapping(value): + if issubclass(type_, BaseModel): + return type_.construct(**value) # type: ignore[arg-type] + + return 
cast(Any, type_).construct(**value) + + if origin == list: + if not is_list(value): + return value + + inner_type = args[0] # List[inner_type] + return [construct_type(value=entry, type_=inner_type) for entry in value] + + if origin == float: + if isinstance(value, int): + coerced = float(value) + if coerced != value: + return value + return coerced + + return value + + if type_ == datetime: + try: + return parse_datetime(value) # type: ignore + except Exception: + return value + + if type_ == date: + try: + return parse_date(value) # type: ignore + except Exception: + return value + + return value + + +@runtime_checkable +class CachedDiscriminatorType(Protocol): + __discriminator__: DiscriminatorDetails + + +class DiscriminatorDetails: + field_name: str + """The name of the discriminator field in the variant class, e.g. + + ```py + class Foo(BaseModel): + type: Literal['foo'] + ``` + + Will result in field_name='type' + """ + + field_alias_from: str | None + """The name of the discriminator field in the API response, e.g. + + ```py + class Foo(BaseModel): + type: Literal['foo'] = Field(alias='type_from_api') + ``` + + Will result in field_alias_from='type_from_api' + """ + + mapping: dict[str, type] + """Mapping of discriminator value to variant type, e.g. 
+ + {'foo': FooVariant, 'bar': BarVariant} + """ + + def __init__( + self, + *, + mapping: dict[str, type], + discriminator_field: str, + discriminator_alias: str | None, + ) -> None: + self.mapping = mapping + self.field_name = discriminator_field + self.field_alias_from = discriminator_alias + + +def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any, ...]) -> DiscriminatorDetails | None: + if isinstance(union, CachedDiscriminatorType): + return union.__discriminator__ + + discriminator_field_name: str | None = None + + for annotation in meta_annotations: + if isinstance(annotation, PropertyInfo) and annotation.discriminator is not None: + discriminator_field_name = annotation.discriminator + break + + if not discriminator_field_name: + return None + + mapping: dict[str, type] = {} + discriminator_alias: str | None = None + + for variant in get_args(union): + variant = strip_annotated_type(variant) + if is_basemodel_type(variant): + if PYDANTIC_V2: + field = _extract_field_schema_pv2(variant, discriminator_field_name) + if not field: + continue + + # Note: if one variant defines an alias then they all should + discriminator_alias = field.get("serialization_alias") + + field_schema = field["schema"] + + if field_schema["type"] == "literal": + for entry in field_schema["expected"]: + if isinstance(entry, str): + mapping[entry] = variant + else: + field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name) # pyright: ignore[reportDeprecated, reportUnnecessaryCast] + if not field_info: + continue + + # Note: if one variant defines an alias then they all should + discriminator_alias = field_info.alias + + if field_info.annotation and is_literal_type(field_info.annotation): + for entry in get_args(field_info.annotation): + if isinstance(entry, str): + mapping[entry] = variant + + if not mapping: + return None + + details = DiscriminatorDetails( + mapping=mapping, + discriminator_field=discriminator_field_name, + 
discriminator_alias=discriminator_alias, + ) + cast(CachedDiscriminatorType, union).__discriminator__ = details + return details + + +def _extract_field_schema_pv2(model: type[BaseModel], field_name: str) -> ModelField | None: + schema = model.__pydantic_core_schema__ + if schema["type"] != "model": + return None + + fields_schema = schema["schema"] + if fields_schema["type"] != "model-fields": + return None + + fields_schema = cast("ModelFieldsSchema", fields_schema) + + field = fields_schema["fields"].get(field_name) + if not field: + return None + + return cast("ModelField", field) # pyright: ignore[reportUnnecessaryCast] + + +def validate_type(*, type_: type[_T], value: object) -> _T: + """Strict validation that the given value matches the expected type""" + if inspect.isclass(type_) and issubclass(type_, pydantic.BaseModel): + return cast(_T, parse_obj(type_, value)) + + return cast(_T, _validate_non_model_type(type_=type_, value=value)) + + +# our use of subclasssing here causes weirdness for type checkers, +# so we just pretend that we don't subclass +if TYPE_CHECKING: + GenericModel = BaseModel +else: + + class GenericModel(BaseGenericModel, BaseModel): + pass + + +if PYDANTIC_V2: + from pydantic import TypeAdapter as _TypeAdapter + + _CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=None)(_TypeAdapter)) + + if TYPE_CHECKING: + from pydantic import TypeAdapter + else: + TypeAdapter = _CachedTypeAdapter + + def _validate_non_model_type(*, type_: type[_T], value: object) -> _T: + return TypeAdapter(type_).validate_python(value) + +elif not TYPE_CHECKING: # TODO: condition is weird + + class RootModel(GenericModel, Generic[_T]): + """Used as a placeholder to easily convert runtime types to a Pydantic format + to provide validation. 
+ + For example: + ```py + validated = RootModel[int](__root__="5").__root__ + # validated: 5 + ``` + """ + + __root__: _T + + def _validate_non_model_type(*, type_: type[_T], value: object) -> _T: + model = _create_pydantic_model(type_).validate(value) + return cast(_T, model.__root__) + + def _create_pydantic_model(type_: _T) -> Type[RootModel[_T]]: + return RootModel[type_] # type: ignore + + +class FinalRequestOptionsInput(TypedDict, total=False): + method: Required[str] + url: Required[str] + params: Query + headers: Headers + max_retries: int + timeout: float | Timeout | None + files: HttpxRequestFiles | None + idempotency_key: str + json_data: Body + extra_json: AnyMapping + + +@final +class FinalRequestOptions(pydantic.BaseModel): + method: str + url: str + params: Query = {} + headers: Union[Headers, NotGiven] = NotGiven() + max_retries: Union[int, NotGiven] = NotGiven() + timeout: Union[float, Timeout, None, NotGiven] = NotGiven() + files: Union[HttpxRequestFiles, None] = None + idempotency_key: Union[str, None] = None + post_parser: Union[Callable[[Any], Any], NotGiven] = NotGiven() + + # It should be noted that we cannot use `json` here as that would override + # a BaseModel method in an incompatible fashion. + json_data: Union[Body, None] = None + extra_json: Union[AnyMapping, None] = None + + if PYDANTIC_V2: + model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True) + else: + + class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated] + arbitrary_types_allowed: bool = True + + def get_max_retries(self, max_retries: int) -> int: + if isinstance(self.max_retries, NotGiven): + return max_retries + return self.max_retries + + def _strip_raw_response_header(self) -> None: + if not is_given(self.headers): + return + + if self.headers.get(RAW_RESPONSE_HEADER): + self.headers = {**self.headers} + self.headers.pop(RAW_RESPONSE_HEADER) + + # override the `construct` method so that we can run custom transformations. 
+ # this is necessary as we don't want to do any actual runtime type checking + # (which means we can't use validators) but we do want to ensure that `NotGiven` + # values are not present + # + # type ignore required because we're adding explicit types to `**values` + @classmethod + def construct( # type: ignore + cls, + _fields_set: set[str] | None = None, + **values: Unpack[FinalRequestOptionsInput], + ) -> FinalRequestOptions: + kwargs: dict[str, Any] = { + # we unconditionally call `strip_not_given` on any value + # as it will just ignore any non-mapping types + key: strip_not_given(value) + for key, value in values.items() + } + if PYDANTIC_V2: + return super().model_construct(_fields_set, **kwargs) + return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs)) # pyright: ignore[reportDeprecated] + + if not TYPE_CHECKING: + # type checkers incorrectly complain about this assignment + model_construct = construct diff --git a/.venv/Lib/site-packages/openai/_module_client.py b/.venv/Lib/site-packages/openai/_module_client.py new file mode 100644 index 00000000..6f7356eb --- /dev/null +++ b/.venv/Lib/site-packages/openai/_module_client.py @@ -0,0 +1,85 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import override + +from . 
import resources, _load_client +from ._utils import LazyProxy + + +class ChatProxy(LazyProxy[resources.Chat]): + @override + def __load__(self) -> resources.Chat: + return _load_client().chat + + +class BetaProxy(LazyProxy[resources.Beta]): + @override + def __load__(self) -> resources.Beta: + return _load_client().beta + + +class FilesProxy(LazyProxy[resources.Files]): + @override + def __load__(self) -> resources.Files: + return _load_client().files + + +class AudioProxy(LazyProxy[resources.Audio]): + @override + def __load__(self) -> resources.Audio: + return _load_client().audio + + +class ImagesProxy(LazyProxy[resources.Images]): + @override + def __load__(self) -> resources.Images: + return _load_client().images + + +class ModelsProxy(LazyProxy[resources.Models]): + @override + def __load__(self) -> resources.Models: + return _load_client().models + + +class BatchesProxy(LazyProxy[resources.Batches]): + @override + def __load__(self) -> resources.Batches: + return _load_client().batches + + +class EmbeddingsProxy(LazyProxy[resources.Embeddings]): + @override + def __load__(self) -> resources.Embeddings: + return _load_client().embeddings + + +class CompletionsProxy(LazyProxy[resources.Completions]): + @override + def __load__(self) -> resources.Completions: + return _load_client().completions + + +class ModerationsProxy(LazyProxy[resources.Moderations]): + @override + def __load__(self) -> resources.Moderations: + return _load_client().moderations + + +class FineTuningProxy(LazyProxy[resources.FineTuning]): + @override + def __load__(self) -> resources.FineTuning: + return _load_client().fine_tuning + + +chat: resources.Chat = ChatProxy().__as_proxied__() +beta: resources.Beta = BetaProxy().__as_proxied__() +files: resources.Files = FilesProxy().__as_proxied__() +audio: resources.Audio = AudioProxy().__as_proxied__() +images: resources.Images = ImagesProxy().__as_proxied__() +models: resources.Models = ModelsProxy().__as_proxied__() +batches: 
resources.Batches = BatchesProxy().__as_proxied__() +embeddings: resources.Embeddings = EmbeddingsProxy().__as_proxied__() +completions: resources.Completions = CompletionsProxy().__as_proxied__() +moderations: resources.Moderations = ModerationsProxy().__as_proxied__() +fine_tuning: resources.FineTuning = FineTuningProxy().__as_proxied__() diff --git a/.venv/Lib/site-packages/openai/_qs.py b/.venv/Lib/site-packages/openai/_qs.py new file mode 100644 index 00000000..274320ca --- /dev/null +++ b/.venv/Lib/site-packages/openai/_qs.py @@ -0,0 +1,150 @@ +from __future__ import annotations + +from typing import Any, List, Tuple, Union, Mapping, TypeVar +from urllib.parse import parse_qs, urlencode +from typing_extensions import Literal, get_args + +from ._types import NOT_GIVEN, NotGiven, NotGivenOr +from ._utils import flatten + +_T = TypeVar("_T") + + +ArrayFormat = Literal["comma", "repeat", "indices", "brackets"] +NestedFormat = Literal["dots", "brackets"] + +PrimitiveData = Union[str, int, float, bool, None] +# this should be Data = Union[PrimitiveData, "List[Data]", "Tuple[Data]", "Mapping[str, Data]"] +# https://github.com/microsoft/pyright/issues/3555 +Data = Union[PrimitiveData, List[Any], Tuple[Any], "Mapping[str, Any]"] +Params = Mapping[str, Data] + + +class Querystring: + array_format: ArrayFormat + nested_format: NestedFormat + + def __init__( + self, + *, + array_format: ArrayFormat = "repeat", + nested_format: NestedFormat = "brackets", + ) -> None: + self.array_format = array_format + self.nested_format = nested_format + + def parse(self, query: str) -> Mapping[str, object]: + # Note: custom format syntax is not supported yet + return parse_qs(query) + + def stringify( + self, + params: Params, + *, + array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN, + nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN, + ) -> str: + return urlencode( + self.stringify_items( + params, + array_format=array_format, + nested_format=nested_format, + ) + ) + + def 
stringify_items( + self, + params: Params, + *, + array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN, + nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN, + ) -> list[tuple[str, str]]: + opts = Options( + qs=self, + array_format=array_format, + nested_format=nested_format, + ) + return flatten([self._stringify_item(key, value, opts) for key, value in params.items()]) + + def _stringify_item( + self, + key: str, + value: Data, + opts: Options, + ) -> list[tuple[str, str]]: + if isinstance(value, Mapping): + items: list[tuple[str, str]] = [] + nested_format = opts.nested_format + for subkey, subvalue in value.items(): + items.extend( + self._stringify_item( + # TODO: error if unknown format + f"{key}.{subkey}" if nested_format == "dots" else f"{key}[{subkey}]", + subvalue, + opts, + ) + ) + return items + + if isinstance(value, (list, tuple)): + array_format = opts.array_format + if array_format == "comma": + return [ + ( + key, + ",".join(self._primitive_value_to_str(item) for item in value if item is not None), + ), + ] + elif array_format == "repeat": + items = [] + for item in value: + items.extend(self._stringify_item(key, item, opts)) + return items + elif array_format == "indices": + raise NotImplementedError("The array indices format is not supported yet") + elif array_format == "brackets": + items = [] + key = key + "[]" + for item in value: + items.extend(self._stringify_item(key, item, opts)) + return items + else: + raise NotImplementedError( + f"Unknown array_format value: {array_format}, choose from {', '.join(get_args(ArrayFormat))}" + ) + + serialised = self._primitive_value_to_str(value) + if not serialised: + return [] + return [(key, serialised)] + + def _primitive_value_to_str(self, value: PrimitiveData) -> str: + # copied from httpx + if value is True: + return "true" + elif value is False: + return "false" + elif value is None: + return "" + return str(value) + + +_qs = Querystring() +parse = _qs.parse +stringify = _qs.stringify 
+stringify_items = _qs.stringify_items + + +class Options: + array_format: ArrayFormat + nested_format: NestedFormat + + def __init__( + self, + qs: Querystring = _qs, + *, + array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN, + nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN, + ) -> None: + self.array_format = qs.array_format if isinstance(array_format, NotGiven) else array_format + self.nested_format = qs.nested_format if isinstance(nested_format, NotGiven) else nested_format diff --git a/.venv/Lib/site-packages/openai/_resource.py b/.venv/Lib/site-packages/openai/_resource.py new file mode 100644 index 00000000..fff9ba19 --- /dev/null +++ b/.venv/Lib/site-packages/openai/_resource.py @@ -0,0 +1,43 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import time +from typing import TYPE_CHECKING + +import anyio + +if TYPE_CHECKING: + from ._client import OpenAI, AsyncOpenAI + + +class SyncAPIResource: + _client: OpenAI + + def __init__(self, client: OpenAI) -> None: + self._client = client + self._get = client.get + self._post = client.post + self._patch = client.patch + self._put = client.put + self._delete = client.delete + self._get_api_list = client.get_api_list + + def _sleep(self, seconds: float) -> None: + time.sleep(seconds) + + +class AsyncAPIResource: + _client: AsyncOpenAI + + def __init__(self, client: AsyncOpenAI) -> None: + self._client = client + self._get = client.get + self._post = client.post + self._patch = client.patch + self._put = client.put + self._delete = client.delete + self._get_api_list = client.get_api_list + + async def _sleep(self, seconds: float) -> None: + await anyio.sleep(seconds) diff --git a/.venv/Lib/site-packages/openai/_response.py b/.venv/Lib/site-packages/openai/_response.py new file mode 100644 index 00000000..4ba2ae68 --- /dev/null +++ b/.venv/Lib/site-packages/openai/_response.py @@ -0,0 +1,832 @@ +from __future__ import annotations + 
+import os +import inspect +import logging +import datetime +import functools +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Union, + Generic, + TypeVar, + Callable, + Iterator, + AsyncIterator, + cast, + overload, +) +from typing_extensions import Awaitable, ParamSpec, override, get_origin + +import anyio +import httpx +import pydantic + +from ._types import NoneType +from ._utils import is_given, extract_type_arg, is_annotated_type, extract_type_var_from_base +from ._models import BaseModel, is_basemodel +from ._constants import RAW_RESPONSE_HEADER, OVERRIDE_CAST_TO_HEADER +from ._streaming import Stream, AsyncStream, is_stream_class_type, extract_stream_chunk_type +from ._exceptions import OpenAIError, APIResponseValidationError + +if TYPE_CHECKING: + from ._models import FinalRequestOptions + from ._base_client import BaseClient + + +P = ParamSpec("P") +R = TypeVar("R") +_T = TypeVar("_T") +_APIResponseT = TypeVar("_APIResponseT", bound="APIResponse[Any]") +_AsyncAPIResponseT = TypeVar("_AsyncAPIResponseT", bound="AsyncAPIResponse[Any]") + +log: logging.Logger = logging.getLogger(__name__) + + +class BaseAPIResponse(Generic[R]): + _cast_to: type[R] + _client: BaseClient[Any, Any] + _parsed_by_type: dict[type[Any], Any] + _is_sse_stream: bool + _stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None + _options: FinalRequestOptions + + http_response: httpx.Response + + def __init__( + self, + *, + raw: httpx.Response, + cast_to: type[R], + client: BaseClient[Any, Any], + stream: bool, + stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None, + options: FinalRequestOptions, + ) -> None: + self._cast_to = cast_to + self._client = client + self._parsed_by_type = {} + self._is_sse_stream = stream + self._stream_cls = stream_cls + self._options = options + self.http_response = raw + + @property + def headers(self) -> httpx.Headers: + return self.http_response.headers + + @property + def http_request(self) -> 
httpx.Request: + """Returns the httpx Request instance associated with the current response.""" + return self.http_response.request + + @property + def status_code(self) -> int: + return self.http_response.status_code + + @property + def url(self) -> httpx.URL: + """Returns the URL for which the request was made.""" + return self.http_response.url + + @property + def method(self) -> str: + return self.http_request.method + + @property + def http_version(self) -> str: + return self.http_response.http_version + + @property + def elapsed(self) -> datetime.timedelta: + """The time taken for the complete request/response cycle to complete.""" + return self.http_response.elapsed + + @property + def is_closed(self) -> bool: + """Whether or not the response body has been closed. + + If this is False then there is response data that has not been read yet. + You must either fully consume the response body or call `.close()` + before discarding the response to prevent resource leaks. + """ + return self.http_response.is_closed + + @override + def __repr__(self) -> str: + return ( + f"<{self.__class__.__name__} [{self.status_code} {self.http_response.reason_phrase}] type={self._cast_to}>" + ) + + def _parse(self, *, to: type[_T] | None = None) -> R | _T: + # unwrap `Annotated[T, ...]` -> `T` + if to and is_annotated_type(to): + to = extract_type_arg(to, 0) + + if self._is_sse_stream: + if to: + if not is_stream_class_type(to): + raise TypeError(f"Expected custom parse type to be a subclass of {Stream} or {AsyncStream}") + + return cast( + _T, + to( + cast_to=extract_stream_chunk_type( + to, + failure_message="Expected custom stream type to be passed with a type argument, e.g. 
Stream[ChunkType]", + ), + response=self.http_response, + client=cast(Any, self._client), + ), + ) + + if self._stream_cls: + return cast( + R, + self._stream_cls( + cast_to=extract_stream_chunk_type(self._stream_cls), + response=self.http_response, + client=cast(Any, self._client), + ), + ) + + stream_cls = cast("type[Stream[Any]] | type[AsyncStream[Any]] | None", self._client._default_stream_cls) + if stream_cls is None: + raise MissingStreamClassError() + + return cast( + R, + stream_cls( + cast_to=self._cast_to, + response=self.http_response, + client=cast(Any, self._client), + ), + ) + + cast_to = to if to is not None else self._cast_to + + # unwrap `Annotated[T, ...]` -> `T` + if is_annotated_type(cast_to): + cast_to = extract_type_arg(cast_to, 0) + + if cast_to is NoneType: + return cast(R, None) + + response = self.http_response + if cast_to == str: + return cast(R, response.text) + + if cast_to == bytes: + return cast(R, response.content) + + if cast_to == int: + return cast(R, int(response.text)) + + if cast_to == float: + return cast(R, float(response.text)) + + origin = get_origin(cast_to) or cast_to + + # handle the legacy binary response case + if inspect.isclass(cast_to) and cast_to.__name__ == "HttpxBinaryResponseContent": + return cast(R, cast_to(response)) # type: ignore + + if origin == APIResponse: + raise RuntimeError("Unexpected state - cast_to is `APIResponse`") + + if inspect.isclass(origin) and issubclass(origin, httpx.Response): + # Because of the invariance of our ResponseT TypeVar, users can subclass httpx.Response + # and pass that class to our request functions. We cannot change the variance to be either + # covariant or contravariant as that makes our usage of ResponseT illegal. We could construct + # the response class ourselves but that is something that should be supported directly in httpx + # as it would be easy to incorrectly construct the Response object due to the multitude of arguments. 
+ if cast_to != httpx.Response: + raise ValueError(f"Subclasses of httpx.Response cannot be passed to `cast_to`") + return cast(R, response) + + if inspect.isclass(origin) and not issubclass(origin, BaseModel) and issubclass(origin, pydantic.BaseModel): + raise TypeError("Pydantic models must subclass our base model type, e.g. `from openai import BaseModel`") + + if ( + cast_to is not object + and not origin is list + and not origin is dict + and not origin is Union + and not issubclass(origin, BaseModel) + ): + raise RuntimeError( + f"Unsupported type, expected {cast_to} to be a subclass of {BaseModel}, {dict}, {list}, {Union}, {NoneType}, {str} or {httpx.Response}." + ) + + # split is required to handle cases where additional information is included + # in the response, e.g. application/json; charset=utf-8 + content_type, *_ = response.headers.get("content-type", "*").split(";") + if content_type != "application/json": + if is_basemodel(cast_to): + try: + data = response.json() + except Exception as exc: + log.debug("Could not read JSON from response data due to %s - %s", type(exc), exc) + else: + return self._client._process_response_data( + data=data, + cast_to=cast_to, # type: ignore + response=response, + ) + + if self._client._strict_response_validation: + raise APIResponseValidationError( + response=response, + message=f"Expected Content-Type response header to be `application/json` but received `{content_type}` instead.", + body=response.text, + ) + + # If the API responds with content that isn't JSON then we just return + # the (decoded) text without performing any parsing so that you can still + # handle the response however you need to. 
+ return response.text # type: ignore + + data = response.json() + + return self._client._process_response_data( + data=data, + cast_to=cast_to, # type: ignore + response=response, + ) + + +class APIResponse(BaseAPIResponse[R]): + @property + def request_id(self) -> str | None: + return self.http_response.headers.get("x-request-id") # type: ignore[no-any-return] + + @overload + def parse(self, *, to: type[_T]) -> _T: + ... + + @overload + def parse(self) -> R: + ... + + def parse(self, *, to: type[_T] | None = None) -> R | _T: + """Returns the rich python representation of this response's data. + + For lower-level control, see `.read()`, `.json()`, `.iter_bytes()`. + + You can customise the type that the response is parsed into through + the `to` argument, e.g. + + ```py + from openai import BaseModel + + + class MyModel(BaseModel): + foo: str + + + obj = response.parse(to=MyModel) + print(obj.foo) + ``` + + We support parsing: + - `BaseModel` + - `dict` + - `list` + - `Union` + - `str` + - `int` + - `float` + - `httpx.Response` + """ + cache_key = to if to is not None else self._cast_to + cached = self._parsed_by_type.get(cache_key) + if cached is not None: + return cached # type: ignore[no-any-return] + + if not self._is_sse_stream: + self.read() + + parsed = self._parse(to=to) + if is_given(self._options.post_parser): + parsed = self._options.post_parser(parsed) + + self._parsed_by_type[cache_key] = parsed + return parsed + + def read(self) -> bytes: + """Read and return the binary response content.""" + try: + return self.http_response.read() + except httpx.StreamConsumed as exc: + # The default error raised by httpx isn't very + # helpful in our case so we re-raise it with + # a different error message. 
+ raise StreamAlreadyConsumed() from exc + + def text(self) -> str: + """Read and decode the response content into a string.""" + self.read() + return self.http_response.text + + def json(self) -> object: + """Read and decode the JSON response content.""" + self.read() + return self.http_response.json() + + def close(self) -> None: + """Close the response and release the connection. + + Automatically called if the response body is read to completion. + """ + self.http_response.close() + + def iter_bytes(self, chunk_size: int | None = None) -> Iterator[bytes]: + """ + A byte-iterator over the decoded response content. + + This automatically handles gzip, deflate and brotli encoded responses. + """ + for chunk in self.http_response.iter_bytes(chunk_size): + yield chunk + + def iter_text(self, chunk_size: int | None = None) -> Iterator[str]: + """A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. + """ + for chunk in self.http_response.iter_text(chunk_size): + yield chunk + + def iter_lines(self) -> Iterator[str]: + """Like `iter_text()` but will only yield chunks for each line""" + for chunk in self.http_response.iter_lines(): + yield chunk + + +class AsyncAPIResponse(BaseAPIResponse[R]): + @property + def request_id(self) -> str | None: + return self.http_response.headers.get("x-request-id") # type: ignore[no-any-return] + + @overload + async def parse(self, *, to: type[_T]) -> _T: + ... + + @overload + async def parse(self) -> R: + ... + + async def parse(self, *, to: type[_T] | None = None) -> R | _T: + """Returns the rich python representation of this response's data. + + For lower-level control, see `.read()`, `.json()`, `.iter_bytes()`. + + You can customise the type that the response is parsed into through + the `to` argument, e.g. 
+ + ```py + from openai import BaseModel + + + class MyModel(BaseModel): + foo: str + + + obj = response.parse(to=MyModel) + print(obj.foo) + ``` + + We support parsing: + - `BaseModel` + - `dict` + - `list` + - `Union` + - `str` + - `httpx.Response` + """ + cache_key = to if to is not None else self._cast_to + cached = self._parsed_by_type.get(cache_key) + if cached is not None: + return cached # type: ignore[no-any-return] + + if not self._is_sse_stream: + await self.read() + + parsed = self._parse(to=to) + if is_given(self._options.post_parser): + parsed = self._options.post_parser(parsed) + + self._parsed_by_type[cache_key] = parsed + return parsed + + async def read(self) -> bytes: + """Read and return the binary response content.""" + try: + return await self.http_response.aread() + except httpx.StreamConsumed as exc: + # the default error raised by httpx isn't very + # helpful in our case so we re-raise it with + # a different error message + raise StreamAlreadyConsumed() from exc + + async def text(self) -> str: + """Read and decode the response content into a string.""" + await self.read() + return self.http_response.text + + async def json(self) -> object: + """Read and decode the JSON response content.""" + await self.read() + return self.http_response.json() + + async def close(self) -> None: + """Close the response and release the connection. + + Automatically called if the response body is read to completion. + """ + await self.http_response.aclose() + + async def iter_bytes(self, chunk_size: int | None = None) -> AsyncIterator[bytes]: + """ + A byte-iterator over the decoded response content. + + This automatically handles gzip, deflate and brotli encoded responses. 
+ """ + async for chunk in self.http_response.aiter_bytes(chunk_size): + yield chunk + + async def iter_text(self, chunk_size: int | None = None) -> AsyncIterator[str]: + """A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. + """ + async for chunk in self.http_response.aiter_text(chunk_size): + yield chunk + + async def iter_lines(self) -> AsyncIterator[str]: + """Like `iter_text()` but will only yield chunks for each line""" + async for chunk in self.http_response.aiter_lines(): + yield chunk + + +class BinaryAPIResponse(APIResponse[bytes]): + """Subclass of APIResponse providing helpers for dealing with binary data. + + Note: If you want to stream the response data instead of eagerly reading it + all at once then you should use `.with_streaming_response` when making + the API request, e.g. `.with_streaming_response.get_binary_response()` + """ + + def write_to_file( + self, + file: str | os.PathLike[str], + ) -> None: + """Write the output to the given file. + + Accepts a filename or any path-like object, e.g. pathlib.Path + + Note: if you want to stream the data to the file instead of writing + all at once then you should use `.with_streaming_response` when making + the API request, e.g. `.with_streaming_response.get_binary_response()` + """ + with open(file, mode="wb") as f: + for data in self.iter_bytes(): + f.write(data) + + +class AsyncBinaryAPIResponse(AsyncAPIResponse[bytes]): + """Subclass of APIResponse providing helpers for dealing with binary data. + + Note: If you want to stream the response data instead of eagerly reading it + all at once then you should use `.with_streaming_response` when making + the API request, e.g. `.with_streaming_response.get_binary_response()` + """ + + async def write_to_file( + self, + file: str | os.PathLike[str], + ) -> None: + """Write the output to the given file. + + Accepts a filename or any path-like object, e.g. 
pathlib.Path + + Note: if you want to stream the data to the file instead of writing + all at once then you should use `.with_streaming_response` when making + the API request, e.g. `.with_streaming_response.get_binary_response()` + """ + path = anyio.Path(file) + async with await path.open(mode="wb") as f: + async for data in self.iter_bytes(): + await f.write(data) + + +class StreamedBinaryAPIResponse(APIResponse[bytes]): + def stream_to_file( + self, + file: str | os.PathLike[str], + *, + chunk_size: int | None = None, + ) -> None: + """Streams the output to the given file. + + Accepts a filename or any path-like object, e.g. pathlib.Path + """ + with open(file, mode="wb") as f: + for data in self.iter_bytes(chunk_size): + f.write(data) + + +class AsyncStreamedBinaryAPIResponse(AsyncAPIResponse[bytes]): + async def stream_to_file( + self, + file: str | os.PathLike[str], + *, + chunk_size: int | None = None, + ) -> None: + """Streams the output to the given file. + + Accepts a filename or any path-like object, e.g. pathlib.Path + """ + path = anyio.Path(file) + async with await path.open(mode="wb") as f: + async for data in self.iter_bytes(chunk_size): + await f.write(data) + + +class MissingStreamClassError(TypeError): + def __init__(self) -> None: + super().__init__( + "The `stream` argument was set to `True` but the `stream_cls` argument was not given. See `openai._streaming` for reference", + ) + + +class StreamAlreadyConsumed(OpenAIError): + """ + Attempted to read or stream content, but the content has already + been streamed. + + This can happen if you use a method like `.iter_lines()` and then attempt + to read th entire response body afterwards, e.g. + + ```py + response = await client.post(...) + async for line in response.iter_lines(): + ... 
# do something with `line` + + content = await response.read() + # ^ error + ``` + + If you want this behaviour you'll need to either manually accumulate the response + content or call `await response.read()` before iterating over the stream. + """ + + def __init__(self) -> None: + message = ( + "Attempted to read or stream some content, but the content has " + "already been streamed. " + "This could be due to attempting to stream the response " + "content more than once." + "\n\n" + "You can fix this by manually accumulating the response content while streaming " + "or by calling `.read()` before starting to stream." + ) + super().__init__(message) + + +class ResponseContextManager(Generic[_APIResponseT]): + """Context manager for ensuring that a request is not made + until it is entered and that the response will always be closed + when the context manager exits + """ + + def __init__(self, request_func: Callable[[], _APIResponseT]) -> None: + self._request_func = request_func + self.__response: _APIResponseT | None = None + + def __enter__(self) -> _APIResponseT: + self.__response = self._request_func() + return self.__response + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + if self.__response is not None: + self.__response.close() + + +class AsyncResponseContextManager(Generic[_AsyncAPIResponseT]): + """Context manager for ensuring that a request is not made + until it is entered and that the response will always be closed + when the context manager exits + """ + + def __init__(self, api_request: Awaitable[_AsyncAPIResponseT]) -> None: + self._api_request = api_request + self.__response: _AsyncAPIResponseT | None = None + + async def __aenter__(self) -> _AsyncAPIResponseT: + self.__response = await self._api_request + return self.__response + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | 
None, + ) -> None: + if self.__response is not None: + await self.__response.close() + + +def to_streamed_response_wrapper(func: Callable[P, R]) -> Callable[P, ResponseContextManager[APIResponse[R]]]: + """Higher order function that takes one of our bound API methods and wraps it + to support streaming and returning the raw `APIResponse` object directly. + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> ResponseContextManager[APIResponse[R]]: + extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "stream" + + kwargs["extra_headers"] = extra_headers + + make_request = functools.partial(func, *args, **kwargs) + + return ResponseContextManager(cast(Callable[[], APIResponse[R]], make_request)) + + return wrapped + + +def async_to_streamed_response_wrapper( + func: Callable[P, Awaitable[R]], +) -> Callable[P, AsyncResponseContextManager[AsyncAPIResponse[R]]]: + """Higher order function that takes one of our bound API methods and wraps it + to support streaming and returning the raw `APIResponse` object directly. + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> AsyncResponseContextManager[AsyncAPIResponse[R]]: + extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "stream" + + kwargs["extra_headers"] = extra_headers + + make_request = func(*args, **kwargs) + + return AsyncResponseContextManager(cast(Awaitable[AsyncAPIResponse[R]], make_request)) + + return wrapped + + +def to_custom_streamed_response_wrapper( + func: Callable[P, object], + response_cls: type[_APIResponseT], +) -> Callable[P, ResponseContextManager[_APIResponseT]]: + """Higher order function that takes one of our bound API methods and an `APIResponse` class + and wraps the method to support streaming and returning the given response class directly. 
+ + Note: the given `response_cls` *must* be concrete, e.g. `class BinaryAPIResponse(APIResponse[bytes])` + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> ResponseContextManager[_APIResponseT]: + extra_headers: dict[str, Any] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "stream" + extra_headers[OVERRIDE_CAST_TO_HEADER] = response_cls + + kwargs["extra_headers"] = extra_headers + + make_request = functools.partial(func, *args, **kwargs) + + return ResponseContextManager(cast(Callable[[], _APIResponseT], make_request)) + + return wrapped + + +def async_to_custom_streamed_response_wrapper( + func: Callable[P, Awaitable[object]], + response_cls: type[_AsyncAPIResponseT], +) -> Callable[P, AsyncResponseContextManager[_AsyncAPIResponseT]]: + """Higher order function that takes one of our bound API methods and an `APIResponse` class + and wraps the method to support streaming and returning the given response class directly. + + Note: the given `response_cls` *must* be concrete, e.g. `class BinaryAPIResponse(APIResponse[bytes])` + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> AsyncResponseContextManager[_AsyncAPIResponseT]: + extra_headers: dict[str, Any] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "stream" + extra_headers[OVERRIDE_CAST_TO_HEADER] = response_cls + + kwargs["extra_headers"] = extra_headers + + make_request = func(*args, **kwargs) + + return AsyncResponseContextManager(cast(Awaitable[_AsyncAPIResponseT], make_request)) + + return wrapped + + +def to_raw_response_wrapper(func: Callable[P, R]) -> Callable[P, APIResponse[R]]: + """Higher order function that takes one of our bound API methods and wraps it + to support returning the raw `APIResponse` object directly. 
+ """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> APIResponse[R]: + extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "raw" + + kwargs["extra_headers"] = extra_headers + + return cast(APIResponse[R], func(*args, **kwargs)) + + return wrapped + + +def async_to_raw_response_wrapper(func: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[AsyncAPIResponse[R]]]: + """Higher order function that takes one of our bound API methods and wraps it + to support returning the raw `APIResponse` object directly. + """ + + @functools.wraps(func) + async def wrapped(*args: P.args, **kwargs: P.kwargs) -> AsyncAPIResponse[R]: + extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "raw" + + kwargs["extra_headers"] = extra_headers + + return cast(AsyncAPIResponse[R], await func(*args, **kwargs)) + + return wrapped + + +def to_custom_raw_response_wrapper( + func: Callable[P, object], + response_cls: type[_APIResponseT], +) -> Callable[P, _APIResponseT]: + """Higher order function that takes one of our bound API methods and an `APIResponse` class + and wraps the method to support returning the given response class directly. + + Note: the given `response_cls` *must* be concrete, e.g. 
`class BinaryAPIResponse(APIResponse[bytes])` + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> _APIResponseT: + extra_headers: dict[str, Any] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "raw" + extra_headers[OVERRIDE_CAST_TO_HEADER] = response_cls + + kwargs["extra_headers"] = extra_headers + + return cast(_APIResponseT, func(*args, **kwargs)) + + return wrapped + + +def async_to_custom_raw_response_wrapper( + func: Callable[P, Awaitable[object]], + response_cls: type[_AsyncAPIResponseT], +) -> Callable[P, Awaitable[_AsyncAPIResponseT]]: + """Higher order function that takes one of our bound API methods and an `APIResponse` class + and wraps the method to support returning the given response class directly. + + Note: the given `response_cls` *must* be concrete, e.g. `class BinaryAPIResponse(APIResponse[bytes])` + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> Awaitable[_AsyncAPIResponseT]: + extra_headers: dict[str, Any] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "raw" + extra_headers[OVERRIDE_CAST_TO_HEADER] = response_cls + + kwargs["extra_headers"] = extra_headers + + return cast(Awaitable[_AsyncAPIResponseT], func(*args, **kwargs)) + + return wrapped + + +def extract_response_type(typ: type[BaseAPIResponse[Any]]) -> type: + """Given a type like `APIResponse[T]`, returns the generic type variable `T`. + + This also handles the case where a concrete subclass is given, e.g. + ```py + class MyResponse(APIResponse[bytes]): + ... 
+ + extract_response_type(MyResponse) -> bytes + ``` + """ + return extract_type_var_from_base( + typ, + generic_bases=cast("tuple[type, ...]", (BaseAPIResponse, APIResponse, AsyncAPIResponse)), + index=0, + ) diff --git a/.venv/Lib/site-packages/openai/_streaming.py b/.venv/Lib/site-packages/openai/_streaming.py new file mode 100644 index 00000000..0fda992c --- /dev/null +++ b/.venv/Lib/site-packages/openai/_streaming.py @@ -0,0 +1,410 @@ +# Note: initially copied from https://github.com/florimondmanca/httpx-sse/blob/master/src/httpx_sse/_decoders.py +from __future__ import annotations + +import json +import inspect +from types import TracebackType +from typing import TYPE_CHECKING, Any, Generic, TypeVar, Iterator, AsyncIterator, cast +from typing_extensions import Self, Protocol, TypeGuard, override, get_origin, runtime_checkable + +import httpx + +from ._utils import is_mapping, extract_type_var_from_base +from ._exceptions import APIError + +if TYPE_CHECKING: + from ._client import OpenAI, AsyncOpenAI + + +_T = TypeVar("_T") + + +class Stream(Generic[_T]): + """Provides the core interface to iterate over a synchronous stream response.""" + + response: httpx.Response + + _decoder: SSEBytesDecoder + + def __init__( + self, + *, + cast_to: type[_T], + response: httpx.Response, + client: OpenAI, + ) -> None: + self.response = response + self._cast_to = cast_to + self._client = client + self._decoder = client._make_sse_decoder() + self._iterator = self.__stream__() + + def __next__(self) -> _T: + return self._iterator.__next__() + + def __iter__(self) -> Iterator[_T]: + for item in self._iterator: + yield item + + def _iter_events(self) -> Iterator[ServerSentEvent]: + yield from self._decoder.iter_bytes(self.response.iter_bytes()) + + def __stream__(self) -> Iterator[_T]: + cast_to = cast(Any, self._cast_to) + response = self.response + process_data = self._client._process_response_data + iterator = self._iter_events() + + for sse in iterator: + if 
sse.data.startswith("[DONE]"): + break + + if sse.event is None: + data = sse.json() + if is_mapping(data) and data.get("error"): + message = None + error = data.get("error") + if is_mapping(error): + message = error.get("message") + if not message or not isinstance(message, str): + message = "An error occurred during streaming" + + raise APIError( + message=message, + request=self.response.request, + body=data["error"], + ) + + yield process_data(data=data, cast_to=cast_to, response=response) + + else: + data = sse.json() + + if sse.event == "error" and is_mapping(data) and data.get("error"): + message = None + error = data.get("error") + if is_mapping(error): + message = error.get("message") + if not message or not isinstance(message, str): + message = "An error occurred during streaming" + + raise APIError( + message=message, + request=self.response.request, + body=data["error"], + ) + + yield process_data(data={"data": data, "event": sse.event}, cast_to=cast_to, response=response) + + # Ensure the entire stream is consumed + for _sse in iterator: + ... + + def __enter__(self) -> Self: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + def close(self) -> None: + """ + Close the response and release the connection. + + Automatically called if the response body is read to completion. 
+ """ + self.response.close() + + +class AsyncStream(Generic[_T]): + """Provides the core interface to iterate over an asynchronous stream response.""" + + response: httpx.Response + + _decoder: SSEDecoder | SSEBytesDecoder + + def __init__( + self, + *, + cast_to: type[_T], + response: httpx.Response, + client: AsyncOpenAI, + ) -> None: + self.response = response + self._cast_to = cast_to + self._client = client + self._decoder = client._make_sse_decoder() + self._iterator = self.__stream__() + + async def __anext__(self) -> _T: + return await self._iterator.__anext__() + + async def __aiter__(self) -> AsyncIterator[_T]: + async for item in self._iterator: + yield item + + async def _iter_events(self) -> AsyncIterator[ServerSentEvent]: + async for sse in self._decoder.aiter_bytes(self.response.aiter_bytes()): + yield sse + + async def __stream__(self) -> AsyncIterator[_T]: + cast_to = cast(Any, self._cast_to) + response = self.response + process_data = self._client._process_response_data + iterator = self._iter_events() + + async for sse in iterator: + if sse.data.startswith("[DONE]"): + break + + if sse.event is None: + data = sse.json() + if is_mapping(data) and data.get("error"): + message = None + error = data.get("error") + if is_mapping(error): + message = error.get("message") + if not message or not isinstance(message, str): + message = "An error occurred during streaming" + + raise APIError( + message=message, + request=self.response.request, + body=data["error"], + ) + + yield process_data(data=data, cast_to=cast_to, response=response) + + else: + data = sse.json() + + if sse.event == "error" and is_mapping(data) and data.get("error"): + message = None + error = data.get("error") + if is_mapping(error): + message = error.get("message") + if not message or not isinstance(message, str): + message = "An error occurred during streaming" + + raise APIError( + message=message, + request=self.response.request, + body=data["error"], + ) + + yield 
process_data(data={"data": data, "event": sse.event}, cast_to=cast_to, response=response) + + # Ensure the entire stream is consumed + async for _sse in iterator: + ... + + async def __aenter__(self) -> Self: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + await self.close() + + async def close(self) -> None: + """ + Close the response and release the connection. + + Automatically called if the response body is read to completion. + """ + await self.response.aclose() + + +class ServerSentEvent: + def __init__( + self, + *, + event: str | None = None, + data: str | None = None, + id: str | None = None, + retry: int | None = None, + ) -> None: + if data is None: + data = "" + + self._id = id + self._data = data + self._event = event or None + self._retry = retry + + @property + def event(self) -> str | None: + return self._event + + @property + def id(self) -> str | None: + return self._id + + @property + def retry(self) -> int | None: + return self._retry + + @property + def data(self) -> str: + return self._data + + def json(self) -> Any: + return json.loads(self.data) + + @override + def __repr__(self) -> str: + return f"ServerSentEvent(event={self.event}, data={self.data}, id={self.id}, retry={self.retry})" + + +class SSEDecoder: + _data: list[str] + _event: str | None + _retry: int | None + _last_event_id: str | None + + def __init__(self) -> None: + self._event = None + self._data = [] + self._last_event_id = None + self._retry = None + + def iter_bytes(self, iterator: Iterator[bytes]) -> Iterator[ServerSentEvent]: + """Given an iterator that yields raw binary data, iterate over it & yield every event encountered""" + for chunk in self._iter_chunks(iterator): + # Split before decoding so splitlines() only uses \r and \n + for raw_line in chunk.splitlines(): + line = raw_line.decode("utf-8") + sse = self.decode(line) + if sse: + yield sse + + def 
_iter_chunks(self, iterator: Iterator[bytes]) -> Iterator[bytes]: + """Given an iterator that yields raw binary data, iterate over it and yield individual SSE chunks""" + data = b"" + for chunk in iterator: + for line in chunk.splitlines(keepends=True): + data += line + if data.endswith((b"\r\r", b"\n\n", b"\r\n\r\n")): + yield data + data = b"" + if data: + yield data + + async def aiter_bytes(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[ServerSentEvent]: + """Given an iterator that yields raw binary data, iterate over it & yield every event encountered""" + async for chunk in self._aiter_chunks(iterator): + # Split before decoding so splitlines() only uses \r and \n + for raw_line in chunk.splitlines(): + line = raw_line.decode("utf-8") + sse = self.decode(line) + if sse: + yield sse + + async def _aiter_chunks(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[bytes]: + """Given an iterator that yields raw binary data, iterate over it and yield individual SSE chunks""" + data = b"" + async for chunk in iterator: + for line in chunk.splitlines(keepends=True): + data += line + if data.endswith((b"\r\r", b"\n\n", b"\r\n\r\n")): + yield data + data = b"" + if data: + yield data + + def decode(self, line: str) -> ServerSentEvent | None: + # See: https://html.spec.whatwg.org/multipage/server-sent-events.html#event-stream-interpretation # noqa: E501 + + if not line: + if not self._event and not self._data and not self._last_event_id and self._retry is None: + return None + + sse = ServerSentEvent( + event=self._event, + data="\n".join(self._data), + id=self._last_event_id, + retry=self._retry, + ) + + # NOTE: as per the SSE spec, do not reset last_event_id. 
+ self._event = None + self._data = [] + self._retry = None + + return sse + + if line.startswith(":"): + return None + + fieldname, _, value = line.partition(":") + + if value.startswith(" "): + value = value[1:] + + if fieldname == "event": + self._event = value + elif fieldname == "data": + self._data.append(value) + elif fieldname == "id": + if "\0" in value: + pass + else: + self._last_event_id = value + elif fieldname == "retry": + try: + self._retry = int(value) + except (TypeError, ValueError): + pass + else: + pass # Field is ignored. + + return None + + +@runtime_checkable +class SSEBytesDecoder(Protocol): + def iter_bytes(self, iterator: Iterator[bytes]) -> Iterator[ServerSentEvent]: + """Given an iterator that yields raw binary data, iterate over it & yield every event encountered""" + ... + + def aiter_bytes(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[ServerSentEvent]: + """Given an async iterator that yields raw binary data, iterate over it & yield every event encountered""" + ... + + +def is_stream_class_type(typ: type) -> TypeGuard[type[Stream[object]] | type[AsyncStream[object]]]: + """TypeGuard for determining whether or not the given type is a subclass of `Stream` / `AsyncStream`""" + origin = get_origin(typ) or typ + return inspect.isclass(origin) and issubclass(origin, (Stream, AsyncStream)) + + +def extract_stream_chunk_type( + stream_cls: type, + *, + failure_message: str | None = None, +) -> type: + """Given a type like `Stream[T]`, returns the generic type variable `T`. + + This also handles the case where a concrete subclass is given, e.g. + ```py + class MyStream(Stream[bytes]): + ... 
+ + extract_stream_chunk_type(MyStream) -> bytes + ``` + """ + from ._base_client import Stream, AsyncStream + + return extract_type_var_from_base( + stream_cls, + index=0, + generic_bases=cast("tuple[type, ...]", (Stream, AsyncStream)), + failure_message=failure_message, + ) diff --git a/.venv/Lib/site-packages/openai/_types.py b/.venv/Lib/site-packages/openai/_types.py new file mode 100644 index 00000000..de9b1dd4 --- /dev/null +++ b/.venv/Lib/site-packages/openai/_types.py @@ -0,0 +1,222 @@ +from __future__ import annotations + +from os import PathLike +from typing import ( + IO, + TYPE_CHECKING, + Any, + Dict, + List, + Type, + Tuple, + Union, + Mapping, + TypeVar, + Callable, + Optional, + Sequence, +) +from typing_extensions import Literal, Protocol, TypeAlias, TypedDict, override, runtime_checkable + +import httpx +import pydantic +from httpx import URL, Proxy, Timeout, Response, BaseTransport, AsyncBaseTransport + +if TYPE_CHECKING: + from ._models import BaseModel + from ._response import APIResponse, AsyncAPIResponse + from ._legacy_response import HttpxBinaryResponseContent + +Transport = BaseTransport +AsyncTransport = AsyncBaseTransport +Query = Mapping[str, object] +Body = object +AnyMapping = Mapping[str, object] +ModelT = TypeVar("ModelT", bound=pydantic.BaseModel) +_T = TypeVar("_T") + + +# Approximates httpx internal ProxiesTypes and RequestFiles types +# while adding support for `PathLike` instances +ProxiesDict = Dict["str | URL", Union[None, str, URL, Proxy]] +ProxiesTypes = Union[str, Proxy, ProxiesDict] +if TYPE_CHECKING: + Base64FileInput = Union[IO[bytes], PathLike[str]] + FileContent = Union[IO[bytes], bytes, PathLike[str]] +else: + Base64FileInput = Union[IO[bytes], PathLike] + FileContent = Union[IO[bytes], bytes, PathLike] # PathLike is not subscriptable in Python 3.8. 
+FileTypes = Union[ + # file (or bytes) + FileContent, + # (filename, file (or bytes)) + Tuple[Optional[str], FileContent], + # (filename, file (or bytes), content_type) + Tuple[Optional[str], FileContent, Optional[str]], + # (filename, file (or bytes), content_type, headers) + Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], +] +RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]] + +# duplicate of the above but without our custom file support +HttpxFileContent = Union[IO[bytes], bytes] +HttpxFileTypes = Union[ + # file (or bytes) + HttpxFileContent, + # (filename, file (or bytes)) + Tuple[Optional[str], HttpxFileContent], + # (filename, file (or bytes), content_type) + Tuple[Optional[str], HttpxFileContent, Optional[str]], + # (filename, file (or bytes), content_type, headers) + Tuple[Optional[str], HttpxFileContent, Optional[str], Mapping[str, str]], +] +HttpxRequestFiles = Union[Mapping[str, HttpxFileTypes], Sequence[Tuple[str, HttpxFileTypes]]] + +# Workaround to support (cast_to: Type[ResponseT]) -> ResponseT +# where ResponseT includes `None`. In order to support directly +# passing `None`, overloads would have to be defined for every +# method that uses `ResponseT` which would lead to an unacceptable +# amount of code duplication and make it unreadable. See _base_client.py +# for example usage. 
+# +# This unfortunately means that you will either have +# to import this type and pass it explicitly: +# +# from openai import NoneType +# client.get('/foo', cast_to=NoneType) +# +# or build it yourself: +# +# client.get('/foo', cast_to=type(None)) +if TYPE_CHECKING: + NoneType: Type[None] +else: + NoneType = type(None) + + +class RequestOptions(TypedDict, total=False): + headers: Headers + max_retries: int + timeout: float | Timeout | None + params: Query + extra_json: AnyMapping + idempotency_key: str + + +# Sentinel class used until PEP 0661 is accepted +class NotGiven: + """ + A sentinel singleton class used to distinguish omitted keyword arguments + from those passed in with the value None (which may have different behavior). + + For example: + + ```py + def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: + ... + + + get(timeout=1) # 1s timeout + get(timeout=None) # No timeout + get() # Default timeout behavior, which may not be statically known at the method definition. 
+ ``` + """ + + def __bool__(self) -> Literal[False]: + return False + + @override + def __repr__(self) -> str: + return "NOT_GIVEN" + + +NotGivenOr = Union[_T, NotGiven] +NOT_GIVEN = NotGiven() + + +class Omit: + """In certain situations you need to be able to represent a case where a default value has + to be explicitly removed and `None` is not an appropriate substitute, for example: + + ```py + # as the default `Content-Type` header is `application/json` that will be sent + client.post("/upload/files", files={"file": b"my raw file content"}) + + # you can't explicitly override the header as it has to be dynamically generated + # to look something like: 'multipart/form-data; boundary=0d8382fcf5f8c3be01ca2e11002d2983' + client.post(..., headers={"Content-Type": "multipart/form-data"}) + + # instead you can remove the default `application/json` header by passing Omit + client.post(..., headers={"Content-Type": Omit()}) + ``` + """ + + def __bool__(self) -> Literal[False]: + return False + + +@runtime_checkable +class ModelBuilderProtocol(Protocol): + @classmethod + def build( + cls: type[_T], + *, + response: Response, + data: object, + ) -> _T: + ... + + +Headers = Mapping[str, Union[str, Omit]] + + +class HeadersLikeProtocol(Protocol): + def get(self, __key: str) -> str | None: + ... 
+ + +HeadersLike = Union[Headers, HeadersLikeProtocol] + +ResponseT = TypeVar( + "ResponseT", + bound=Union[ + object, + str, + None, + "BaseModel", + List[Any], + Dict[str, Any], + Response, + ModelBuilderProtocol, + "APIResponse[Any]", + "AsyncAPIResponse[Any]", + "HttpxBinaryResponseContent", + ], +) + +StrBytesIntFloat = Union[str, bytes, int, float] + +# Note: copied from Pydantic +# https://github.com/pydantic/pydantic/blob/32ea570bf96e84234d2992e1ddf40ab8a565925a/pydantic/main.py#L49 +IncEx: TypeAlias = "set[int] | set[str] | dict[int, Any] | dict[str, Any] | None" + +PostParser = Callable[[Any], Any] + + +@runtime_checkable +class InheritsGeneric(Protocol): + """Represents a type that has inherited from `Generic` + + The `__orig_bases__` property can be used to determine the resolved + type variable for a given base class. + """ + + __orig_bases__: tuple[_GenericAlias] + + +class _GenericAlias(Protocol): + __origin__: type[object] + + +class HttpxSendArgs(TypedDict, total=False): + auth: httpx.Auth diff --git a/.venv/Lib/site-packages/openai/_utils/__init__.py b/.venv/Lib/site-packages/openai/_utils/__init__.py new file mode 100644 index 00000000..31b5b227 --- /dev/null +++ b/.venv/Lib/site-packages/openai/_utils/__init__.py @@ -0,0 +1,51 @@ +from ._sync import asyncify as asyncify +from ._proxy import LazyProxy as LazyProxy +from ._utils import ( + flatten as flatten, + is_dict as is_dict, + is_list as is_list, + is_given as is_given, + is_tuple as is_tuple, + lru_cache as lru_cache, + is_mapping as is_mapping, + is_tuple_t as is_tuple_t, + parse_date as parse_date, + is_iterable as is_iterable, + is_sequence as is_sequence, + coerce_float as coerce_float, + is_mapping_t as is_mapping_t, + removeprefix as removeprefix, + removesuffix as removesuffix, + extract_files as extract_files, + is_sequence_t as is_sequence_t, + required_args as required_args, + coerce_boolean as coerce_boolean, + coerce_integer as coerce_integer, + file_from_path as file_from_path, 
+ parse_datetime as parse_datetime, + strip_not_given as strip_not_given, + deepcopy_minimal as deepcopy_minimal, + get_async_library as get_async_library, + maybe_coerce_float as maybe_coerce_float, + get_required_header as get_required_header, + maybe_coerce_boolean as maybe_coerce_boolean, + maybe_coerce_integer as maybe_coerce_integer, +) +from ._typing import ( + is_list_type as is_list_type, + is_union_type as is_union_type, + extract_type_arg as extract_type_arg, + is_iterable_type as is_iterable_type, + is_required_type as is_required_type, + is_annotated_type as is_annotated_type, + strip_annotated_type as strip_annotated_type, + extract_type_var_from_base as extract_type_var_from_base, +) +from ._streams import consume_sync_iterator as consume_sync_iterator, consume_async_iterator as consume_async_iterator +from ._transform import ( + PropertyInfo as PropertyInfo, + transform as transform, + async_transform as async_transform, + maybe_transform as maybe_transform, + async_maybe_transform as async_maybe_transform, +) diff --git a/.venv/Lib/site-packages/openai/_utils/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/_utils/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..42d9ddb9 Binary files /dev/null and b/.venv/Lib/site-packages/openai/_utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/_utils/__pycache__/_logs.cpython-311.pyc b/.venv/Lib/site-packages/openai/_utils/__pycache__/_logs.cpython-311.pyc new file mode 100644 index 00000000..395512e9 Binary files /dev/null and b/.venv/Lib/site-packages/openai/_utils/__pycache__/_logs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/_utils/__pycache__/_proxy.cpython-311.pyc b/.venv/Lib/site-packages/openai/_utils/__pycache__/_proxy.cpython-311.pyc new file mode 100644 index 00000000..39e250f1 Binary files /dev/null and b/.venv/Lib/site-packages/openai/_utils/__pycache__/_proxy.cpython-311.pyc differ diff 
--git a/.venv/Lib/site-packages/openai/_utils/__pycache__/_streams.cpython-311.pyc b/.venv/Lib/site-packages/openai/_utils/__pycache__/_streams.cpython-311.pyc new file mode 100644 index 00000000..a980145d Binary files /dev/null and b/.venv/Lib/site-packages/openai/_utils/__pycache__/_streams.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/_utils/__pycache__/_sync.cpython-311.pyc b/.venv/Lib/site-packages/openai/_utils/__pycache__/_sync.cpython-311.pyc new file mode 100644 index 00000000..ac56aed5 Binary files /dev/null and b/.venv/Lib/site-packages/openai/_utils/__pycache__/_sync.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/_utils/__pycache__/_transform.cpython-311.pyc b/.venv/Lib/site-packages/openai/_utils/__pycache__/_transform.cpython-311.pyc new file mode 100644 index 00000000..2586bf28 Binary files /dev/null and b/.venv/Lib/site-packages/openai/_utils/__pycache__/_transform.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/_utils/__pycache__/_typing.cpython-311.pyc b/.venv/Lib/site-packages/openai/_utils/__pycache__/_typing.cpython-311.pyc new file mode 100644 index 00000000..664a6b28 Binary files /dev/null and b/.venv/Lib/site-packages/openai/_utils/__pycache__/_typing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/_utils/__pycache__/_utils.cpython-311.pyc b/.venv/Lib/site-packages/openai/_utils/__pycache__/_utils.cpython-311.pyc new file mode 100644 index 00000000..34254ae9 Binary files /dev/null and b/.venv/Lib/site-packages/openai/_utils/__pycache__/_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/_utils/_logs.py b/.venv/Lib/site-packages/openai/_utils/_logs.py new file mode 100644 index 00000000..e5113fd8 --- /dev/null +++ b/.venv/Lib/site-packages/openai/_utils/_logs.py @@ -0,0 +1,25 @@ +import os +import logging + +logger: logging.Logger = logging.getLogger("openai") +httpx_logger: logging.Logger = logging.getLogger("httpx") + + +def _basic_config() 
-> None: + # e.g. [2023-10-05 14:12:26 - openai._base_client:818 - DEBUG] HTTP Request: POST http://127.0.0.1:4010/foo/bar "200 OK" + logging.basicConfig( + format="[%(asctime)s - %(name)s:%(lineno)d - %(levelname)s] %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) + + +def setup_logging() -> None: + env = os.environ.get("OPENAI_LOG") + if env == "debug": + _basic_config() + logger.setLevel(logging.DEBUG) + httpx_logger.setLevel(logging.DEBUG) + elif env == "info": + _basic_config() + logger.setLevel(logging.INFO) + httpx_logger.setLevel(logging.INFO) diff --git a/.venv/Lib/site-packages/openai/_utils/_proxy.py b/.venv/Lib/site-packages/openai/_utils/_proxy.py new file mode 100644 index 00000000..c46a62a6 --- /dev/null +++ b/.venv/Lib/site-packages/openai/_utils/_proxy.py @@ -0,0 +1,63 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import Generic, TypeVar, Iterable, cast +from typing_extensions import override + +T = TypeVar("T") + + +class LazyProxy(Generic[T], ABC): + """Implements data methods to pretend that an instance is another instance. + + This includes forwarding attribute access and other methods. + """ + + # Note: we have to special case proxies that themselves return proxies + # to support using a proxy as a catch-all for any random access, e.g. 
`proxy.foo.bar.baz` + + def __getattr__(self, attr: str) -> object: + proxied = self.__get_proxied__() + if isinstance(proxied, LazyProxy): + return proxied # pyright: ignore + return getattr(proxied, attr) + + @override + def __repr__(self) -> str: + proxied = self.__get_proxied__() + if isinstance(proxied, LazyProxy): + return proxied.__class__.__name__ + return repr(self.__get_proxied__()) + + @override + def __str__(self) -> str: + proxied = self.__get_proxied__() + if isinstance(proxied, LazyProxy): + return proxied.__class__.__name__ + return str(proxied) + + @override + def __dir__(self) -> Iterable[str]: + proxied = self.__get_proxied__() + if isinstance(proxied, LazyProxy): + return [] + return proxied.__dir__() + + @property # type: ignore + @override + def __class__(self) -> type: # pyright: ignore + proxied = self.__get_proxied__() + if issubclass(type(proxied), LazyProxy): + return type(proxied) + return proxied.__class__ + + def __get_proxied__(self) -> T: + return self.__load__() + + def __as_proxied__(self) -> T: + """Helper method that returns the current proxy, typed as the loaded object""" + return cast(T, self) + + @abstractmethod + def __load__(self) -> T: + ... diff --git a/.venv/Lib/site-packages/openai/_utils/_streams.py b/.venv/Lib/site-packages/openai/_utils/_streams.py new file mode 100644 index 00000000..f4a0208f --- /dev/null +++ b/.venv/Lib/site-packages/openai/_utils/_streams.py @@ -0,0 +1,12 @@ +from typing import Any +from typing_extensions import Iterator, AsyncIterator + + +def consume_sync_iterator(iterator: Iterator[Any]) -> None: + for _ in iterator: + ... + + +async def consume_async_iterator(iterator: AsyncIterator[Any]) -> None: + async for _ in iterator: + ... 
diff --git a/.venv/Lib/site-packages/openai/_utils/_sync.py b/.venv/Lib/site-packages/openai/_utils/_sync.py new file mode 100644 index 00000000..595924e5 --- /dev/null +++ b/.venv/Lib/site-packages/openai/_utils/_sync.py @@ -0,0 +1,64 @@ +from __future__ import annotations + +import functools +from typing import TypeVar, Callable, Awaitable +from typing_extensions import ParamSpec + +import anyio +import anyio.to_thread + +T_Retval = TypeVar("T_Retval") +T_ParamSpec = ParamSpec("T_ParamSpec") + + +# copied from `asyncer`, https://github.com/tiangolo/asyncer +def asyncify( + function: Callable[T_ParamSpec, T_Retval], + *, + cancellable: bool = False, + limiter: anyio.CapacityLimiter | None = None, +) -> Callable[T_ParamSpec, Awaitable[T_Retval]]: + """ + Take a blocking function and create an async one that receives the same + positional and keyword arguments, and that when called, calls the original function + in a worker thread using `anyio.to_thread.run_sync()`. Internally, + `asyncer.asyncify()` uses the same `anyio.to_thread.run_sync()`, but it supports + keyword arguments additional to positional arguments and it adds better support for + autocompletion and inline errors for the arguments of the function called and the + return value. + + If the `cancellable` option is enabled and the task waiting for its completion is + cancelled, the thread will still run its course but its return value (or any raised + exception) will be ignored. + + Use it like this: + + ```Python + def do_work(arg1, arg2, kwarg1="", kwarg2="") -> str: + # Do work + return "Some result" + + + result = await to_thread.asyncify(do_work)("spam", "ham", kwarg1="a", kwarg2="b") + print(result) + ``` + + ## Arguments + + `function`: a blocking regular callable (e.g. 
a function) + `cancellable`: `True` to allow cancellation of the operation + `limiter`: capacity limiter to use to limit the total amount of threads running + (if omitted, the default limiter is used) + + ## Return + + An async function that takes the same positional and keyword arguments as the + original one, that when called runs the same original function in a thread worker + and returns the result. + """ + + async def wrapper(*args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs) -> T_Retval: + partial_f = functools.partial(function, *args, **kwargs) + return await anyio.to_thread.run_sync(partial_f, cancellable=cancellable, limiter=limiter) + + return wrapper diff --git a/.venv/Lib/site-packages/openai/_utils/_transform.py b/.venv/Lib/site-packages/openai/_utils/_transform.py new file mode 100644 index 00000000..47e262a5 --- /dev/null +++ b/.venv/Lib/site-packages/openai/_utils/_transform.py @@ -0,0 +1,382 @@ +from __future__ import annotations + +import io +import base64 +import pathlib +from typing import Any, Mapping, TypeVar, cast +from datetime import date, datetime +from typing_extensions import Literal, get_args, override, get_type_hints + +import anyio +import pydantic + +from ._utils import ( + is_list, + is_mapping, + is_iterable, +) +from .._files import is_base64_file_input +from ._typing import ( + is_list_type, + is_union_type, + extract_type_arg, + is_iterable_type, + is_required_type, + is_annotated_type, + strip_annotated_type, +) +from .._compat import model_dump, is_typeddict + +_T = TypeVar("_T") + + +# TODO: support for drilling globals() and locals() +# TODO: ensure works correctly with forward references in all cases + + +PropertyFormat = Literal["iso8601", "base64", "custom"] + + +class PropertyInfo: + """Metadata class to be used in Annotated types to provide information about a given type. 
+ + For example: + + class MyParams(TypedDict): + account_holder_name: Annotated[str, PropertyInfo(alias='accountHolderName')] + + This means that {'account_holder_name': 'Robert'} will be transformed to {'accountHolderName': 'Robert'} before being sent to the API. + """ + + alias: str | None + format: PropertyFormat | None + format_template: str | None + discriminator: str | None + + def __init__( + self, + *, + alias: str | None = None, + format: PropertyFormat | None = None, + format_template: str | None = None, + discriminator: str | None = None, + ) -> None: + self.alias = alias + self.format = format + self.format_template = format_template + self.discriminator = discriminator + + @override + def __repr__(self) -> str: + return f"{self.__class__.__name__}(alias='{self.alias}', format={self.format}, format_template='{self.format_template}', discriminator='{self.discriminator}')" + + +def maybe_transform( + data: object, + expected_type: object, +) -> Any | None: + """Wrapper over `transform()` that allows `None` to be passed. + + See `transform()` for more details. + """ + if data is None: + return None + return transform(data, expected_type) + + +# Wrapper over _transform_recursive providing fake types +def transform( + data: _T, + expected_type: object, +) -> _T: + """Transform dictionaries based off of type information from the given type, for example: + + ```py + class Params(TypedDict, total=False): + card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]] + + + transformed = transform({"card_id": ""}, Params) + # {'cardID': ''} + ``` + + Any keys / data that does not have type information given will be included as is. + + It should be noted that the transformations that this function does are not represented in the type system. 
+ """ + transformed = _transform_recursive(data, annotation=cast(type, expected_type)) + return cast(_T, transformed) + + +def _get_annotated_type(type_: type) -> type | None: + """If the given type is an `Annotated` type then it is returned, if not `None` is returned. + + This also unwraps the type when applicable, e.g. `Required[Annotated[T, ...]]` + """ + if is_required_type(type_): + # Unwrap `Required[Annotated[T, ...]]` to `Annotated[T, ...]` + type_ = get_args(type_)[0] + + if is_annotated_type(type_): + return type_ + + return None + + +def _maybe_transform_key(key: str, type_: type) -> str: + """Transform the given `data` based on the annotations provided in `type_`. + + Note: this function only looks at `Annotated` types that contain `PropertInfo` metadata. + """ + annotated_type = _get_annotated_type(type_) + if annotated_type is None: + # no `Annotated` definition for this type, no transformation needed + return key + + # ignore the first argument as it is the actual type + annotations = get_args(annotated_type)[1:] + for annotation in annotations: + if isinstance(annotation, PropertyInfo) and annotation.alias is not None: + return annotation.alias + + return key + + +def _transform_recursive( + data: object, + *, + annotation: type, + inner_type: type | None = None, +) -> object: + """Transform the given data against the expected type. + + Args: + annotation: The direct type annotation given to the particular piece of data. + This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc + + inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type + is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in + the list can be transformed using the metadata from the container type. + + Defaults to the same value as the `annotation` argument. 
+ """ + if inner_type is None: + inner_type = annotation + + stripped_type = strip_annotated_type(inner_type) + if is_typeddict(stripped_type) and is_mapping(data): + return _transform_typeddict(data, stripped_type) + + if ( + # List[T] + (is_list_type(stripped_type) and is_list(data)) + # Iterable[T] + or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str)) + ): + inner_type = extract_type_arg(stripped_type, 0) + return [_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data] + + if is_union_type(stripped_type): + # For union types we run the transformation against all subtypes to ensure that everything is transformed. + # + # TODO: there may be edge cases where the same normalized field name will transform to two different names + # in different subtypes. + for subtype in get_args(stripped_type): + data = _transform_recursive(data, annotation=annotation, inner_type=subtype) + return data + + if isinstance(data, pydantic.BaseModel): + return model_dump(data, exclude_unset=True) + + annotated_type = _get_annotated_type(annotation) + if annotated_type is None: + return data + + # ignore the first argument as it is the actual type + annotations = get_args(annotated_type)[1:] + for annotation in annotations: + if isinstance(annotation, PropertyInfo) and annotation.format is not None: + return _format_data(data, annotation.format, annotation.format_template) + + return data + + +def _format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object: + if isinstance(data, (date, datetime)): + if format_ == "iso8601": + return data.isoformat() + + if format_ == "custom" and format_template is not None: + return data.strftime(format_template) + + if format_ == "base64" and is_base64_file_input(data): + binary: str | bytes | None = None + + if isinstance(data, pathlib.Path): + binary = data.read_bytes() + elif isinstance(data, io.IOBase): + binary = data.read() + + if isinstance(binary, 
str): # type: ignore[unreachable] + binary = binary.encode() + + if not isinstance(binary, bytes): + raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}") + + return base64.b64encode(binary).decode("ascii") + + return data + + +def _transform_typeddict( + data: Mapping[str, object], + expected_type: type, +) -> Mapping[str, object]: + result: dict[str, object] = {} + annotations = get_type_hints(expected_type, include_extras=True) + for key, value in data.items(): + type_ = annotations.get(key) + if type_ is None: + # we do not have a type annotation for this field, leave it as is + result[key] = value + else: + result[_maybe_transform_key(key, type_)] = _transform_recursive(value, annotation=type_) + return result + + +async def async_maybe_transform( + data: object, + expected_type: object, +) -> Any | None: + """Wrapper over `async_transform()` that allows `None` to be passed. + + See `async_transform()` for more details. + """ + if data is None: + return None + return await async_transform(data, expected_type) + + +async def async_transform( + data: _T, + expected_type: object, +) -> _T: + """Transform dictionaries based off of type information from the given type, for example: + + ```py + class Params(TypedDict, total=False): + card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]] + + + transformed = transform({"card_id": ""}, Params) + # {'cardID': ''} + ``` + + Any keys / data that does not have type information given will be included as is. + + It should be noted that the transformations that this function does are not represented in the type system. + """ + transformed = await _async_transform_recursive(data, annotation=cast(type, expected_type)) + return cast(_T, transformed) + + +async def _async_transform_recursive( + data: object, + *, + annotation: type, + inner_type: type | None = None, +) -> object: + """Transform the given data against the expected type. 
+ + Args: + annotation: The direct type annotation given to the particular piece of data. + This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc + + inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type + is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in + the list can be transformed using the metadata from the container type. + + Defaults to the same value as the `annotation` argument. + """ + if inner_type is None: + inner_type = annotation + + stripped_type = strip_annotated_type(inner_type) + if is_typeddict(stripped_type) and is_mapping(data): + return await _async_transform_typeddict(data, stripped_type) + + if ( + # List[T] + (is_list_type(stripped_type) and is_list(data)) + # Iterable[T] + or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str)) + ): + inner_type = extract_type_arg(stripped_type, 0) + return [await _async_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data] + + if is_union_type(stripped_type): + # For union types we run the transformation against all subtypes to ensure that everything is transformed. + # + # TODO: there may be edge cases where the same normalized field name will transform to two different names + # in different subtypes. 
+ for subtype in get_args(stripped_type): + data = await _async_transform_recursive(data, annotation=annotation, inner_type=subtype) + return data + + if isinstance(data, pydantic.BaseModel): + return model_dump(data, exclude_unset=True) + + annotated_type = _get_annotated_type(annotation) + if annotated_type is None: + return data + + # ignore the first argument as it is the actual type + annotations = get_args(annotated_type)[1:] + for annotation in annotations: + if isinstance(annotation, PropertyInfo) and annotation.format is not None: + return await _async_format_data(data, annotation.format, annotation.format_template) + + return data + + +async def _async_format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object: + if isinstance(data, (date, datetime)): + if format_ == "iso8601": + return data.isoformat() + + if format_ == "custom" and format_template is not None: + return data.strftime(format_template) + + if format_ == "base64" and is_base64_file_input(data): + binary: str | bytes | None = None + + if isinstance(data, pathlib.Path): + binary = await anyio.Path(data).read_bytes() + elif isinstance(data, io.IOBase): + binary = data.read() + + if isinstance(binary, str): # type: ignore[unreachable] + binary = binary.encode() + + if not isinstance(binary, bytes): + raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}") + + return base64.b64encode(binary).decode("ascii") + + return data + + +async def _async_transform_typeddict( + data: Mapping[str, object], + expected_type: type, +) -> Mapping[str, object]: + result: dict[str, object] = {} + annotations = get_type_hints(expected_type, include_extras=True) + for key, value in data.items(): + type_ = annotations.get(key) + if type_ is None: + # we do not have a type annotation for this field, leave it as is + result[key] = value + else: + result[_maybe_transform_key(key, type_)] = await _async_transform_recursive(value, annotation=type_) + return result 
diff --git a/.venv/Lib/site-packages/openai/_utils/_typing.py b/.venv/Lib/site-packages/openai/_utils/_typing.py new file mode 100644 index 00000000..c036991f --- /dev/null +++ b/.venv/Lib/site-packages/openai/_utils/_typing.py @@ -0,0 +1,120 @@ +from __future__ import annotations + +from typing import Any, TypeVar, Iterable, cast +from collections import abc as _c_abc +from typing_extensions import Required, Annotated, get_args, get_origin + +from .._types import InheritsGeneric +from .._compat import is_union as _is_union + + +def is_annotated_type(typ: type) -> bool: + return get_origin(typ) == Annotated + + +def is_list_type(typ: type) -> bool: + return (get_origin(typ) or typ) == list + + +def is_iterable_type(typ: type) -> bool: + """If the given type is `typing.Iterable[T]`""" + origin = get_origin(typ) or typ + return origin == Iterable or origin == _c_abc.Iterable + + +def is_union_type(typ: type) -> bool: + return _is_union(get_origin(typ)) + + +def is_required_type(typ: type) -> bool: + return get_origin(typ) == Required + + +def is_typevar(typ: type) -> bool: + # type ignore is required because type checkers + # think this expression will always return False + return type(typ) == TypeVar # type: ignore + + +# Extracts T from Annotated[T, ...] or from Required[Annotated[T, ...]] +def strip_annotated_type(typ: type) -> type: + if is_required_type(typ) or is_annotated_type(typ): + return strip_annotated_type(cast(type, get_args(typ)[0])) + + return typ + + +def extract_type_arg(typ: type, index: int) -> type: + args = get_args(typ) + try: + return cast(type, args[index]) + except IndexError as err: + raise RuntimeError(f"Expected type {typ} to have a type argument at index {index} but it did not") from err + + +def extract_type_var_from_base( + typ: type, + *, + generic_bases: tuple[type, ...], + index: int, + failure_message: str | None = None, +) -> type: + """Given a type like `Foo[T]`, returns the generic type variable `T`. 
+ + This also handles the case where a concrete subclass is given, e.g. + ```py + class MyResponse(Foo[bytes]): + ... + + extract_type_var(MyResponse, bases=(Foo,), index=0) -> bytes + ``` + + And where a generic subclass is given: + ```py + _T = TypeVar('_T') + class MyResponse(Foo[_T]): + ... + + extract_type_var(MyResponse[bytes], bases=(Foo,), index=0) -> bytes + ``` + """ + cls = cast(object, get_origin(typ) or typ) + if cls in generic_bases: + # we're given the class directly + return extract_type_arg(typ, index) + + # if a subclass is given + # --- + # this is needed as __orig_bases__ is not present in the typeshed stubs + # because it is intended to be for internal use only, however there does + # not seem to be a way to resolve generic TypeVars for inherited subclasses + # without using it. + if isinstance(cls, InheritsGeneric): + target_base_class: Any | None = None + for base in cls.__orig_bases__: + if base.__origin__ in generic_bases: + target_base_class = base + break + + if target_base_class is None: + raise RuntimeError( + "Could not find the generic base class;\n" + "This should never happen;\n" + f"Does {cls} inherit from one of {generic_bases} ?" + ) + + extracted = extract_type_arg(target_base_class, index) + if is_typevar(extracted): + # If the extracted type argument is itself a type variable + # then that means the subclass itself is generic, so we have + # to resolve the type argument from the class itself, not + # the base class. + # + # Note: if there is more than 1 type argument, the subclass could + # change the ordering of the type arguments, this is not currently + # supported. 
+ return extract_type_arg(typ, index) + + return extracted + + raise RuntimeError(failure_message or f"Could not resolve inner type variable at index {index} for {typ}") diff --git a/.venv/Lib/site-packages/openai/_utils/_utils.py b/.venv/Lib/site-packages/openai/_utils/_utils.py new file mode 100644 index 00000000..17904ce6 --- /dev/null +++ b/.venv/Lib/site-packages/openai/_utils/_utils.py @@ -0,0 +1,403 @@ +from __future__ import annotations + +import os +import re +import inspect +import functools +from typing import ( + Any, + Tuple, + Mapping, + TypeVar, + Callable, + Iterable, + Sequence, + cast, + overload, +) +from pathlib import Path +from typing_extensions import TypeGuard + +import sniffio + +from .._types import Headers, NotGiven, FileTypes, NotGivenOr, HeadersLike +from .._compat import parse_date as parse_date, parse_datetime as parse_datetime + +_T = TypeVar("_T") +_TupleT = TypeVar("_TupleT", bound=Tuple[object, ...]) +_MappingT = TypeVar("_MappingT", bound=Mapping[str, object]) +_SequenceT = TypeVar("_SequenceT", bound=Sequence[object]) +CallableT = TypeVar("CallableT", bound=Callable[..., Any]) + + +def flatten(t: Iterable[Iterable[_T]]) -> list[_T]: + return [item for sublist in t for item in sublist] + + +def extract_files( + # TODO: this needs to take Dict but variance issues..... + # create protocol type ? + query: Mapping[str, object], + *, + paths: Sequence[Sequence[str]], +) -> list[tuple[str, FileTypes]]: + """Recursively extract files from the given dictionary based on specified paths. + + A path may look like this ['foo', 'files', '', 'data']. + + Note: this mutates the given dictionary. 
+ """ + files: list[tuple[str, FileTypes]] = [] + for path in paths: + files.extend(_extract_items(query, path, index=0, flattened_key=None)) + return files + + +def _extract_items( + obj: object, + path: Sequence[str], + *, + index: int, + flattened_key: str | None, +) -> list[tuple[str, FileTypes]]: + try: + key = path[index] + except IndexError: + if isinstance(obj, NotGiven): + # no value was provided - we can safely ignore + return [] + + # cyclical import + from .._files import assert_is_file_content + + # We have exhausted the path, return the entry we found. + assert_is_file_content(obj, key=flattened_key) + assert flattened_key is not None + return [(flattened_key, cast(FileTypes, obj))] + + index += 1 + if is_dict(obj): + try: + # We are at the last entry in the path so we must remove the field + if (len(path)) == index: + item = obj.pop(key) + else: + item = obj[key] + except KeyError: + # Key was not present in the dictionary, this is not indicative of an error + # as the given path may not point to a required field. We also do not want + # to enforce required fields as the API may differ from the spec in some cases. + return [] + if flattened_key is None: + flattened_key = key + else: + flattened_key += f"[{key}]" + return _extract_items( + item, + path, + index=index, + flattened_key=flattened_key, + ) + elif is_list(obj): + if key != "": + return [] + + return flatten( + [ + _extract_items( + item, + path, + index=index, + flattened_key=flattened_key + "[]" if flattened_key is not None else "[]", + ) + for item in obj + ] + ) + + # Something unexpected was passed, just ignore it. + return [] + + +def is_given(obj: NotGivenOr[_T]) -> TypeGuard[_T]: + return not isinstance(obj, NotGiven) + + +# Type safe methods for narrowing types with TypeVars. +# The default narrowing for isinstance(obj, dict) is dict[unknown, unknown], +# however this cause Pyright to rightfully report errors. 
As we know we don't +# care about the contained types we can safely use `object` in it's place. +# +# There are two separate functions defined, `is_*` and `is_*_t` for different use cases. +# `is_*` is for when you're dealing with an unknown input +# `is_*_t` is for when you're narrowing a known union type to a specific subset + + +def is_tuple(obj: object) -> TypeGuard[tuple[object, ...]]: + return isinstance(obj, tuple) + + +def is_tuple_t(obj: _TupleT | object) -> TypeGuard[_TupleT]: + return isinstance(obj, tuple) + + +def is_sequence(obj: object) -> TypeGuard[Sequence[object]]: + return isinstance(obj, Sequence) + + +def is_sequence_t(obj: _SequenceT | object) -> TypeGuard[_SequenceT]: + return isinstance(obj, Sequence) + + +def is_mapping(obj: object) -> TypeGuard[Mapping[str, object]]: + return isinstance(obj, Mapping) + + +def is_mapping_t(obj: _MappingT | object) -> TypeGuard[_MappingT]: + return isinstance(obj, Mapping) + + +def is_dict(obj: object) -> TypeGuard[dict[object, object]]: + return isinstance(obj, dict) + + +def is_list(obj: object) -> TypeGuard[list[object]]: + return isinstance(obj, list) + + +def is_iterable(obj: object) -> TypeGuard[Iterable[object]]: + return isinstance(obj, Iterable) + + +def deepcopy_minimal(item: _T) -> _T: + """Minimal reimplementation of copy.deepcopy() that will only copy certain object types: + + - mappings, e.g. `dict` + - list + + This is done for performance reasons. 
+ """ + if is_mapping(item): + return cast(_T, {k: deepcopy_minimal(v) for k, v in item.items()}) + if is_list(item): + return cast(_T, [deepcopy_minimal(entry) for entry in item]) + return item + + +# copied from https://github.com/Rapptz/RoboDanny +def human_join(seq: Sequence[str], *, delim: str = ", ", final: str = "or") -> str: + size = len(seq) + if size == 0: + return "" + + if size == 1: + return seq[0] + + if size == 2: + return f"{seq[0]} {final} {seq[1]}" + + return delim.join(seq[:-1]) + f" {final} {seq[-1]}" + + +def quote(string: str) -> str: + """Add single quotation marks around the given string. Does *not* do any escaping.""" + return f"'{string}'" + + +def required_args(*variants: Sequence[str]) -> Callable[[CallableT], CallableT]: + """Decorator to enforce a given set of arguments or variants of arguments are passed to the decorated function. + + Useful for enforcing runtime validation of overloaded functions. + + Example usage: + ```py + @overload + def foo(*, a: str) -> str: + ... + + + @overload + def foo(*, b: bool) -> str: + ... + + + # This enforces the same constraints that a static type checker would + # i.e. that either a or b must be passed to the function + @required_args(["a"], ["b"]) + def foo(*, a: str | None = None, b: bool | None = None) -> str: + ... 
+ ``` + """ + + def inner(func: CallableT) -> CallableT: + params = inspect.signature(func).parameters + positional = [ + name + for name, param in params.items() + if param.kind + in { + param.POSITIONAL_ONLY, + param.POSITIONAL_OR_KEYWORD, + } + ] + + @functools.wraps(func) + def wrapper(*args: object, **kwargs: object) -> object: + given_params: set[str] = set() + for i, _ in enumerate(args): + try: + given_params.add(positional[i]) + except IndexError: + raise TypeError( + f"{func.__name__}() takes {len(positional)} argument(s) but {len(args)} were given" + ) from None + + for key in kwargs.keys(): + given_params.add(key) + + for variant in variants: + matches = all((param in given_params for param in variant)) + if matches: + break + else: # no break + if len(variants) > 1: + variations = human_join( + ["(" + human_join([quote(arg) for arg in variant], final="and") + ")" for variant in variants] + ) + msg = f"Missing required arguments; Expected either {variations} arguments to be given" + else: + assert len(variants) > 0 + + # TODO: this error message is not deterministic + missing = list(set(variants[0]) - given_params) + if len(missing) > 1: + msg = f"Missing required arguments: {human_join([quote(arg) for arg in missing])}" + else: + msg = f"Missing required argument: {quote(missing[0])}" + raise TypeError(msg) + return func(*args, **kwargs) + + return wrapper # type: ignore + + return inner + + +_K = TypeVar("_K") +_V = TypeVar("_V") + + +@overload +def strip_not_given(obj: None) -> None: + ... + + +@overload +def strip_not_given(obj: Mapping[_K, _V | NotGiven]) -> dict[_K, _V]: + ... + + +@overload +def strip_not_given(obj: object) -> object: + ... 
+ + +def strip_not_given(obj: object | None) -> object: + """Remove all top-level keys where their values are instances of `NotGiven`""" + if obj is None: + return None + + if not is_mapping(obj): + return obj + + return {key: value for key, value in obj.items() if not isinstance(value, NotGiven)} + + +def coerce_integer(val: str) -> int: + return int(val, base=10) + + +def coerce_float(val: str) -> float: + return float(val) + + +def coerce_boolean(val: str) -> bool: + return val == "true" or val == "1" or val == "on" + + +def maybe_coerce_integer(val: str | None) -> int | None: + if val is None: + return None + return coerce_integer(val) + + +def maybe_coerce_float(val: str | None) -> float | None: + if val is None: + return None + return coerce_float(val) + + +def maybe_coerce_boolean(val: str | None) -> bool | None: + if val is None: + return None + return coerce_boolean(val) + + +def removeprefix(string: str, prefix: str) -> str: + """Remove a prefix from a string. + + Backport of `str.removeprefix` for Python < 3.9 + """ + if string.startswith(prefix): + return string[len(prefix) :] + return string + + +def removesuffix(string: str, suffix: str) -> str: + """Remove a suffix from a string. 
+ + Backport of `str.removesuffix` for Python < 3.9 + """ + if string.endswith(suffix): + return string[: -len(suffix)] + return string + + +def file_from_path(path: str) -> FileTypes: + contents = Path(path).read_bytes() + file_name = os.path.basename(path) + return (file_name, contents) + + +def get_required_header(headers: HeadersLike, header: str) -> str: + lower_header = header.lower() + if isinstance(headers, Mapping): + headers = cast(Headers, headers) + for k, v in headers.items(): + if k.lower() == lower_header and isinstance(v, str): + return v + + """ to deal with the case where the header looks like Stainless-Event-Id """ + intercaps_header = re.sub(r"([^\w])(\w)", lambda pat: pat.group(1) + pat.group(2).upper(), header.capitalize()) + + for normalized_header in [header, lower_header, header.upper(), intercaps_header]: + value = headers.get(normalized_header) + if value: + return value + + raise ValueError(f"Could not find {header} header") + + +def get_async_library() -> str: + try: + return sniffio.current_async_library() + except Exception: + return "false" + + +def lru_cache(*, maxsize: int | None = 128) -> Callable[[CallableT], CallableT]: + """A version of functools.lru_cache that retains the type signature + for the wrapped function arguments. + """ + wrapper = functools.lru_cache( # noqa: TID251 + maxsize=maxsize, + ) + return cast(Any, wrapper) # type: ignore[no-any-return] diff --git a/.venv/Lib/site-packages/openai/_version.py b/.venv/Lib/site-packages/openai/_version.py new file mode 100644 index 00000000..c4c92f77 --- /dev/null +++ b/.venv/Lib/site-packages/openai/_version.py @@ -0,0 +1,4 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +__title__ = "openai" +__version__ = "1.25.2" # x-release-please-version diff --git a/.venv/Lib/site-packages/openai/cli/__init__.py b/.venv/Lib/site-packages/openai/cli/__init__.py new file mode 100644 index 00000000..d453d5e1 --- /dev/null +++ b/.venv/Lib/site-packages/openai/cli/__init__.py @@ -0,0 +1 @@ +from ._cli import main as main diff --git a/.venv/Lib/site-packages/openai/cli/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/cli/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..57612ee9 Binary files /dev/null and b/.venv/Lib/site-packages/openai/cli/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/cli/__pycache__/_cli.cpython-311.pyc b/.venv/Lib/site-packages/openai/cli/__pycache__/_cli.cpython-311.pyc new file mode 100644 index 00000000..39a8af2d Binary files /dev/null and b/.venv/Lib/site-packages/openai/cli/__pycache__/_cli.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/cli/__pycache__/_errors.cpython-311.pyc b/.venv/Lib/site-packages/openai/cli/__pycache__/_errors.cpython-311.pyc new file mode 100644 index 00000000..14e1b071 Binary files /dev/null and b/.venv/Lib/site-packages/openai/cli/__pycache__/_errors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/cli/__pycache__/_models.cpython-311.pyc b/.venv/Lib/site-packages/openai/cli/__pycache__/_models.cpython-311.pyc new file mode 100644 index 00000000..1fbbcc12 Binary files /dev/null and b/.venv/Lib/site-packages/openai/cli/__pycache__/_models.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/cli/__pycache__/_progress.cpython-311.pyc b/.venv/Lib/site-packages/openai/cli/__pycache__/_progress.cpython-311.pyc new file mode 100644 index 00000000..e23b2fbf Binary files /dev/null and b/.venv/Lib/site-packages/openai/cli/__pycache__/_progress.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/cli/__pycache__/_utils.cpython-311.pyc 
b/.venv/Lib/site-packages/openai/cli/__pycache__/_utils.cpython-311.pyc new file mode 100644 index 00000000..b9aa729b Binary files /dev/null and b/.venv/Lib/site-packages/openai/cli/__pycache__/_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/cli/_api/__init__.py b/.venv/Lib/site-packages/openai/cli/_api/__init__.py new file mode 100644 index 00000000..56a0260a --- /dev/null +++ b/.venv/Lib/site-packages/openai/cli/_api/__init__.py @@ -0,0 +1 @@ +from ._main import register_commands as register_commands diff --git a/.venv/Lib/site-packages/openai/cli/_api/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/cli/_api/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..7108e34a Binary files /dev/null and b/.venv/Lib/site-packages/openai/cli/_api/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/cli/_api/__pycache__/_main.cpython-311.pyc b/.venv/Lib/site-packages/openai/cli/_api/__pycache__/_main.cpython-311.pyc new file mode 100644 index 00000000..e9e77cd8 Binary files /dev/null and b/.venv/Lib/site-packages/openai/cli/_api/__pycache__/_main.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/cli/_api/__pycache__/audio.cpython-311.pyc b/.venv/Lib/site-packages/openai/cli/_api/__pycache__/audio.cpython-311.pyc new file mode 100644 index 00000000..371acaa9 Binary files /dev/null and b/.venv/Lib/site-packages/openai/cli/_api/__pycache__/audio.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/cli/_api/__pycache__/completions.cpython-311.pyc b/.venv/Lib/site-packages/openai/cli/_api/__pycache__/completions.cpython-311.pyc new file mode 100644 index 00000000..a5ff4975 Binary files /dev/null and b/.venv/Lib/site-packages/openai/cli/_api/__pycache__/completions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/cli/_api/__pycache__/files.cpython-311.pyc b/.venv/Lib/site-packages/openai/cli/_api/__pycache__/files.cpython-311.pyc 
new file mode 100644 index 00000000..afee7eaf Binary files /dev/null and b/.venv/Lib/site-packages/openai/cli/_api/__pycache__/files.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/cli/_api/__pycache__/image.cpython-311.pyc b/.venv/Lib/site-packages/openai/cli/_api/__pycache__/image.cpython-311.pyc new file mode 100644 index 00000000..e1a0168e Binary files /dev/null and b/.venv/Lib/site-packages/openai/cli/_api/__pycache__/image.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/cli/_api/__pycache__/models.cpython-311.pyc b/.venv/Lib/site-packages/openai/cli/_api/__pycache__/models.cpython-311.pyc new file mode 100644 index 00000000..1774b377 Binary files /dev/null and b/.venv/Lib/site-packages/openai/cli/_api/__pycache__/models.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/cli/_api/_main.py b/.venv/Lib/site-packages/openai/cli/_api/_main.py new file mode 100644 index 00000000..fe5a5e6f --- /dev/null +++ b/.venv/Lib/site-packages/openai/cli/_api/_main.py @@ -0,0 +1,16 @@ +from __future__ import annotations + +from argparse import ArgumentParser + +from . 
import chat, audio, files, image, models, completions + + +def register_commands(parser: ArgumentParser) -> None: + subparsers = parser.add_subparsers(help="All API subcommands") + + chat.register(subparsers) + image.register(subparsers) + audio.register(subparsers) + files.register(subparsers) + models.register(subparsers) + completions.register(subparsers) diff --git a/.venv/Lib/site-packages/openai/cli/_api/audio.py b/.venv/Lib/site-packages/openai/cli/_api/audio.py new file mode 100644 index 00000000..90d21b99 --- /dev/null +++ b/.venv/Lib/site-packages/openai/cli/_api/audio.py @@ -0,0 +1,94 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Optional, cast +from argparse import ArgumentParser + +from .._utils import get_client, print_model +from ..._types import NOT_GIVEN +from .._models import BaseModel +from .._progress import BufferReader + +if TYPE_CHECKING: + from argparse import _SubParsersAction + + +def register(subparser: _SubParsersAction[ArgumentParser]) -> None: + # transcriptions + sub = subparser.add_parser("audio.transcriptions.create") + + # Required + sub.add_argument("-m", "--model", type=str, default="whisper-1") + sub.add_argument("-f", "--file", type=str, required=True) + # Optional + sub.add_argument("--response-format", type=str) + sub.add_argument("--language", type=str) + sub.add_argument("-t", "--temperature", type=float) + sub.add_argument("--prompt", type=str) + sub.set_defaults(func=CLIAudio.transcribe, args_model=CLITranscribeArgs) + + # translations + sub = subparser.add_parser("audio.translations.create") + + # Required + sub.add_argument("-f", "--file", type=str, required=True) + # Optional + sub.add_argument("-m", "--model", type=str, default="whisper-1") + sub.add_argument("--response-format", type=str) + # TODO: doesn't seem to be supported by the API + # sub.add_argument("--language", type=str) + sub.add_argument("-t", "--temperature", type=float) + sub.add_argument("--prompt", type=str) + 
sub.set_defaults(func=CLIAudio.translate, args_model=CLITranslationArgs) + + +class CLITranscribeArgs(BaseModel): + model: str + file: str + response_format: Optional[str] = None + language: Optional[str] = None + temperature: Optional[float] = None + prompt: Optional[str] = None + + +class CLITranslationArgs(BaseModel): + model: str + file: str + response_format: Optional[str] = None + language: Optional[str] = None + temperature: Optional[float] = None + prompt: Optional[str] = None + + +class CLIAudio: + @staticmethod + def transcribe(args: CLITranscribeArgs) -> None: + with open(args.file, "rb") as file_reader: + buffer_reader = BufferReader(file_reader.read(), desc="Upload progress") + + model = get_client().audio.transcriptions.create( + file=(args.file, buffer_reader), + model=args.model, + language=args.language or NOT_GIVEN, + temperature=args.temperature or NOT_GIVEN, + prompt=args.prompt or NOT_GIVEN, + # casts required because the API is typed for enums + # but we don't want to validate that here for forwards-compat + response_format=cast(Any, args.response_format), + ) + print_model(model) + + @staticmethod + def translate(args: CLITranslationArgs) -> None: + with open(args.file, "rb") as file_reader: + buffer_reader = BufferReader(file_reader.read(), desc="Upload progress") + + model = get_client().audio.translations.create( + file=(args.file, buffer_reader), + model=args.model, + temperature=args.temperature or NOT_GIVEN, + prompt=args.prompt or NOT_GIVEN, + # casts required because the API is typed for enums + # but we don't want to validate that here for forwards-compat + response_format=cast(Any, args.response_format), + ) + print_model(model) diff --git a/.venv/Lib/site-packages/openai/cli/_api/chat/__init__.py b/.venv/Lib/site-packages/openai/cli/_api/chat/__init__.py new file mode 100644 index 00000000..87d97163 --- /dev/null +++ b/.venv/Lib/site-packages/openai/cli/_api/chat/__init__.py @@ -0,0 +1,13 @@ +from __future__ import annotations + 
+from typing import TYPE_CHECKING +from argparse import ArgumentParser + +from . import completions + +if TYPE_CHECKING: + from argparse import _SubParsersAction + + +def register(subparser: _SubParsersAction[ArgumentParser]) -> None: + completions.register(subparser) diff --git a/.venv/Lib/site-packages/openai/cli/_api/chat/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/cli/_api/chat/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..28c6344a Binary files /dev/null and b/.venv/Lib/site-packages/openai/cli/_api/chat/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/cli/_api/chat/__pycache__/completions.cpython-311.pyc b/.venv/Lib/site-packages/openai/cli/_api/chat/__pycache__/completions.cpython-311.pyc new file mode 100644 index 00000000..cde316e5 Binary files /dev/null and b/.venv/Lib/site-packages/openai/cli/_api/chat/__pycache__/completions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/cli/_api/chat/completions.py b/.venv/Lib/site-packages/openai/cli/_api/chat/completions.py new file mode 100644 index 00000000..c299741f --- /dev/null +++ b/.venv/Lib/site-packages/openai/cli/_api/chat/completions.py @@ -0,0 +1,156 @@ +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, List, Optional, cast +from argparse import ArgumentParser +from typing_extensions import Literal, NamedTuple + +from ..._utils import get_client +from ..._models import BaseModel +from ...._streaming import Stream +from ....types.chat import ( + ChatCompletionRole, + ChatCompletionChunk, + CompletionCreateParams, +) +from ....types.chat.completion_create_params import ( + CompletionCreateParamsStreaming, + CompletionCreateParamsNonStreaming, +) + +if TYPE_CHECKING: + from argparse import _SubParsersAction + + +def register(subparser: _SubParsersAction[ArgumentParser]) -> None: + sub = subparser.add_parser("chat.completions.create") + + sub._action_groups.pop() + 
req = sub.add_argument_group("required arguments") + opt = sub.add_argument_group("optional arguments") + + req.add_argument( + "-g", + "--message", + action="append", + nargs=2, + metavar=("ROLE", "CONTENT"), + help="A message in `{role} {content}` format. Use this argument multiple times to add multiple messages.", + required=True, + ) + req.add_argument( + "-m", + "--model", + help="The model to use.", + required=True, + ) + + opt.add_argument( + "-n", + "--n", + help="How many completions to generate for the conversation.", + type=int, + ) + opt.add_argument("-M", "--max-tokens", help="The maximum number of tokens to generate.", type=int) + opt.add_argument( + "-t", + "--temperature", + help="""What sampling temperature to use. Higher values means the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer. + +Mutually exclusive with `top_p`.""", + type=float, + ) + opt.add_argument( + "-P", + "--top_p", + help="""An alternative to sampling with temperature, called nucleus sampling, where the considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10%% probability mass are considered. 
+ + Mutually exclusive with `temperature`.""", + type=float, + ) + opt.add_argument( + "--stop", + help="A stop sequence at which to stop generating tokens for the message.", + ) + opt.add_argument("--stream", help="Stream messages as they're ready.", action="store_true") + sub.set_defaults(func=CLIChatCompletion.create, args_model=CLIChatCompletionCreateArgs) + + +class CLIMessage(NamedTuple): + role: ChatCompletionRole + content: str + + +class CLIChatCompletionCreateArgs(BaseModel): + message: List[CLIMessage] + model: str + n: Optional[int] = None + max_tokens: Optional[int] = None + temperature: Optional[float] = None + top_p: Optional[float] = None + stop: Optional[str] = None + stream: bool = False + + +class CLIChatCompletion: + @staticmethod + def create(args: CLIChatCompletionCreateArgs) -> None: + params: CompletionCreateParams = { + "model": args.model, + "messages": [ + {"role": cast(Literal["user"], message.role), "content": message.content} for message in args.message + ], + "n": args.n, + "temperature": args.temperature, + "top_p": args.top_p, + "stop": args.stop, + # type checkers are not good at inferring union types so we have to set stream afterwards + "stream": False, + } + if args.stream: + params["stream"] = args.stream # type: ignore + if args.max_tokens is not None: + params["max_tokens"] = args.max_tokens + + if args.stream: + return CLIChatCompletion._stream_create(cast(CompletionCreateParamsStreaming, params)) + + return CLIChatCompletion._create(cast(CompletionCreateParamsNonStreaming, params)) + + @staticmethod + def _create(params: CompletionCreateParamsNonStreaming) -> None: + completion = get_client().chat.completions.create(**params) + should_print_header = len(completion.choices) > 1 + for choice in completion.choices: + if should_print_header: + sys.stdout.write("===== Chat Completion {} =====\n".format(choice.index)) + + content = choice.message.content if choice.message.content is not None else "None" + 
sys.stdout.write(content) + + if should_print_header or not content.endswith("\n"): + sys.stdout.write("\n") + + sys.stdout.flush() + + @staticmethod + def _stream_create(params: CompletionCreateParamsStreaming) -> None: + # cast is required for mypy + stream = cast( # pyright: ignore[reportUnnecessaryCast] + Stream[ChatCompletionChunk], get_client().chat.completions.create(**params) + ) + for chunk in stream: + should_print_header = len(chunk.choices) > 1 + for choice in chunk.choices: + if should_print_header: + sys.stdout.write("===== Chat Completion {} =====\n".format(choice.index)) + + content = choice.delta.content or "" + sys.stdout.write(content) + + if should_print_header: + sys.stdout.write("\n") + + sys.stdout.flush() + + sys.stdout.write("\n") diff --git a/.venv/Lib/site-packages/openai/cli/_api/completions.py b/.venv/Lib/site-packages/openai/cli/_api/completions.py new file mode 100644 index 00000000..cbdb35bf --- /dev/null +++ b/.venv/Lib/site-packages/openai/cli/_api/completions.py @@ -0,0 +1,173 @@ +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Optional, cast +from argparse import ArgumentParser +from functools import partial + +from openai.types.completion import Completion + +from .._utils import get_client +from ..._types import NOT_GIVEN, NotGivenOr +from ..._utils import is_given +from .._errors import CLIError +from .._models import BaseModel +from ..._streaming import Stream + +if TYPE_CHECKING: + from argparse import _SubParsersAction + + +def register(subparser: _SubParsersAction[ArgumentParser]) -> None: + sub = subparser.add_parser("completions.create") + + # Required + sub.add_argument( + "-m", + "--model", + help="The model to use", + required=True, + ) + + # Optional + sub.add_argument("-p", "--prompt", help="An optional prompt to complete from") + sub.add_argument("--stream", help="Stream tokens as they're ready.", action="store_true") + sub.add_argument("-M", "--max-tokens", help="The maximum 
number of tokens to generate", type=int) + sub.add_argument( + "-t", + "--temperature", + help="""What sampling temperature to use. Higher values means the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer. + +Mutually exclusive with `top_p`.""", + type=float, + ) + sub.add_argument( + "-P", + "--top_p", + help="""An alternative to sampling with temperature, called nucleus sampling, where the considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10%% probability mass are considered. + + Mutually exclusive with `temperature`.""", + type=float, + ) + sub.add_argument( + "-n", + "--n", + help="How many sub-completions to generate for each prompt.", + type=int, + ) + sub.add_argument( + "--logprobs", + help="Include the log probabilities on the `logprobs` most likely tokens, as well the chosen tokens. So for example, if `logprobs` is 10, the API will return a list of the 10 most likely tokens. If `logprobs` is 0, only the chosen tokens will have logprobs returned.", + type=int, + ) + sub.add_argument( + "--best_of", + help="Generates `best_of` completions server-side and returns the 'best' (the one with the highest log probability per token). 
Results cannot be streamed.", + type=int, + ) + sub.add_argument( + "--echo", + help="Echo back the prompt in addition to the completion", + action="store_true", + ) + sub.add_argument( + "--frequency_penalty", + help="Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.", + type=float, + ) + sub.add_argument( + "--presence_penalty", + help="Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.", + type=float, + ) + sub.add_argument("--suffix", help="The suffix that comes after a completion of inserted text.") + sub.add_argument("--stop", help="A stop sequence at which to stop generating tokens.") + sub.add_argument( + "--user", + help="A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.", + ) + # TODO: add support for logit_bias + sub.set_defaults(func=CLICompletions.create, args_model=CLICompletionCreateArgs) + + +class CLICompletionCreateArgs(BaseModel): + model: str + stream: bool = False + + prompt: Optional[str] = None + n: NotGivenOr[int] = NOT_GIVEN + stop: NotGivenOr[str] = NOT_GIVEN + user: NotGivenOr[str] = NOT_GIVEN + echo: NotGivenOr[bool] = NOT_GIVEN + suffix: NotGivenOr[str] = NOT_GIVEN + best_of: NotGivenOr[int] = NOT_GIVEN + top_p: NotGivenOr[float] = NOT_GIVEN + logprobs: NotGivenOr[int] = NOT_GIVEN + max_tokens: NotGivenOr[int] = NOT_GIVEN + temperature: NotGivenOr[float] = NOT_GIVEN + presence_penalty: NotGivenOr[float] = NOT_GIVEN + frequency_penalty: NotGivenOr[float] = NOT_GIVEN + + +class CLICompletions: + @staticmethod + def create(args: CLICompletionCreateArgs) -> None: + if is_given(args.n) and args.n > 1 and args.stream: + raise CLIError("Can't stream completions with n>1 with the current CLI") + + make_request = partial( + get_client().completions.create, + n=args.n, + echo=args.echo, + 
stop=args.stop, + user=args.user, + model=args.model, + top_p=args.top_p, + prompt=args.prompt, + suffix=args.suffix, + best_of=args.best_of, + logprobs=args.logprobs, + max_tokens=args.max_tokens, + temperature=args.temperature, + presence_penalty=args.presence_penalty, + frequency_penalty=args.frequency_penalty, + ) + + if args.stream: + return CLICompletions._stream_create( + # mypy doesn't understand the `partial` function but pyright does + cast(Stream[Completion], make_request(stream=True)) # pyright: ignore[reportUnnecessaryCast] + ) + + return CLICompletions._create(make_request()) + + @staticmethod + def _create(completion: Completion) -> None: + should_print_header = len(completion.choices) > 1 + for choice in completion.choices: + if should_print_header: + sys.stdout.write("===== Completion {} =====\n".format(choice.index)) + + sys.stdout.write(choice.text) + + if should_print_header or not choice.text.endswith("\n"): + sys.stdout.write("\n") + + sys.stdout.flush() + + @staticmethod + def _stream_create(stream: Stream[Completion]) -> None: + for completion in stream: + should_print_header = len(completion.choices) > 1 + for choice in sorted(completion.choices, key=lambda c: c.index): + if should_print_header: + sys.stdout.write("===== Chat Completion {} =====\n".format(choice.index)) + + sys.stdout.write(choice.text) + + if should_print_header: + sys.stdout.write("\n") + + sys.stdout.flush() + + sys.stdout.write("\n") diff --git a/.venv/Lib/site-packages/openai/cli/_api/files.py b/.venv/Lib/site-packages/openai/cli/_api/files.py new file mode 100644 index 00000000..5f3631b2 --- /dev/null +++ b/.venv/Lib/site-packages/openai/cli/_api/files.py @@ -0,0 +1,80 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, cast +from argparse import ArgumentParser + +from .._utils import get_client, print_model +from .._models import BaseModel +from .._progress import BufferReader + +if TYPE_CHECKING: + from argparse import _SubParsersAction 
+ + +def register(subparser: _SubParsersAction[ArgumentParser]) -> None: + sub = subparser.add_parser("files.create") + + sub.add_argument( + "-f", + "--file", + required=True, + help="File to upload", + ) + sub.add_argument( + "-p", + "--purpose", + help="Why are you uploading this file? (see https://platform.openai.com/docs/api-reference/ for purposes)", + required=True, + ) + sub.set_defaults(func=CLIFile.create, args_model=CLIFileCreateArgs) + + sub = subparser.add_parser("files.retrieve") + sub.add_argument("-i", "--id", required=True, help="The files ID") + sub.set_defaults(func=CLIFile.get, args_model=CLIFileCreateArgs) + + sub = subparser.add_parser("files.delete") + sub.add_argument("-i", "--id", required=True, help="The files ID") + sub.set_defaults(func=CLIFile.delete, args_model=CLIFileCreateArgs) + + sub = subparser.add_parser("files.list") + sub.set_defaults(func=CLIFile.list) + + +class CLIFileIDArgs(BaseModel): + id: str + + +class CLIFileCreateArgs(BaseModel): + file: str + purpose: str + + +class CLIFile: + @staticmethod + def create(args: CLIFileCreateArgs) -> None: + with open(args.file, "rb") as file_reader: + buffer_reader = BufferReader(file_reader.read(), desc="Upload progress") + + file = get_client().files.create( + file=(args.file, buffer_reader), + # casts required because the API is typed for enums + # but we don't want to validate that here for forwards-compat + purpose=cast(Any, args.purpose), + ) + print_model(file) + + @staticmethod + def get(args: CLIFileIDArgs) -> None: + file = get_client().files.retrieve(file_id=args.id) + print_model(file) + + @staticmethod + def delete(args: CLIFileIDArgs) -> None: + file = get_client().files.delete(file_id=args.id) + print_model(file) + + @staticmethod + def list() -> None: + files = get_client().files.list() + for file in files: + print_model(file) diff --git a/.venv/Lib/site-packages/openai/cli/_api/image.py b/.venv/Lib/site-packages/openai/cli/_api/image.py new file mode 100644 index 
00000000..3e2a0a90 --- /dev/null +++ b/.venv/Lib/site-packages/openai/cli/_api/image.py @@ -0,0 +1,139 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, cast +from argparse import ArgumentParser + +from .._utils import get_client, print_model +from ..._types import NOT_GIVEN, NotGiven, NotGivenOr +from .._models import BaseModel +from .._progress import BufferReader + +if TYPE_CHECKING: + from argparse import _SubParsersAction + + +def register(subparser: _SubParsersAction[ArgumentParser]) -> None: + sub = subparser.add_parser("images.generate") + sub.add_argument("-m", "--model", type=str) + sub.add_argument("-p", "--prompt", type=str, required=True) + sub.add_argument("-n", "--num-images", type=int, default=1) + sub.add_argument("-s", "--size", type=str, default="1024x1024", help="Size of the output image") + sub.add_argument("--response-format", type=str, default="url") + sub.set_defaults(func=CLIImage.create, args_model=CLIImageCreateArgs) + + sub = subparser.add_parser("images.edit") + sub.add_argument("-m", "--model", type=str) + sub.add_argument("-p", "--prompt", type=str, required=True) + sub.add_argument("-n", "--num-images", type=int, default=1) + sub.add_argument( + "-I", + "--image", + type=str, + required=True, + help="Image to modify. Should be a local path and a PNG encoded image.", + ) + sub.add_argument("-s", "--size", type=str, default="1024x1024", help="Size of the output image") + sub.add_argument("--response-format", type=str, default="url") + sub.add_argument( + "-M", + "--mask", + type=str, + required=False, + help="Path to a mask image. It should be the same size as the image you're editing and a RGBA PNG image. 
The Alpha channel acts as the mask.", + ) + sub.set_defaults(func=CLIImage.edit, args_model=CLIImageEditArgs) + + sub = subparser.add_parser("images.create_variation") + sub.add_argument("-m", "--model", type=str) + sub.add_argument("-n", "--num-images", type=int, default=1) + sub.add_argument( + "-I", + "--image", + type=str, + required=True, + help="Image to modify. Should be a local path and a PNG encoded image.", + ) + sub.add_argument("-s", "--size", type=str, default="1024x1024", help="Size of the output image") + sub.add_argument("--response-format", type=str, default="url") + sub.set_defaults(func=CLIImage.create_variation, args_model=CLIImageCreateVariationArgs) + + +class CLIImageCreateArgs(BaseModel): + prompt: str + num_images: int + size: str + response_format: str + model: NotGivenOr[str] = NOT_GIVEN + + +class CLIImageCreateVariationArgs(BaseModel): + image: str + num_images: int + size: str + response_format: str + model: NotGivenOr[str] = NOT_GIVEN + + +class CLIImageEditArgs(BaseModel): + image: str + num_images: int + size: str + response_format: str + prompt: str + mask: NotGivenOr[str] = NOT_GIVEN + model: NotGivenOr[str] = NOT_GIVEN + + +class CLIImage: + @staticmethod + def create(args: CLIImageCreateArgs) -> None: + image = get_client().images.generate( + model=args.model, + prompt=args.prompt, + n=args.num_images, + # casts required because the API is typed for enums + # but we don't want to validate that here for forwards-compat + size=cast(Any, args.size), + response_format=cast(Any, args.response_format), + ) + print_model(image) + + @staticmethod + def create_variation(args: CLIImageCreateVariationArgs) -> None: + with open(args.image, "rb") as file_reader: + buffer_reader = BufferReader(file_reader.read(), desc="Upload progress") + + image = get_client().images.create_variation( + model=args.model, + image=("image", buffer_reader), + n=args.num_images, + # casts required because the API is typed for enums + # but we don't want to 
validate that here for forwards-compat + size=cast(Any, args.size), + response_format=cast(Any, args.response_format), + ) + print_model(image) + + @staticmethod + def edit(args: CLIImageEditArgs) -> None: + with open(args.image, "rb") as file_reader: + buffer_reader = BufferReader(file_reader.read(), desc="Image upload progress") + + if isinstance(args.mask, NotGiven): + mask: NotGivenOr[BufferReader] = NOT_GIVEN + else: + with open(args.mask, "rb") as file_reader: + mask = BufferReader(file_reader.read(), desc="Mask progress") + + image = get_client().images.edit( + model=args.model, + prompt=args.prompt, + image=("image", buffer_reader), + n=args.num_images, + mask=("mask", mask) if not isinstance(mask, NotGiven) else mask, + # casts required because the API is typed for enums + # but we don't want to validate that here for forwards-compat + size=cast(Any, args.size), + response_format=cast(Any, args.response_format), + ) + print_model(image) diff --git a/.venv/Lib/site-packages/openai/cli/_api/models.py b/.venv/Lib/site-packages/openai/cli/_api/models.py new file mode 100644 index 00000000..017218fa --- /dev/null +++ b/.venv/Lib/site-packages/openai/cli/_api/models.py @@ -0,0 +1,45 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING +from argparse import ArgumentParser + +from .._utils import get_client, print_model +from .._models import BaseModel + +if TYPE_CHECKING: + from argparse import _SubParsersAction + + +def register(subparser: _SubParsersAction[ArgumentParser]) -> None: + sub = subparser.add_parser("models.list") + sub.set_defaults(func=CLIModels.list) + + sub = subparser.add_parser("models.retrieve") + sub.add_argument("-i", "--id", required=True, help="The model ID") + sub.set_defaults(func=CLIModels.get, args_model=CLIModelIDArgs) + + sub = subparser.add_parser("models.delete") + sub.add_argument("-i", "--id", required=True, help="The model ID") + sub.set_defaults(func=CLIModels.delete, args_model=CLIModelIDArgs) + + +class 
CLIModelIDArgs(BaseModel): + id: str + + +class CLIModels: + @staticmethod + def get(args: CLIModelIDArgs) -> None: + model = get_client().models.retrieve(model=args.id) + print_model(model) + + @staticmethod + def delete(args: CLIModelIDArgs) -> None: + model = get_client().models.delete(model=args.id) + print_model(model) + + @staticmethod + def list() -> None: + models = get_client().models.list() + for model in models: + print_model(model) diff --git a/.venv/Lib/site-packages/openai/cli/_cli.py b/.venv/Lib/site-packages/openai/cli/_cli.py new file mode 100644 index 00000000..72e5c923 --- /dev/null +++ b/.venv/Lib/site-packages/openai/cli/_cli.py @@ -0,0 +1,234 @@ +from __future__ import annotations + +import sys +import logging +import argparse +from typing import Any, List, Type, Optional +from typing_extensions import ClassVar + +import httpx +import pydantic + +import openai + +from . import _tools +from .. import _ApiType, __version__ +from ._api import register_commands +from ._utils import can_use_http2 +from .._types import ProxiesDict +from ._errors import CLIError, display_error +from .._compat import PYDANTIC_V2, ConfigDict, model_parse +from .._models import BaseModel +from .._exceptions import APIError + +logger = logging.getLogger() +formatter = logging.Formatter("[%(asctime)s] %(message)s") +handler = logging.StreamHandler(sys.stderr) +handler.setFormatter(formatter) +logger.addHandler(handler) + + +class Arguments(BaseModel): + if PYDANTIC_V2: + model_config: ClassVar[ConfigDict] = ConfigDict( + extra="ignore", + ) + else: + + class Config(pydantic.BaseConfig): # type: ignore + extra: Any = pydantic.Extra.ignore # type: ignore + + verbosity: int + version: Optional[str] = None + + api_key: Optional[str] + api_base: Optional[str] + organization: Optional[str] + proxy: Optional[List[str]] + api_type: Optional[_ApiType] = None + api_version: Optional[str] = None + + # azure + azure_endpoint: Optional[str] = None + azure_ad_token: Optional[str] = 
None + + # internal, set by subparsers to parse their specific args + args_model: Optional[Type[BaseModel]] = None + + # internal, used so that subparsers can forward unknown arguments + unknown_args: List[str] = [] + allow_unknown_args: bool = False + + +def _build_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser(description=None, prog="openai") + parser.add_argument( + "-v", + "--verbose", + action="count", + dest="verbosity", + default=0, + help="Set verbosity.", + ) + parser.add_argument("-b", "--api-base", help="What API base url to use.") + parser.add_argument("-k", "--api-key", help="What API key to use.") + parser.add_argument("-p", "--proxy", nargs="+", help="What proxy to use.") + parser.add_argument( + "-o", + "--organization", + help="Which organization to run as (will use your default organization if not specified)", + ) + parser.add_argument( + "-t", + "--api-type", + type=str, + choices=("openai", "azure"), + help="The backend API to call, must be `openai` or `azure`", + ) + parser.add_argument( + "--api-version", + help="The Azure API version, e.g. 'https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#rest-api-versioning'", + ) + + # azure + parser.add_argument( + "--azure-endpoint", + help="The Azure endpoint, e.g. 
'https://endpoint.openai.azure.com'", + ) + parser.add_argument( + "--azure-ad-token", + help="A token from Azure Active Directory, https://www.microsoft.com/en-us/security/business/identity-access/microsoft-entra-id", + ) + + # prints the package version + parser.add_argument( + "-V", + "--version", + action="version", + version="%(prog)s " + __version__, + ) + + def help() -> None: + parser.print_help() + + parser.set_defaults(func=help) + + subparsers = parser.add_subparsers() + sub_api = subparsers.add_parser("api", help="Direct API calls") + + register_commands(sub_api) + + sub_tools = subparsers.add_parser("tools", help="Client side tools for convenience") + _tools.register_commands(sub_tools, subparsers) + + return parser + + +def main() -> int: + try: + _main() + except (APIError, CLIError, pydantic.ValidationError) as err: + display_error(err) + return 1 + except KeyboardInterrupt: + sys.stderr.write("\n") + return 1 + return 0 + + +def _parse_args(parser: argparse.ArgumentParser) -> tuple[argparse.Namespace, Arguments, list[str]]: + # argparse by default will strip out the `--` but we want to keep it for unknown arguments + if "--" in sys.argv: + idx = sys.argv.index("--") + known_args = sys.argv[1:idx] + unknown_args = sys.argv[idx:] + else: + known_args = sys.argv[1:] + unknown_args = [] + + parsed, remaining_unknown = parser.parse_known_args(known_args) + + # append any remaining unknown arguments from the initial parsing + remaining_unknown.extend(unknown_args) + + args = model_parse(Arguments, vars(parsed)) + if not args.allow_unknown_args: + # we have to parse twice to ensure any unknown arguments + # result in an error if that behaviour is desired + parser.parse_args() + + return parsed, args, remaining_unknown + + +def _main() -> None: + parser = _build_parser() + parsed, args, unknown = _parse_args(parser) + + if args.verbosity != 0: + sys.stderr.write("Warning: --verbosity isn't supported yet\n") + + proxies: ProxiesDict = {} + if args.proxy is 
not None: + for proxy in args.proxy: + key = "https://" if proxy.startswith("https") else "http://" + if key in proxies: + raise CLIError(f"Multiple {key} proxies given - only the last one would be used") + + proxies[key] = proxy + + http_client = httpx.Client( + proxies=proxies or None, + http2=can_use_http2(), + ) + openai.http_client = http_client + + if args.organization: + openai.organization = args.organization + + if args.api_key: + openai.api_key = args.api_key + + if args.api_base: + openai.base_url = args.api_base + + # azure + if args.api_type is not None: + openai.api_type = args.api_type + + if args.azure_endpoint is not None: + openai.azure_endpoint = args.azure_endpoint + + if args.api_version is not None: + openai.api_version = args.api_version + + if args.azure_ad_token is not None: + openai.azure_ad_token = args.azure_ad_token + + try: + if args.args_model: + parsed.func( + model_parse( + args.args_model, + { + **{ + # we omit None values so that they can be defaulted to `NotGiven` + # and we'll strip it from the API request + key: value + for key, value in vars(parsed).items() + if value is not None + }, + "unknown_args": unknown, + }, + ) + ) + else: + parsed.func() + finally: + try: + http_client.close() + except Exception: + pass + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/.venv/Lib/site-packages/openai/cli/_errors.py b/.venv/Lib/site-packages/openai/cli/_errors.py new file mode 100644 index 00000000..2bf06070 --- /dev/null +++ b/.venv/Lib/site-packages/openai/cli/_errors.py @@ -0,0 +1,23 @@ +from __future__ import annotations + +import sys + +import pydantic + +from ._utils import Colors, organization_info +from .._exceptions import APIError, OpenAIError + + +class CLIError(OpenAIError): + ... + + +class SilentCLIError(CLIError): + ... 
+ + +def display_error(err: CLIError | APIError | pydantic.ValidationError) -> None: + if isinstance(err, SilentCLIError): + return + + sys.stderr.write("{}{}Error:{} {}\n".format(organization_info(), Colors.FAIL, Colors.ENDC, err)) diff --git a/.venv/Lib/site-packages/openai/cli/_models.py b/.venv/Lib/site-packages/openai/cli/_models.py new file mode 100644 index 00000000..5583db26 --- /dev/null +++ b/.venv/Lib/site-packages/openai/cli/_models.py @@ -0,0 +1,17 @@ +from typing import Any +from typing_extensions import ClassVar + +import pydantic + +from .. import _models +from .._compat import PYDANTIC_V2, ConfigDict + + +class BaseModel(_models.BaseModel): + if PYDANTIC_V2: + model_config: ClassVar[ConfigDict] = ConfigDict(extra="ignore", arbitrary_types_allowed=True) + else: + + class Config(pydantic.BaseConfig): # type: ignore + extra: Any = pydantic.Extra.ignore # type: ignore + arbitrary_types_allowed: bool = True diff --git a/.venv/Lib/site-packages/openai/cli/_progress.py b/.venv/Lib/site-packages/openai/cli/_progress.py new file mode 100644 index 00000000..8a7f2525 --- /dev/null +++ b/.venv/Lib/site-packages/openai/cli/_progress.py @@ -0,0 +1,59 @@ +from __future__ import annotations + +import io +from typing import Callable +from typing_extensions import override + + +class CancelledError(Exception): + def __init__(self, msg: str) -> None: + self.msg = msg + super().__init__(msg) + + @override + def __str__(self) -> str: + return self.msg + + __repr__ = __str__ + + +class BufferReader(io.BytesIO): + def __init__(self, buf: bytes = b"", desc: str | None = None) -> None: + super().__init__(buf) + self._len = len(buf) + self._progress = 0 + self._callback = progress(len(buf), desc=desc) + + def __len__(self) -> int: + return self._len + + @override + def read(self, n: int | None = -1) -> bytes: + chunk = io.BytesIO.read(self, n) + self._progress += len(chunk) + + try: + self._callback(self._progress) + except Exception as e: # catches exception from the 
callback + raise CancelledError("The upload was cancelled: {}".format(e)) from e + + return chunk + + +def progress(total: float, desc: str | None) -> Callable[[float], None]: + import tqdm + + meter = tqdm.tqdm(total=total, unit_scale=True, desc=desc) + + def incr(progress: float) -> None: + meter.n = progress + if progress == total: + meter.close() + else: + meter.refresh() + + return incr + + +def MB(i: int) -> int: + return int(i // 1024**2) diff --git a/.venv/Lib/site-packages/openai/cli/_tools/__init__.py b/.venv/Lib/site-packages/openai/cli/_tools/__init__.py new file mode 100644 index 00000000..56a0260a --- /dev/null +++ b/.venv/Lib/site-packages/openai/cli/_tools/__init__.py @@ -0,0 +1 @@ +from ._main import register_commands as register_commands diff --git a/.venv/Lib/site-packages/openai/cli/_tools/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/cli/_tools/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..85a3e5cc Binary files /dev/null and b/.venv/Lib/site-packages/openai/cli/_tools/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/cli/_tools/__pycache__/_main.cpython-311.pyc b/.venv/Lib/site-packages/openai/cli/_tools/__pycache__/_main.cpython-311.pyc new file mode 100644 index 00000000..becb39bb Binary files /dev/null and b/.venv/Lib/site-packages/openai/cli/_tools/__pycache__/_main.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/cli/_tools/__pycache__/fine_tunes.cpython-311.pyc b/.venv/Lib/site-packages/openai/cli/_tools/__pycache__/fine_tunes.cpython-311.pyc new file mode 100644 index 00000000..4beba828 Binary files /dev/null and b/.venv/Lib/site-packages/openai/cli/_tools/__pycache__/fine_tunes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/cli/_tools/__pycache__/migrate.cpython-311.pyc b/.venv/Lib/site-packages/openai/cli/_tools/__pycache__/migrate.cpython-311.pyc new file mode 100644 index 00000000..f427797e Binary files 
/dev/null and b/.venv/Lib/site-packages/openai/cli/_tools/__pycache__/migrate.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/cli/_tools/_main.py b/.venv/Lib/site-packages/openai/cli/_tools/_main.py new file mode 100644 index 00000000..bd6cda40 --- /dev/null +++ b/.venv/Lib/site-packages/openai/cli/_tools/_main.py @@ -0,0 +1,17 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING +from argparse import ArgumentParser + +from . import migrate, fine_tunes + +if TYPE_CHECKING: + from argparse import _SubParsersAction + + +def register_commands(parser: ArgumentParser, subparser: _SubParsersAction[ArgumentParser]) -> None: + migrate.register(subparser) + + namespaced = parser.add_subparsers(title="Tools", help="Convenience client side tools") + + fine_tunes.register(namespaced) diff --git a/.venv/Lib/site-packages/openai/cli/_tools/fine_tunes.py b/.venv/Lib/site-packages/openai/cli/_tools/fine_tunes.py new file mode 100644 index 00000000..2128b889 --- /dev/null +++ b/.venv/Lib/site-packages/openai/cli/_tools/fine_tunes.py @@ -0,0 +1,63 @@ +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING +from argparse import ArgumentParser + +from .._models import BaseModel +from ...lib._validators import ( + get_validators, + write_out_file, + read_any_format, + apply_validators, + apply_necessary_remediation, +) + +if TYPE_CHECKING: + from argparse import _SubParsersAction + + +def register(subparser: _SubParsersAction[ArgumentParser]) -> None: + sub = subparser.add_parser("fine_tunes.prepare_data") + sub.add_argument( + "-f", + "--file", + required=True, + help="JSONL, JSON, CSV, TSV, TXT or XLSX file containing prompt-completion examples to be analyzed." + "This should be the local file path.", + ) + sub.add_argument( + "-q", + "--quiet", + required=False, + action="store_true", + help="Auto accepts all suggestions, without asking for user input. 
To be used within scripts.", + ) + sub.set_defaults(func=prepare_data, args_model=PrepareDataArgs) + + +class PrepareDataArgs(BaseModel): + file: str + + quiet: bool + + +def prepare_data(args: PrepareDataArgs) -> None: + sys.stdout.write("Analyzing...\n") + fname = args.file + auto_accept = args.quiet + df, remediation = read_any_format(fname) + apply_necessary_remediation(None, remediation) + + validators = get_validators() + + assert df is not None + + apply_validators( + df, + fname, + remediation, + validators, + auto_accept, + write_out_file_func=write_out_file, + ) diff --git a/.venv/Lib/site-packages/openai/cli/_tools/migrate.py b/.venv/Lib/site-packages/openai/cli/_tools/migrate.py new file mode 100644 index 00000000..53073b86 --- /dev/null +++ b/.venv/Lib/site-packages/openai/cli/_tools/migrate.py @@ -0,0 +1,181 @@ +from __future__ import annotations + +import os +import sys +import json +import shutil +import tarfile +import platform +import subprocess +from typing import TYPE_CHECKING, List +from pathlib import Path +from argparse import ArgumentParser + +import httpx + +from .._errors import CLIError, SilentCLIError +from .._models import BaseModel + +if TYPE_CHECKING: + from argparse import _SubParsersAction + + +def register(subparser: _SubParsersAction[ArgumentParser]) -> None: + sub = subparser.add_parser("migrate") + sub.set_defaults(func=migrate, args_model=MigrateArgs, allow_unknown_args=True) + + sub = subparser.add_parser("grit") + sub.set_defaults(func=grit, args_model=GritArgs, allow_unknown_args=True) + + +class GritArgs(BaseModel): + # internal + unknown_args: List[str] = [] + + +def grit(args: GritArgs) -> None: + grit_path = install() + + try: + subprocess.check_call([grit_path, *args.unknown_args]) + except subprocess.CalledProcessError: + # stdout and stderr are forwarded by subprocess so an error will already + # have been displayed + raise SilentCLIError() from None + + +class MigrateArgs(BaseModel): + # internal + unknown_args: 
List[str] = [] + + +def migrate(args: MigrateArgs) -> None: + grit_path = install() + + try: + subprocess.check_call([grit_path, "apply", "openai", *args.unknown_args]) + except subprocess.CalledProcessError: + # stdout and stderr are forwarded by subprocess so an error will already + # have been displayed + raise SilentCLIError() from None + + +# handles downloading the Grit CLI until they provide their own PyPi package + +KEYGEN_ACCOUNT = "custodian-dev" + + +def _cache_dir() -> Path: + xdg = os.environ.get("XDG_CACHE_HOME") + if xdg is not None: + return Path(xdg) + + return Path.home() / ".cache" + + +def _debug(message: str) -> None: + if not os.environ.get("DEBUG"): + return + + sys.stdout.write(f"[DEBUG]: {message}\n") + + +def install() -> Path: + """Installs the Grit CLI and returns the location of the binary""" + if sys.platform == "win32": + raise CLIError("Windows is not supported yet in the migration CLI") + + platform = "macos" if sys.platform == "darwin" else "linux" + + dir_name = _cache_dir() / "openai-python" + install_dir = dir_name / ".install" + target_dir = install_dir / "bin" + + target_path = target_dir / "marzano" + temp_file = target_dir / "marzano.tmp" + + if target_path.exists(): + _debug(f"{target_path} already exists") + sys.stdout.flush() + return target_path + + _debug(f"Using Grit CLI path: {target_path}") + + target_dir.mkdir(parents=True, exist_ok=True) + + if temp_file.exists(): + temp_file.unlink() + + arch = _get_arch() + _debug(f"Using architecture {arch}") + + file_name = f"marzano-{platform}-{arch}" + meta_url = f"https://api.keygen.sh/v1/accounts/{KEYGEN_ACCOUNT}/artifacts/{file_name}" + + sys.stdout.write(f"Retrieving Grit CLI metadata from {meta_url}\n") + with httpx.Client() as client: + response = client.get(meta_url) # pyright: ignore[reportUnknownMemberType] + + data = response.json() + errors = data.get("errors") + if errors: + for error in errors: + sys.stdout.write(f"{error}\n") + + raise CLIError("Could not locate 
Grit CLI binary - see above errors") + + write_manifest(install_dir, data["data"]["relationships"]["release"]["data"]["id"]) + + link = data["data"]["links"]["redirect"] + _debug(f"Redirect URL {link}") + + download_response = client.get(link) # pyright: ignore[reportUnknownMemberType] + with open(temp_file, "wb") as file: + for chunk in download_response.iter_bytes(): + file.write(chunk) + + unpacked_dir = target_dir / "cli-bin" + unpacked_dir.mkdir(parents=True, exist_ok=True) + + with tarfile.open(temp_file, "r:gz") as archive: + archive.extractall(unpacked_dir, filter="data") + + for item in unpacked_dir.iterdir(): + item.rename(target_dir / item.name) + + shutil.rmtree(unpacked_dir) + os.remove(temp_file) + os.chmod(target_path, 0o755) + + sys.stdout.flush() + + return target_path + + +def _get_arch() -> str: + architecture = platform.machine().lower() + + # Map the architecture names to Node.js equivalents + arch_map = { + "x86_64": "x64", + "amd64": "x64", + "armv7l": "arm", + "aarch64": "arm64", + } + + return arch_map.get(architecture, architecture) + + +def write_manifest(install_path: Path, release: str) -> None: + manifest = { + "installPath": str(install_path), + "binaries": { + "marzano": { + "name": "marzano", + "release": release, + }, + }, + } + manifest_path = Path(install_path) / "manifests.json" + with open(manifest_path, "w") as f: + json.dump(manifest, f, indent=2) diff --git a/.venv/Lib/site-packages/openai/cli/_utils.py b/.venv/Lib/site-packages/openai/cli/_utils.py new file mode 100644 index 00000000..673eed61 --- /dev/null +++ b/.venv/Lib/site-packages/openai/cli/_utils.py @@ -0,0 +1,45 @@ +from __future__ import annotations + +import sys + +import openai + +from .. 
import OpenAI, _load_client +from .._compat import model_json +from .._models import BaseModel + + +class Colors: + HEADER = "\033[95m" + OKBLUE = "\033[94m" + OKGREEN = "\033[92m" + WARNING = "\033[93m" + FAIL = "\033[91m" + ENDC = "\033[0m" + BOLD = "\033[1m" + UNDERLINE = "\033[4m" + + +def get_client() -> OpenAI: + return _load_client() + + +def organization_info() -> str: + organization = openai.organization + if organization is not None: + return "[organization={}] ".format(organization) + + return "" + + +def print_model(model: BaseModel) -> None: + sys.stdout.write(model_json(model, indent=2) + "\n") + + +def can_use_http2() -> bool: + try: + import h2 # type: ignore # noqa + except ImportError: + return False + + return True diff --git a/.venv/Lib/site-packages/openai/lib/.keep b/.venv/Lib/site-packages/openai/lib/.keep new file mode 100644 index 00000000..5e2c99fd --- /dev/null +++ b/.venv/Lib/site-packages/openai/lib/.keep @@ -0,0 +1,4 @@ +File generated from our OpenAPI spec by Stainless. + +This directory can be used to store custom files to expand the SDK. +It is ignored by Stainless code generation and its content (other than this keep file) won't be touched. 
\ No newline at end of file diff --git a/.venv/Lib/site-packages/openai/lib/__pycache__/_old_api.cpython-311.pyc b/.venv/Lib/site-packages/openai/lib/__pycache__/_old_api.cpython-311.pyc new file mode 100644 index 00000000..f216495b Binary files /dev/null and b/.venv/Lib/site-packages/openai/lib/__pycache__/_old_api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/lib/__pycache__/_validators.cpython-311.pyc b/.venv/Lib/site-packages/openai/lib/__pycache__/_validators.cpython-311.pyc new file mode 100644 index 00000000..51bd2064 Binary files /dev/null and b/.venv/Lib/site-packages/openai/lib/__pycache__/_validators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/lib/__pycache__/azure.cpython-311.pyc b/.venv/Lib/site-packages/openai/lib/__pycache__/azure.cpython-311.pyc new file mode 100644 index 00000000..b9e5a97b Binary files /dev/null and b/.venv/Lib/site-packages/openai/lib/__pycache__/azure.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/lib/_old_api.py b/.venv/Lib/site-packages/openai/lib/_old_api.py new file mode 100644 index 00000000..929c87e8 --- /dev/null +++ b/.venv/Lib/site-packages/openai/lib/_old_api.py @@ -0,0 +1,72 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any +from typing_extensions import override + +from .._utils import LazyProxy +from .._exceptions import OpenAIError + +INSTRUCTIONS = """ + +You tried to access openai.{symbol}, but this is no longer supported in openai>=1.0.0 - see the README at https://github.com/openai/openai-python for the API. + +You can run `openai migrate` to automatically upgrade your codebase to use the 1.0.0 interface. + +Alternatively, you can pin your installation to the old version, e.g. 
`pip install openai==0.28` + +A detailed migration guide is available here: https://github.com/openai/openai-python/discussions/742 +""" + + +class APIRemovedInV1(OpenAIError): + def __init__(self, *, symbol: str) -> None: + super().__init__(INSTRUCTIONS.format(symbol=symbol)) + + +class APIRemovedInV1Proxy(LazyProxy[Any]): + def __init__(self, *, symbol: str) -> None: + super().__init__() + self._symbol = symbol + + @override + def __load__(self) -> Any: + # return the proxy until it is eventually called so that + # we don't break people that are just checking the attributes + # of a module + return self + + def __call__(self, *_args: Any, **_kwargs: Any) -> Any: + raise APIRemovedInV1(symbol=self._symbol) + + +SYMBOLS = [ + "Edit", + "File", + "Audio", + "Image", + "Model", + "Engine", + "Customer", + "FineTune", + "Embedding", + "Completion", + "Deployment", + "Moderation", + "ErrorObject", + "FineTuningJob", + "ChatCompletion", +] + +# we explicitly tell type checkers that nothing is exported +# from this file so that when we re-export the old symbols +# in `openai/__init__.py` they aren't added to the auto-complete +# suggestions given by editors +if TYPE_CHECKING: + __all__: list[str] = [] +else: + __all__ = SYMBOLS + + +__locals = locals() +for symbol in SYMBOLS: + __locals[symbol] = APIRemovedInV1Proxy(symbol=symbol) diff --git a/.venv/Lib/site-packages/openai/lib/_validators.py b/.venv/Lib/site-packages/openai/lib/_validators.py new file mode 100644 index 00000000..cf24cd22 --- /dev/null +++ b/.venv/Lib/site-packages/openai/lib/_validators.py @@ -0,0 +1,809 @@ +# pyright: basic +from __future__ import annotations + +import os +import sys +from typing import Any, TypeVar, Callable, Optional, NamedTuple +from typing_extensions import TypeAlias + +from .._extras import pandas as pd + + +class Remediation(NamedTuple): + name: str + immediate_msg: Optional[str] = None + necessary_msg: Optional[str] = None + necessary_fn: Optional[Callable[[Any], Any]] = None + 
optional_msg: Optional[str] = None + optional_fn: Optional[Callable[[Any], Any]] = None + error_msg: Optional[str] = None + + +OptionalDataFrameT = TypeVar("OptionalDataFrameT", bound="Optional[pd.DataFrame]") + + +def num_examples_validator(df: pd.DataFrame) -> Remediation: + """ + This validator will only print out the number of examples and recommend to the user to increase the number of examples if less than 100. + """ + MIN_EXAMPLES = 100 + optional_suggestion = ( + "" + if len(df) >= MIN_EXAMPLES + else ". In general, we recommend having at least a few hundred examples. We've found that performance tends to linearly increase for every doubling of the number of examples" + ) + immediate_msg = f"\n- Your file contains {len(df)} prompt-completion pairs{optional_suggestion}" + return Remediation(name="num_examples", immediate_msg=immediate_msg) + + +def necessary_column_validator(df: pd.DataFrame, necessary_column: str) -> Remediation: + """ + This validator will ensure that the necessary column is present in the dataframe. + """ + + def lower_case_column(df: pd.DataFrame, column: Any) -> pd.DataFrame: + cols = [c for c in df.columns if str(c).lower() == column] + df.rename(columns={cols[0]: column.lower()}, inplace=True) + return df + + immediate_msg = None + necessary_fn = None + necessary_msg = None + error_msg = None + + if necessary_column not in df.columns: + if necessary_column in [str(c).lower() for c in df.columns]: + + def lower_case_column_creator(df: pd.DataFrame) -> pd.DataFrame: + return lower_case_column(df, necessary_column) + + necessary_fn = lower_case_column_creator + immediate_msg = f"\n- The `{necessary_column}` column/key should be lowercase" + necessary_msg = f"Lower case column name to `{necessary_column}`" + else: + error_msg = f"`{necessary_column}` column/key is missing. 
Please make sure you name your columns/keys appropriately, then retry" + + return Remediation( + name="necessary_column", + immediate_msg=immediate_msg, + necessary_msg=necessary_msg, + necessary_fn=necessary_fn, + error_msg=error_msg, + ) + + +def additional_column_validator(df: pd.DataFrame, fields: list[str] = ["prompt", "completion"]) -> Remediation: + """ + This validator will remove additional columns from the dataframe. + """ + additional_columns = [] + necessary_msg = None + immediate_msg = None + necessary_fn = None # type: ignore + + if len(df.columns) > 2: + additional_columns = [c for c in df.columns if c not in fields] + warn_message = "" + for ac in additional_columns: + dups = [c for c in additional_columns if ac in c] + if len(dups) > 0: + warn_message += f"\n WARNING: Some of the additional columns/keys contain `{ac}` in their name. These will be ignored, and the column/key `{ac}` will be used instead. This could also result from a duplicate column/key in the provided file." + immediate_msg = f"\n- The input file should contain exactly two columns/keys per row. Additional columns/keys present are: {additional_columns}{warn_message}" + necessary_msg = f"Remove additional columns/keys: {additional_columns}" + + def necessary_fn(x: Any) -> Any: + return x[fields] + + return Remediation( + name="additional_column", + immediate_msg=immediate_msg, + necessary_msg=necessary_msg, + necessary_fn=necessary_fn, + ) + + +def non_empty_field_validator(df: pd.DataFrame, field: str = "completion") -> Remediation: + """ + This validator will ensure that no completion is empty. + """ + necessary_msg = None + necessary_fn = None # type: ignore + immediate_msg = None + + if df[field].apply(lambda x: x == "").any() or df[field].isnull().any(): + empty_rows = (df[field] == "") | (df[field].isnull()) + empty_indexes = df.reset_index().index[empty_rows].tolist() + immediate_msg = f"\n- `{field}` column/key should not contain empty strings. 
These are rows: {empty_indexes}" + + def necessary_fn(x: Any) -> Any: + return x[x[field] != ""].dropna(subset=[field]) + + necessary_msg = f"Remove {len(empty_indexes)} rows with empty {field}s" + + return Remediation( + name=f"empty_{field}", + immediate_msg=immediate_msg, + necessary_msg=necessary_msg, + necessary_fn=necessary_fn, + ) + + +def duplicated_rows_validator(df: pd.DataFrame, fields: list[str] = ["prompt", "completion"]) -> Remediation: + """ + This validator will suggest to the user to remove duplicate rows if they exist. + """ + duplicated_rows = df.duplicated(subset=fields) + duplicated_indexes = df.reset_index().index[duplicated_rows].tolist() + immediate_msg = None + optional_msg = None + optional_fn = None # type: ignore + + if len(duplicated_indexes) > 0: + immediate_msg = f"\n- There are {len(duplicated_indexes)} duplicated {'-'.join(fields)} sets. These are rows: {duplicated_indexes}" + optional_msg = f"Remove {len(duplicated_indexes)} duplicate rows" + + def optional_fn(x: Any) -> Any: + return x.drop_duplicates(subset=fields) + + return Remediation( + name="duplicated_rows", + immediate_msg=immediate_msg, + optional_msg=optional_msg, + optional_fn=optional_fn, + ) + + +def long_examples_validator(df: pd.DataFrame) -> Remediation: + """ + This validator will suggest to the user to remove examples that are too long. + """ + immediate_msg = None + optional_msg = None + optional_fn = None # type: ignore + + ft_type = infer_task_type(df) + if ft_type != "open-ended generation": + + def get_long_indexes(d: pd.DataFrame) -> Any: + long_examples = d.apply(lambda x: len(x.prompt) + len(x.completion) > 10000, axis=1) + return d.reset_index().index[long_examples].tolist() + + long_indexes = get_long_indexes(df) + + if len(long_indexes) > 0: + immediate_msg = f"\n- There are {len(long_indexes)} examples that are very long. 
These are rows: {long_indexes}\nFor conditional generation, and for classification the examples shouldn't be longer than 2048 tokens." + optional_msg = f"Remove {len(long_indexes)} long examples" + + def optional_fn(x: Any) -> Any: + long_indexes_to_drop = get_long_indexes(x) + if long_indexes != long_indexes_to_drop: + sys.stdout.write( + f"The indices of the long examples has changed as a result of a previously applied recommendation.\nThe {len(long_indexes_to_drop)} long examples to be dropped are now at the following indices: {long_indexes_to_drop}\n" + ) + return x.drop(long_indexes_to_drop) + + return Remediation( + name="long_examples", + immediate_msg=immediate_msg, + optional_msg=optional_msg, + optional_fn=optional_fn, + ) + + +def common_prompt_suffix_validator(df: pd.DataFrame) -> Remediation: + """ + This validator will suggest to add a common suffix to the prompt if one doesn't already exist in case of classification or conditional generation. + """ + error_msg = None + immediate_msg = None + optional_msg = None + optional_fn = None # type: ignore + + # Find a suffix which is not contained within the prompt otherwise + suggested_suffix = "\n\n### =>\n\n" + suffix_options = [ + " ->", + "\n\n###\n\n", + "\n\n===\n\n", + "\n\n---\n\n", + "\n\n===>\n\n", + "\n\n--->\n\n", + ] + for suffix_option in suffix_options: + if suffix_option == " ->": + if df.prompt.str.contains("\n").any(): + continue + if df.prompt.str.contains(suffix_option, regex=False).any(): + continue + suggested_suffix = suffix_option + break + display_suggested_suffix = suggested_suffix.replace("\n", "\\n") + + ft_type = infer_task_type(df) + if ft_type == "open-ended generation": + return Remediation(name="common_suffix") + + def add_suffix(x: Any, suffix: Any) -> Any: + x["prompt"] += suffix + return x + + common_suffix = get_common_xfix(df.prompt, xfix="suffix") + if (df.prompt == common_suffix).all(): + error_msg = f"All prompts are identical: `{common_suffix}`\nConsider leaving the 
prompts blank if you want to do open-ended generation, otherwise ensure prompts are different"
        # ^ tail of common_prompt_suffix_validator's identical-prompts error
        # message; the function's opening lines precede this excerpt.
        return Remediation(name="common_suffix", error_msg=error_msg)

    if common_suffix != "":
        common_suffix_new_line_handled = common_suffix.replace("\n", "\\n")
        immediate_msg = f"\n- All prompts end with suffix `{common_suffix_new_line_handled}`"
        if len(common_suffix) > 10:
            immediate_msg += f". This suffix seems very long. Consider replacing with a shorter suffix, such as `{display_suggested_suffix}`"
        # Warn when the suffix also occurs earlier inside the prompt body
        # (i.e. before the final occurrence that was stripped off above).
        if df.prompt.str[: -len(common_suffix)].str.contains(common_suffix, regex=False).any():
            immediate_msg += f"\n WARNING: Some of your prompts contain the suffix `{common_suffix}` more than once. We strongly suggest that you review your prompts and add a unique suffix"

    else:
        immediate_msg = "\n- Your data does not contain a common separator at the end of your prompts. Having a separator string appended to the end of the prompt makes it clearer to the fine-tuned model where the completion should begin. See https://platform.openai.com/docs/guides/fine-tuning/preparing-your-dataset for more detail and examples. If you intend to do open-ended generation, then you should leave the prompts empty"

    if common_suffix == "":
        optional_msg = f"Add a suffix separator `{display_suggested_suffix}` to all prompts"

        def optional_fn(x: Any) -> Any:
            # Append the suggested separator to every prompt.
            return add_suffix(x, suggested_suffix)

    return Remediation(
        name="common_completion_suffix",
        immediate_msg=immediate_msg,
        optional_msg=optional_msg,
        optional_fn=optional_fn,
        error_msg=error_msg,
    )


def common_prompt_prefix_validator(df: pd.DataFrame) -> Remediation:
    """
    This validator will suggest to remove a common prefix from the prompt if a long one exist.
    """
    # Prefixes longer than this trigger the advisory about instruction-style prompts.
    MAX_PREFIX_LEN = 12

    immediate_msg = None
    optional_msg = None
    optional_fn = None  # type: ignore

    common_prefix = get_common_xfix(df.prompt, xfix="prefix")
    if common_prefix == "":
        return Remediation(name="common_prefix")

    def remove_common_prefix(x: Any, prefix: Any) -> Any:
        # Strip the leading `prefix` characters from every prompt.
        x["prompt"] = x["prompt"].str[len(prefix) :]
        return x

    if (df.prompt == common_prefix).all():
        # already handled by common_suffix_validator
        return Remediation(name="common_prefix")

    if common_prefix != "":
        immediate_msg = f"\n- All prompts start with prefix `{common_prefix}`"
        if MAX_PREFIX_LEN < len(common_prefix):
            immediate_msg += ". Fine-tuning doesn't require the instruction specifying the task, or a few-shot example scenario. Most of the time you should only add the input data into the prompt, and the desired output into the completion"
        optional_msg = f"Remove prefix `{common_prefix}` from all prompts"

        def optional_fn(x: Any) -> Any:
            return remove_common_prefix(x, common_prefix)

    return Remediation(
        name="common_prompt_prefix",
        immediate_msg=immediate_msg,
        optional_msg=optional_msg,
        optional_fn=optional_fn,
    )


def common_completion_prefix_validator(df: pd.DataFrame) -> Remediation:
    """
    This validator will suggest to remove a common prefix from the completion if a long one exist.
    """
    # Only prefixes at least this long are considered worth flagging.
    MAX_PREFIX_LEN = 5

    common_prefix = get_common_xfix(df.completion, xfix="prefix")
    # Remember whether the prefix starts with a space so it can be preserved.
    ws_prefix = len(common_prefix) > 0 and common_prefix[0] == " "
    if len(common_prefix) < MAX_PREFIX_LEN:
        return Remediation(name="common_prefix")

    def remove_common_prefix(x: Any, prefix: Any, ws_prefix: Any) -> Any:
        x["completion"] = x["completion"].str[len(prefix) :]
        if ws_prefix:
            # keep the single whitespace as prefix
            x["completion"] = f" {x['completion']}"
        return x

    if (df.completion == common_prefix).all():
        # already handled by common_suffix_validator
        return Remediation(name="common_prefix")

    immediate_msg = f"\n- All completions start with prefix `{common_prefix}`. Most of the time you should only add the output data into the completion, without any prefix"
    optional_msg = f"Remove prefix `{common_prefix}` from all completions"

    def optional_fn(x: Any) -> Any:
        return remove_common_prefix(x, common_prefix, ws_prefix)

    return Remediation(
        name="common_completion_prefix",
        immediate_msg=immediate_msg,
        optional_msg=optional_msg,
        optional_fn=optional_fn,
    )


def common_completion_suffix_validator(df: pd.DataFrame) -> Remediation:
    """
    This validator will suggest to add a common suffix to the completion if one doesn't already exist in case of classification or conditional generation.
    """
    error_msg = None
    immediate_msg = None
    optional_msg = None
    optional_fn = None  # type: ignore

    ft_type = infer_task_type(df)
    if ft_type == "open-ended generation" or ft_type == "classification":
        return Remediation(name="common_suffix")

    common_suffix = get_common_xfix(df.completion, xfix="suffix")
    if (df.completion == common_suffix).all():
        error_msg = f"All completions are identical: `{common_suffix}`\nEnsure completions are different, otherwise the model will just repeat `{common_suffix}`"
        return Remediation(name="common_suffix", error_msg=error_msg)

    # Find a suffix which is not contained within the completion otherwise
    suggested_suffix = " [END]"
    suffix_options = [
        "\n",
        ".",
        " END",
        "***",
        "+++",
        "&&&",
        "$$$",
        "@@@",
        "%%%",
    ]
    for suffix_option in suffix_options:
        # Skip candidates that already appear somewhere in a completion.
        if df.completion.str.contains(suffix_option, regex=False).any():
            continue
        suggested_suffix = suffix_option
        break
    display_suggested_suffix = suggested_suffix.replace("\n", "\\n")

    def add_suffix(x: Any, suffix: Any) -> Any:
        x["completion"] += suffix
        return x

    if common_suffix != "":
        common_suffix_new_line_handled = common_suffix.replace("\n", "\\n")
        immediate_msg = f"\n- All completions end with suffix `{common_suffix_new_line_handled}`"
        if len(common_suffix) > 10:
            immediate_msg += f". This suffix seems very long. Consider replacing with a shorter suffix, such as `{display_suggested_suffix}`"
        if df.completion.str[: -len(common_suffix)].str.contains(common_suffix, regex=False).any():
            immediate_msg += f"\n WARNING: Some of your completions contain the suffix `{common_suffix}` more than once. We suggest that you review your completions and add a unique ending"

    else:
        immediate_msg = "\n- Your data does not contain a common ending at the end of your completions. Having a common ending string appended to the end of the completion makes it clearer to the fine-tuned model where the completion should end. See https://platform.openai.com/docs/guides/fine-tuning/preparing-your-dataset for more detail and examples."

    if common_suffix == "":
        optional_msg = f"Add a suffix ending `{display_suggested_suffix}` to all completions"

        def optional_fn(x: Any) -> Any:
            return add_suffix(x, suggested_suffix)

    return Remediation(
        name="common_completion_suffix",
        immediate_msg=immediate_msg,
        optional_msg=optional_msg,
        optional_fn=optional_fn,
        error_msg=error_msg,
    )


def completions_space_start_validator(df: pd.DataFrame) -> Remediation:
    """
    This validator will suggest to add a space at the start of the completion if it doesn't already exist. This helps with tokenization.
    """

    def add_space_start(x: Any) -> Any:
        # Prepend a single space to completions that don't already start with one.
        x["completion"] = x["completion"].apply(lambda s: ("" if s.startswith(" ") else " ") + s)
        return x

    optional_msg = None
    optional_fn = None
    immediate_msg = None

    # NOTE(review): `values[0][0]` assumes the first completion is a non-empty
    # string; an empty completion would raise IndexError here — presumably a
    # non-empty-field validator runs earlier. TODO confirm.
    if df.completion.str[:1].nunique() != 1 or df.completion.values[0][0] != " ":
        immediate_msg = "\n- The completion should start with a whitespace character (` `). This tends to produce better results due to the tokenization we use. See https://platform.openai.com/docs/guides/fine-tuning/preparing-your-dataset for more details"
        optional_msg = "Add a whitespace character to the beginning of the completion"
        optional_fn = add_space_start
    return Remediation(
        name="completion_space_start",
        immediate_msg=immediate_msg,
        optional_msg=optional_msg,
        optional_fn=optional_fn,
    )


def lower_case_validator(df: pd.DataFrame, column: Any) -> Remediation | None:
    """
    This validator will suggest to lowercase the column values, if more than a third of letters are uppercase.
+ """ + + def lower_case(x: Any) -> Any: + x[column] = x[column].str.lower() + return x + + count_upper = df[column].apply(lambda x: sum(1 for c in x if c.isalpha() and c.isupper())).sum() + count_lower = df[column].apply(lambda x: sum(1 for c in x if c.isalpha() and c.islower())).sum() + + if count_upper * 2 > count_lower: + return Remediation( + name="lower_case", + immediate_msg=f"\n- More than a third of your `{column}` column/key is uppercase. Uppercase {column}s tends to perform worse than a mixture of case encountered in normal language. We recommend to lower case the data if that makes sense in your domain. See https://platform.openai.com/docs/guides/fine-tuning/preparing-your-dataset for more details", + optional_msg=f"Lowercase all your data in column/key `{column}`", + optional_fn=lower_case, + ) + return None + + +def read_any_format( + fname: str, fields: list[str] = ["prompt", "completion"] +) -> tuple[pd.DataFrame | None, Remediation]: + """ + This function will read a file saved in .csv, .json, .txt, .xlsx or .tsv format using pandas. 
+ - for .xlsx it will read the first sheet + - for .txt it will assume completions and split on newline + """ + remediation = None + necessary_msg = None + immediate_msg = None + error_msg = None + df = None + + if os.path.isfile(fname): + try: + if fname.lower().endswith(".csv") or fname.lower().endswith(".tsv"): + file_extension_str, separator = ("CSV", ",") if fname.lower().endswith(".csv") else ("TSV", "\t") + immediate_msg = ( + f"\n- Based on your file extension, your file is formatted as a {file_extension_str} file" + ) + necessary_msg = f"Your format `{file_extension_str}` will be converted to `JSONL`" + df = pd.read_csv(fname, sep=separator, dtype=str).fillna("") + elif fname.lower().endswith(".xlsx"): + immediate_msg = "\n- Based on your file extension, your file is formatted as an Excel file" + necessary_msg = "Your format `XLSX` will be converted to `JSONL`" + xls = pd.ExcelFile(fname) + sheets = xls.sheet_names + if len(sheets) > 1: + immediate_msg += "\n- Your Excel file contains more than one sheet. Please either save as csv or ensure all data is present in the first sheet. WARNING: Reading only the first sheet..." + df = pd.read_excel(fname, dtype=str).fillna("") + elif fname.lower().endswith(".txt"): + immediate_msg = "\n- Based on your file extension, you provided a text file" + necessary_msg = "Your format `TXT` will be converted to `JSONL`" + with open(fname, "r") as f: + content = f.read() + df = pd.DataFrame( + [["", line] for line in content.split("\n")], + columns=fields, + dtype=str, + ).fillna("") + elif fname.lower().endswith(".jsonl"): + df = pd.read_json(fname, lines=True, dtype=str).fillna("") # type: ignore + if len(df) == 1: # type: ignore + # this is NOT what we expect for a .jsonl file + immediate_msg = "\n- Your JSONL file appears to be in a JSON format. 
Your file will be converted to JSONL format" + necessary_msg = "Your format `JSON` will be converted to `JSONL`" + df = pd.read_json(fname, dtype=str).fillna("") # type: ignore + else: + pass # this is what we expect for a .jsonl file + elif fname.lower().endswith(".json"): + try: + # to handle case where .json file is actually a .jsonl file + df = pd.read_json(fname, lines=True, dtype=str).fillna("") # type: ignore + if len(df) == 1: # type: ignore + # this code path corresponds to a .json file that has one line + df = pd.read_json(fname, dtype=str).fillna("") # type: ignore + else: + # this is NOT what we expect for a .json file + immediate_msg = "\n- Your JSON file appears to be in a JSONL format. Your file will be converted to JSONL format" + necessary_msg = "Your format `JSON` will be converted to `JSONL`" + except ValueError: + # this code path corresponds to a .json file that has multiple lines (i.e. it is indented) + df = pd.read_json(fname, dtype=str).fillna("") # type: ignore + else: + error_msg = ( + "Your file must have one of the following extensions: .CSV, .TSV, .XLSX, .TXT, .JSON or .JSONL" + ) + if "." in fname: + error_msg += f" Your file `{fname}` ends with the extension `.{fname.split('.')[-1]}` which is not supported." + else: + error_msg += f" Your file `{fname}` is missing a file extension." + + except (ValueError, TypeError): + file_extension_str = fname.split(".")[-1].upper() + error_msg = f"Your file `{fname}` does not appear to be in valid {file_extension_str} format. Please ensure your file is formatted as a valid {file_extension_str} file." + + else: + error_msg = f"File {fname} does not exist." 
+ + remediation = Remediation( + name="read_any_format", + necessary_msg=necessary_msg, + immediate_msg=immediate_msg, + error_msg=error_msg, + ) + return df, remediation + + +def format_inferrer_validator(df: pd.DataFrame) -> Remediation: + """ + This validator will infer the likely fine-tuning format of the data, and display it to the user if it is classification. + It will also suggest to use ada and explain train/validation split benefits. + """ + ft_type = infer_task_type(df) + immediate_msg = None + if ft_type == "classification": + immediate_msg = f"\n- Based on your data it seems like you're trying to fine-tune a model for {ft_type}\n- For classification, we recommend you try one of the faster and cheaper models, such as `ada`\n- For classification, you can estimate the expected model performance by keeping a held out dataset, which is not used for training" + return Remediation(name="num_examples", immediate_msg=immediate_msg) + + +def apply_necessary_remediation(df: OptionalDataFrameT, remediation: Remediation) -> OptionalDataFrameT: + """ + This function will apply a necessary remediation to a dataframe, or print an error message if one exists. + """ + if remediation.error_msg is not None: + sys.stderr.write(f"\n\nERROR in {remediation.name} validator: {remediation.error_msg}\n\nAborting...") + sys.exit(1) + if remediation.immediate_msg is not None: + sys.stdout.write(remediation.immediate_msg) + if remediation.necessary_fn is not None: + df = remediation.necessary_fn(df) + return df + + +def accept_suggestion(input_text: str, auto_accept: bool) -> bool: + sys.stdout.write(input_text) + if auto_accept: + sys.stdout.write("Y\n") + return True + return input().lower() != "n" + + +def apply_optional_remediation( + df: pd.DataFrame, remediation: Remediation, auto_accept: bool +) -> tuple[pd.DataFrame, bool]: + """ + This function will apply an optional remediation to a dataframe, based on the user input. 
+ """ + optional_applied = False + input_text = f"- [Recommended] {remediation.optional_msg} [Y/n]: " + if remediation.optional_msg is not None: + if accept_suggestion(input_text, auto_accept): + assert remediation.optional_fn is not None + df = remediation.optional_fn(df) + optional_applied = True + if remediation.necessary_msg is not None: + sys.stdout.write(f"- [Necessary] {remediation.necessary_msg}\n") + return df, optional_applied + + +def estimate_fine_tuning_time(df: pd.DataFrame) -> None: + """ + Estimate the time it'll take to fine-tune the dataset + """ + ft_format = infer_task_type(df) + expected_time = 1.0 + if ft_format == "classification": + num_examples = len(df) + expected_time = num_examples * 1.44 + else: + size = df.memory_usage(index=True).sum() + expected_time = size * 0.0515 + + def format_time(time: float) -> str: + if time < 60: + return f"{round(time, 2)} seconds" + elif time < 3600: + return f"{round(time / 60, 2)} minutes" + elif time < 86400: + return f"{round(time / 3600, 2)} hours" + else: + return f"{round(time / 86400, 2)} days" + + time_string = format_time(expected_time + 140) + sys.stdout.write( + f"Once your model starts training, it'll approximately take {time_string} to train a `curie` model, and less for `ada` and `babbage`. 
Queue will approximately take half an hour per job ahead of you.\n" + ) + + +def get_outfnames(fname: str, split: bool) -> list[str]: + suffixes = ["_train", "_valid"] if split else [""] + i = 0 + while True: + index_suffix = f" ({i})" if i > 0 else "" + candidate_fnames = [f"{os.path.splitext(fname)[0]}_prepared{suffix}{index_suffix}.jsonl" for suffix in suffixes] + if not any(os.path.isfile(f) for f in candidate_fnames): + return candidate_fnames + i += 1 + + +def get_classification_hyperparams(df: pd.DataFrame) -> tuple[int, object]: + n_classes = df.completion.nunique() + pos_class = None + if n_classes == 2: + pos_class = df.completion.value_counts().index[0] + return n_classes, pos_class + + +def write_out_file(df: pd.DataFrame, fname: str, any_remediations: bool, auto_accept: bool) -> None: + """ + This function will write out a dataframe to a file, if the user would like to proceed, and also offer a fine-tuning command with the newly created file. + For classification it will optionally ask the user if they would like to split the data into train/valid files, and modify the suggested command to include the valid set. + """ + ft_format = infer_task_type(df) + common_prompt_suffix = get_common_xfix(df.prompt, xfix="suffix") + common_completion_suffix = get_common_xfix(df.completion, xfix="suffix") + + split = False + input_text = "- [Recommended] Would you like to split into training and validation set? [Y/n]: " + if ft_format == "classification": + if accept_suggestion(input_text, auto_accept): + split = True + + additional_params = "" + common_prompt_suffix_new_line_handled = common_prompt_suffix.replace("\n", "\\n") + common_completion_suffix_new_line_handled = common_completion_suffix.replace("\n", "\\n") + optional_ending_string = ( + f' Make sure to include `stop=["{common_completion_suffix_new_line_handled}"]` so that the generated texts ends at the expected place.' 
+ if len(common_completion_suffix_new_line_handled) > 0 + else "" + ) + + input_text = "\n\nYour data will be written to a new JSONL file. Proceed [Y/n]: " + + if not any_remediations and not split: + sys.stdout.write( + f'\nYou can use your file for fine-tuning:\n> openai api fine_tunes.create -t "{fname}"{additional_params}\n\nAfter you’ve fine-tuned a model, remember that your prompt has to end with the indicator string `{common_prompt_suffix_new_line_handled}` for the model to start generating completions, rather than continuing with the prompt.{optional_ending_string}\n' + ) + estimate_fine_tuning_time(df) + + elif accept_suggestion(input_text, auto_accept): + fnames = get_outfnames(fname, split) + if split: + assert len(fnames) == 2 and "train" in fnames[0] and "valid" in fnames[1] + MAX_VALID_EXAMPLES = 1000 + n_train = max(len(df) - MAX_VALID_EXAMPLES, int(len(df) * 0.8)) + df_train = df.sample(n=n_train, random_state=42) + df_valid = df.drop(df_train.index) + df_train[["prompt", "completion"]].to_json( # type: ignore + fnames[0], lines=True, orient="records", force_ascii=False, indent=None + ) + df_valid[["prompt", "completion"]].to_json( + fnames[1], lines=True, orient="records", force_ascii=False, indent=None + ) + + n_classes, pos_class = get_classification_hyperparams(df) + additional_params += " --compute_classification_metrics" + if n_classes == 2: + additional_params += f' --classification_positive_class "{pos_class}"' + else: + additional_params += f" --classification_n_classes {n_classes}" + else: + assert len(fnames) == 1 + df[["prompt", "completion"]].to_json( + fnames[0], lines=True, orient="records", force_ascii=False, indent=None + ) + + # Add -v VALID_FILE if we split the file into train / valid + files_string = ("s" if split else "") + " to `" + ("` and `".join(fnames)) + valid_string = f' -v "{fnames[1]}"' if split else "" + separator_reminder = ( + "" + if len(common_prompt_suffix_new_line_handled) == 0 + else f"After you’ve fine-tuned a 
model, remember that your prompt has to end with the indicator string `{common_prompt_suffix_new_line_handled}` for the model to start generating completions, rather than continuing with the prompt." + ) + sys.stdout.write( + f'\nWrote modified file{files_string}`\nFeel free to take a look!\n\nNow use that file when fine-tuning:\n> openai api fine_tunes.create -t "{fnames[0]}"{valid_string}{additional_params}\n\n{separator_reminder}{optional_ending_string}\n' + ) + estimate_fine_tuning_time(df) + else: + sys.stdout.write("Aborting... did not write the file\n") + + +def infer_task_type(df: pd.DataFrame) -> str: + """ + Infer the likely fine-tuning task type from the data + """ + CLASSIFICATION_THRESHOLD = 3 # min_average instances of each class + if sum(df.prompt.str.len()) == 0: + return "open-ended generation" + + if len(df.completion.unique()) < len(df) / CLASSIFICATION_THRESHOLD: + return "classification" + + return "conditional generation" + + +def get_common_xfix(series: Any, xfix: str = "suffix") -> str: + """ + Finds the longest common suffix or prefix of all the values in a series + """ + common_xfix = "" + while True: + common_xfixes = ( + series.str[-(len(common_xfix) + 1) :] if xfix == "suffix" else series.str[: len(common_xfix) + 1] + ) # first few or last few characters + if common_xfixes.nunique() != 1: # we found the character at which we don't have a unique xfix anymore + break + elif common_xfix == common_xfixes.values[0]: # the entire first row is a prefix of every other row + break + else: # the first or last few characters are still common across all rows - let's try to add one more + common_xfix = common_xfixes.values[0] + return common_xfix + + +Validator: TypeAlias = "Callable[[pd.DataFrame], Remediation | None]" + + +def get_validators() -> list[Validator]: + return [ + num_examples_validator, + lambda x: necessary_column_validator(x, "prompt"), + lambda x: necessary_column_validator(x, "completion"), + additional_column_validator, + 
non_empty_field_validator, + format_inferrer_validator, + duplicated_rows_validator, + long_examples_validator, + lambda x: lower_case_validator(x, "prompt"), + lambda x: lower_case_validator(x, "completion"), + common_prompt_suffix_validator, + common_prompt_prefix_validator, + common_completion_prefix_validator, + common_completion_suffix_validator, + completions_space_start_validator, + ] + + +def apply_validators( + df: pd.DataFrame, + fname: str, + remediation: Remediation | None, + validators: list[Validator], + auto_accept: bool, + write_out_file_func: Callable[..., Any], +) -> None: + optional_remediations: list[Remediation] = [] + if remediation is not None: + optional_remediations.append(remediation) + for validator in validators: + remediation = validator(df) + if remediation is not None: + optional_remediations.append(remediation) + df = apply_necessary_remediation(df, remediation) + + any_optional_or_necessary_remediations = any( + [ + remediation + for remediation in optional_remediations + if remediation.optional_msg is not None or remediation.necessary_msg is not None + ] + ) + any_necessary_applied = any( + [remediation for remediation in optional_remediations if remediation.necessary_msg is not None] + ) + any_optional_applied = False + + if any_optional_or_necessary_remediations: + sys.stdout.write("\n\nBased on the analysis we will perform the following actions:\n") + for remediation in optional_remediations: + df, optional_applied = apply_optional_remediation(df, remediation, auto_accept) + any_optional_applied = any_optional_applied or optional_applied + else: + sys.stdout.write("\n\nNo remediations found.\n") + + any_optional_or_necessary_applied = any_optional_applied or any_necessary_applied + + write_out_file_func(df, fname, any_optional_or_necessary_applied, auto_accept) diff --git a/.venv/Lib/site-packages/openai/lib/azure.py b/.venv/Lib/site-packages/openai/lib/azure.py new file mode 100644 index 00000000..b76b83c6 --- /dev/null +++ 
from __future__ import annotations

import os
import inspect
from typing import Any, Union, Mapping, TypeVar, Callable, Awaitable, overload
from typing_extensions import Self, override

import httpx

from .._types import NOT_GIVEN, Omit, Timeout, NotGiven
from .._utils import is_given, is_mapping
from .._client import OpenAI, AsyncOpenAI
from .._models import FinalRequestOptions
from .._streaming import Stream, AsyncStream
from .._exceptions import OpenAIError
from .._base_client import DEFAULT_MAX_RETRIES, BaseClient

# Endpoints that are routed to a specific Azure model deployment, i.e. rewritten
# to /deployments/{model}{endpoint} when a model is present in the request body.
_deployments_endpoints = set(
    [
        "/completions",
        "/chat/completions",
        "/embeddings",
        "/audio/transcriptions",
        "/audio/translations",
        "/audio/speech",
        "/images/generations",
    ]
)


# Callables that produce an Azure Active Directory token on demand.
AzureADTokenProvider = Callable[[], str]
AsyncAzureADTokenProvider = Callable[[], "str | Awaitable[str]"]
_HttpxClientT = TypeVar("_HttpxClientT", bound=Union[httpx.Client, httpx.AsyncClient])
_DefaultStreamT = TypeVar("_DefaultStreamT", bound=Union[Stream[Any], AsyncStream[Any]])


# we need to use a sentinel API key value for Azure AD
# as we don't want to make the `api_key` in the main client Optional
# and Azure AD tokens may be retrieved on a per-request basis
API_KEY_SENTINEL = "".join(["<", "missing API key", ">"])


class MutuallyExclusiveAuthError(OpenAIError):
    # Raised when more than one authentication mechanism is supplied at once.
    def __init__(self) -> None:
        super().__init__(
            "The `api_key`, `azure_ad_token` and `azure_ad_token_provider` arguments are mutually exclusive; Only one can be passed at a time"
        )


class BaseAzureClient(BaseClient[_HttpxClientT, _DefaultStreamT]):
    @override
    def _build_request(
        self,
        options: FinalRequestOptions,
    ) -> httpx.Request:
        # Rewrite deployment-scoped endpoints to /deployments/{model}{url},
        # unless the base URL already pins a deployment.
        if options.url in _deployments_endpoints and is_mapping(options.json_data):
            model = options.json_data.get("model")
            if model is not None and not "/deployments" in str(self.base_url):
                options.url = f"/deployments/{model}{options.url}"

        return super()._build_request(options)


class AzureOpenAI(BaseAzureClient[httpx.Client, Stream[Any]], OpenAI):
    @overload
    def __init__(
        self,
        *,
        azure_endpoint: str,
        azure_deployment: str | None = None,
        api_version: str | None = None,
        api_key: str | None = None,
        azure_ad_token: str | None = None,
        azure_ad_token_provider: AzureADTokenProvider | None = None,
        organization: str | None = None,
        timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
        max_retries: int = DEFAULT_MAX_RETRIES,
        default_headers: Mapping[str, str] | None = None,
        default_query: Mapping[str, object] | None = None,
        http_client: httpx.Client | None = None,
        _strict_response_validation: bool = False,
    ) -> None:
        ...

    @overload
    def __init__(
        self,
        *,
        azure_deployment: str | None = None,
        api_version: str | None = None,
        api_key: str | None = None,
        azure_ad_token: str | None = None,
        azure_ad_token_provider: AzureADTokenProvider | None = None,
        organization: str | None = None,
        timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
        max_retries: int = DEFAULT_MAX_RETRIES,
        default_headers: Mapping[str, str] | None = None,
        default_query: Mapping[str, object] | None = None,
        http_client: httpx.Client | None = None,
        _strict_response_validation: bool = False,
    ) -> None:
        ...

    @overload
    def __init__(
        self,
        *,
        base_url: str,
        api_version: str | None = None,
        api_key: str | None = None,
        azure_ad_token: str | None = None,
        azure_ad_token_provider: AzureADTokenProvider | None = None,
        organization: str | None = None,
        timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
        max_retries: int = DEFAULT_MAX_RETRIES,
        default_headers: Mapping[str, str] | None = None,
        default_query: Mapping[str, object] | None = None,
        http_client: httpx.Client | None = None,
        _strict_response_validation: bool = False,
    ) -> None:
        ...

    def __init__(
        self,
        *,
        api_version: str | None = None,
        azure_endpoint: str | None = None,
        azure_deployment: str | None = None,
        api_key: str | None = None,
        azure_ad_token: str | None = None,
        azure_ad_token_provider: AzureADTokenProvider | None = None,
        organization: str | None = None,
        project: str | None = None,
        base_url: str | None = None,
        timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
        max_retries: int = DEFAULT_MAX_RETRIES,
        default_headers: Mapping[str, str] | None = None,
        default_query: Mapping[str, object] | None = None,
        http_client: httpx.Client | None = None,
        _strict_response_validation: bool = False,
    ) -> None:
        """Construct a new synchronous azure openai client instance.

        This automatically infers the following arguments from their corresponding environment variables if they are not provided:
        - `api_key` from `AZURE_OPENAI_API_KEY`
        - `organization` from `OPENAI_ORG_ID`
        - `project` from `OPENAI_PROJECT_ID`
        - `azure_ad_token` from `AZURE_OPENAI_AD_TOKEN`
        - `api_version` from `OPENAI_API_VERSION`
        - `azure_endpoint` from `AZURE_OPENAI_ENDPOINT`

        Args:
            azure_endpoint: Your Azure endpoint, including the resource, e.g. `https://example-resource.azure.openai.com/`

            azure_ad_token: Your Azure Active Directory token, https://www.microsoft.com/en-us/security/business/identity-access/microsoft-entra-id

            azure_ad_token_provider: A function that returns an Azure Active Directory token, will be invoked on every request.

            azure_deployment: A model deployment, if given sets the base client URL to include `/deployments/{azure_deployment}`.
            Note: this means you won't be able to use non-deployment endpoints. Not supported with Assistants APIs.
        """
        if api_key is None:
            api_key = os.environ.get("AZURE_OPENAI_API_KEY")

        if azure_ad_token is None:
            azure_ad_token = os.environ.get("AZURE_OPENAI_AD_TOKEN")

        # At least one credential source is required (key, token, or provider).
        if api_key is None and azure_ad_token is None and azure_ad_token_provider is None:
            raise OpenAIError(
                "Missing credentials. Please pass one of `api_key`, `azure_ad_token`, `azure_ad_token_provider`, or the `AZURE_OPENAI_API_KEY` or `AZURE_OPENAI_AD_TOKEN` environment variables."
            )

        if api_version is None:
            api_version = os.environ.get("OPENAI_API_VERSION")

        if api_version is None:
            raise ValueError(
                "Must provide either the `api_version` argument or the `OPENAI_API_VERSION` environment variable"
            )

        # The api-version query parameter is sent with every request.
        if default_query is None:
            default_query = {"api-version": api_version}
        else:
            default_query = {**default_query, "api-version": api_version}

        if base_url is None:
            if azure_endpoint is None:
                azure_endpoint = os.environ.get("AZURE_OPENAI_ENDPOINT")

            if azure_endpoint is None:
                raise ValueError(
                    "Must provide one of the `base_url` or `azure_endpoint` arguments, or the `AZURE_OPENAI_ENDPOINT` environment variable"
                )

            if azure_deployment is not None:
                base_url = f"{azure_endpoint}/openai/deployments/{azure_deployment}"
            else:
                base_url = f"{azure_endpoint}/openai"
        else:
            if azure_endpoint is not None:
                raise ValueError("base_url and azure_endpoint are mutually exclusive")

        if api_key is None:
            # define a sentinel value to avoid any typing issues
            api_key = API_KEY_SENTINEL

        super().__init__(
            api_key=api_key,
            organization=organization,
            project=project,
            base_url=base_url,
            timeout=timeout,
            max_retries=max_retries,
            default_headers=default_headers,
            default_query=default_query,
            http_client=http_client,
            _strict_response_validation=_strict_response_validation,
        )
        self._api_version = api_version
        self._azure_ad_token = azure_ad_token
        self._azure_ad_token_provider = azure_ad_token_provider

    @override
    def copy(
        self,
        *,
        api_key: str | None = None,
        organization: str | None = None,
        project: str | None = None,
        api_version: str | None = None,
        azure_ad_token: str | None = None,
        azure_ad_token_provider: AzureADTokenProvider | None = None,
        base_url: str | httpx.URL | None = None,
        timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
        http_client: httpx.Client | None = None,
        max_retries: int | NotGiven = NOT_GIVEN,
        default_headers: Mapping[str, str] | None = None,
        set_default_headers: Mapping[str, str] | None = None,
        default_query: Mapping[str, object] | None = None,
        set_default_query: Mapping[str, object] | None = None,
        _extra_kwargs: Mapping[str, Any] = {},
    ) -> Self:
        """
        Create a new client instance re-using the same options given to the current client with optional overriding.
        """
        return super().copy(
            api_key=api_key,
            organization=organization,
            project=project,
            base_url=base_url,
            timeout=timeout,
            http_client=http_client,
            max_retries=max_retries,
            default_headers=default_headers,
            set_default_headers=set_default_headers,
            default_query=default_query,
            set_default_query=set_default_query,
            _extra_kwargs={
                # Azure-specific options fall back to the current client's values.
                "api_version": api_version or self._api_version,
                "azure_ad_token": azure_ad_token or self._azure_ad_token,
                "azure_ad_token_provider": azure_ad_token_provider or self._azure_ad_token_provider,
                **_extra_kwargs,
            },
        )

    with_options = copy

    def _get_azure_ad_token(self) -> str | None:
        # A statically supplied token wins; otherwise invoke the provider
        # (called on every request) and validate its return value.
        if self._azure_ad_token is not None:
            return self._azure_ad_token

        provider = self._azure_ad_token_provider
        if provider is not None:
            token = provider()
            if not token or not isinstance(token, str):  # pyright: ignore[reportUnnecessaryIsInstance]
                raise ValueError(
                    f"Expected `azure_ad_token_provider` argument to return a string but it returned {token}",
                )
            return token

        return None

    @override
    def _prepare_options(self, options: FinalRequestOptions) -> None:
        # Inject auth headers: Bearer token when an AD token is available,
        # otherwise the Azure `api-key` header; never overwrite caller-set headers.
        headers: dict[str, str | Omit] = {**options.headers} if is_given(options.headers) else {}
        options.headers = headers

        azure_ad_token = self._get_azure_ad_token()
        if azure_ad_token is not None:
            if headers.get("Authorization") is None:
                headers["Authorization"] = f"Bearer {azure_ad_token}"
        elif self.api_key is not API_KEY_SENTINEL:
            if headers.get("api-key") is None:
                headers["api-key"] = self.api_key
        else:
            # should never be hit
            raise ValueError("Unable to handle auth")

        return super()._prepare_options(options)


class AsyncAzureOpenAI(BaseAzureClient[httpx.AsyncClient, AsyncStream[Any]], AsyncOpenAI):
    @overload
    def __init__(
        self,
        *,
        azure_endpoint: str,
        azure_deployment: str | None = None,
        api_version: str | None = None,
        api_key: str | None = None,
        azure_ad_token: str | None = None,
        azure_ad_token_provider: AsyncAzureADTokenProvider | None = None,
        organization: str | None = None,
        project: str | None = None,
        timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
        max_retries: int = DEFAULT_MAX_RETRIES,
        default_headers: Mapping[str, str] | None = None,
        default_query: Mapping[str, object] | None = None,
        http_client: httpx.AsyncClient | None = None,
        _strict_response_validation: bool = False,
    ) -> None:
        ...

    @overload
    def __init__(
        self,
        *,
        azure_deployment: str | None = None,
        api_version: str | None = None,
        api_key: str | None = None,
        azure_ad_token: str | None = None,
        azure_ad_token_provider: AsyncAzureADTokenProvider | None = None,
        organization: str | None = None,
        project: str | None = None,
        timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
        max_retries: int = DEFAULT_MAX_RETRIES,
        default_headers: Mapping[str, str] | None = None,
        default_query: Mapping[str, object] | None = None,
        http_client: httpx.AsyncClient | None = None,
        _strict_response_validation: bool = False,
    ) -> None:
        ...
+ + @overload + def __init__( + self, + *, + base_url: str, + api_version: str | None = None, + api_key: str | None = None, + azure_ad_token: str | None = None, + azure_ad_token_provider: AsyncAzureADTokenProvider | None = None, + organization: str | None = None, + project: str | None = None, + timeout: float | Timeout | None | NotGiven = NOT_GIVEN, + max_retries: int = DEFAULT_MAX_RETRIES, + default_headers: Mapping[str, str] | None = None, + default_query: Mapping[str, object] | None = None, + http_client: httpx.AsyncClient | None = None, + _strict_response_validation: bool = False, + ) -> None: + ... + + def __init__( + self, + *, + azure_endpoint: str | None = None, + azure_deployment: str | None = None, + api_version: str | None = None, + api_key: str | None = None, + azure_ad_token: str | None = None, + azure_ad_token_provider: AsyncAzureADTokenProvider | None = None, + organization: str | None = None, + project: str | None = None, + base_url: str | None = None, + timeout: float | Timeout | None | NotGiven = NOT_GIVEN, + max_retries: int = DEFAULT_MAX_RETRIES, + default_headers: Mapping[str, str] | None = None, + default_query: Mapping[str, object] | None = None, + http_client: httpx.AsyncClient | None = None, + _strict_response_validation: bool = False, + ) -> None: + """Construct a new asynchronous azure openai client instance. + + This automatically infers the following arguments from their corresponding environment variables if they are not provided: + - `api_key` from `AZURE_OPENAI_API_KEY` + - `organization` from `OPENAI_ORG_ID` + - `project` from `OPENAI_PROJECT_ID` + - `azure_ad_token` from `AZURE_OPENAI_AD_TOKEN` + - `api_version` from `OPENAI_API_VERSION` + - `azure_endpoint` from `AZURE_OPENAI_ENDPOINT` + + Args: + azure_endpoint: Your Azure endpoint, including the resource, e.g. 
`https://example-resource.azure.openai.com/` + + azure_ad_token: Your Azure Active Directory token, https://www.microsoft.com/en-us/security/business/identity-access/microsoft-entra-id + + azure_ad_token_provider: A function that returns an Azure Active Directory token, will be invoked on every request. + + azure_deployment: A model deployment, if given sets the base client URL to include `/deployments/{azure_deployment}`. + Note: this means you won't be able to use non-deployment endpoints. Not supported with Assistants APIs. + """ + if api_key is None: + api_key = os.environ.get("AZURE_OPENAI_API_KEY") + + if azure_ad_token is None: + azure_ad_token = os.environ.get("AZURE_OPENAI_AD_TOKEN") + + if api_key is None and azure_ad_token is None and azure_ad_token_provider is None: + raise OpenAIError( + "Missing credentials. Please pass one of `api_key`, `azure_ad_token`, `azure_ad_token_provider`, or the `AZURE_OPENAI_API_KEY` or `AZURE_OPENAI_AD_TOKEN` environment variables." + ) + + if api_version is None: + api_version = os.environ.get("OPENAI_API_VERSION") + + if api_version is None: + raise ValueError( + "Must provide either the `api_version` argument or the `OPENAI_API_VERSION` environment variable" + ) + + if default_query is None: + default_query = {"api-version": api_version} + else: + default_query = {**default_query, "api-version": api_version} + + if base_url is None: + if azure_endpoint is None: + azure_endpoint = os.environ.get("AZURE_OPENAI_ENDPOINT") + + if azure_endpoint is None: + raise ValueError( + "Must provide one of the `base_url` or `azure_endpoint` arguments, or the `AZURE_OPENAI_ENDPOINT` environment variable" + ) + + if azure_deployment is not None: + base_url = f"{azure_endpoint}/openai/deployments/{azure_deployment}" + else: + base_url = f"{azure_endpoint}/openai" + else: + if azure_endpoint is not None: + raise ValueError("base_url and azure_endpoint are mutually exclusive") + + if api_key is None: + # define a sentinel value to avoid 
any typing issues + api_key = API_KEY_SENTINEL + + super().__init__( + api_key=api_key, + organization=organization, + project=project, + base_url=base_url, + timeout=timeout, + max_retries=max_retries, + default_headers=default_headers, + default_query=default_query, + http_client=http_client, + _strict_response_validation=_strict_response_validation, + ) + self._api_version = api_version + self._azure_ad_token = azure_ad_token + self._azure_ad_token_provider = azure_ad_token_provider + + @override + def copy( + self, + *, + api_key: str | None = None, + organization: str | None = None, + project: str | None = None, + api_version: str | None = None, + azure_ad_token: str | None = None, + azure_ad_token_provider: AsyncAzureADTokenProvider | None = None, + base_url: str | httpx.URL | None = None, + timeout: float | Timeout | None | NotGiven = NOT_GIVEN, + http_client: httpx.AsyncClient | None = None, + max_retries: int | NotGiven = NOT_GIVEN, + default_headers: Mapping[str, str] | None = None, + set_default_headers: Mapping[str, str] | None = None, + default_query: Mapping[str, object] | None = None, + set_default_query: Mapping[str, object] | None = None, + _extra_kwargs: Mapping[str, Any] = {}, + ) -> Self: + """ + Create a new client instance re-using the same options given to the current client with optional overriding. 
+ """ + return super().copy( + api_key=api_key, + organization=organization, + project=project, + base_url=base_url, + timeout=timeout, + http_client=http_client, + max_retries=max_retries, + default_headers=default_headers, + set_default_headers=set_default_headers, + default_query=default_query, + set_default_query=set_default_query, + _extra_kwargs={ + "api_version": api_version or self._api_version, + "azure_ad_token": azure_ad_token or self._azure_ad_token, + "azure_ad_token_provider": azure_ad_token_provider or self._azure_ad_token_provider, + **_extra_kwargs, + }, + ) + + with_options = copy + + async def _get_azure_ad_token(self) -> str | None: + if self._azure_ad_token is not None: + return self._azure_ad_token + + provider = self._azure_ad_token_provider + if provider is not None: + token = provider() + if inspect.isawaitable(token): + token = await token + if not token or not isinstance(token, str): + raise ValueError( + f"Expected `azure_ad_token_provider` argument to return a string but it returned {token}", + ) + return token + + return None + + @override + async def _prepare_options(self, options: FinalRequestOptions) -> None: + headers: dict[str, str | Omit] = {**options.headers} if is_given(options.headers) else {} + options.headers = headers + + azure_ad_token = await self._get_azure_ad_token() + if azure_ad_token is not None: + if headers.get("Authorization") is None: + headers["Authorization"] = f"Bearer {azure_ad_token}" + elif self.api_key is not API_KEY_SENTINEL: + if headers.get("api-key") is None: + headers["api-key"] = self.api_key + else: + # should never be hit + raise ValueError("Unable to handle auth") + + return await super()._prepare_options(options) diff --git a/.venv/Lib/site-packages/openai/lib/streaming/__init__.py b/.venv/Lib/site-packages/openai/lib/streaming/__init__.py new file mode 100644 index 00000000..eb378d25 --- /dev/null +++ b/.venv/Lib/site-packages/openai/lib/streaming/__init__.py @@ -0,0 +1,8 @@ +from ._assistants 
import ( + AssistantEventHandler as AssistantEventHandler, + AssistantEventHandlerT as AssistantEventHandlerT, + AssistantStreamManager as AssistantStreamManager, + AsyncAssistantEventHandler as AsyncAssistantEventHandler, + AsyncAssistantEventHandlerT as AsyncAssistantEventHandlerT, + AsyncAssistantStreamManager as AsyncAssistantStreamManager, +) diff --git a/.venv/Lib/site-packages/openai/lib/streaming/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/lib/streaming/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..c0666de7 Binary files /dev/null and b/.venv/Lib/site-packages/openai/lib/streaming/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/lib/streaming/__pycache__/_assistants.cpython-311.pyc b/.venv/Lib/site-packages/openai/lib/streaming/__pycache__/_assistants.cpython-311.pyc new file mode 100644 index 00000000..93b9d187 Binary files /dev/null and b/.venv/Lib/site-packages/openai/lib/streaming/__pycache__/_assistants.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/lib/streaming/_assistants.py b/.venv/Lib/site-packages/openai/lib/streaming/_assistants.py new file mode 100644 index 00000000..03d97ec2 --- /dev/null +++ b/.venv/Lib/site-packages/openai/lib/streaming/_assistants.py @@ -0,0 +1,1035 @@ +from __future__ import annotations + +import asyncio +from types import TracebackType +from typing import TYPE_CHECKING, Any, Generic, TypeVar, Callable, Iterable, Iterator, cast +from typing_extensions import Awaitable, AsyncIterable, AsyncIterator, assert_never + +import httpx + +from ..._utils import is_dict, is_list, consume_sync_iterator, consume_async_iterator +from ..._models import construct_type +from ..._streaming import Stream, AsyncStream +from ...types.beta import AssistantStreamEvent +from ...types.beta.threads import ( + Run, + Text, + Message, + ImageFile, + TextDelta, + MessageDelta, + MessageContent, + MessageContentDelta, +) +from 
...types.beta.threads.runs import RunStep, ToolCall, RunStepDelta, ToolCallDelta + + +class AssistantEventHandler: + text_deltas: Iterable[str] + """Iterator over just the text deltas in the stream. + + This corresponds to the `thread.message.delta` event + in the API. + + ```py + for text in stream.text_deltas: + print(text, end="", flush=True) + print() + ``` + """ + + def __init__(self) -> None: + self._current_event: AssistantStreamEvent | None = None + self._current_message_content_index: int | None = None + self._current_message_content: MessageContent | None = None + self._current_tool_call_index: int | None = None + self._current_tool_call: ToolCall | None = None + self.__current_run_step_id: str | None = None + self.__current_run: Run | None = None + self.__run_step_snapshots: dict[str, RunStep] = {} + self.__message_snapshots: dict[str, Message] = {} + self.__current_message_snapshot: Message | None = None + + self.text_deltas = self.__text_deltas__() + self._iterator = self.__stream__() + self.__stream: Stream[AssistantStreamEvent] | None = None + + def _init(self, stream: Stream[AssistantStreamEvent]) -> None: + if self.__stream: + raise RuntimeError( + "A single event handler cannot be shared between multiple streams; You will need to construct a new event handler instance" + ) + + self.__stream = stream + + def __next__(self) -> AssistantStreamEvent: + return self._iterator.__next__() + + def __iter__(self) -> Iterator[AssistantStreamEvent]: + for item in self._iterator: + yield item + + @property + def current_event(self) -> AssistantStreamEvent | None: + return self._current_event + + @property + def current_run(self) -> Run | None: + return self.__current_run + + @property + def current_run_step_snapshot(self) -> RunStep | None: + if not self.__current_run_step_id: + return None + + return self.__run_step_snapshots[self.__current_run_step_id] + + @property + def current_message_snapshot(self) -> Message | None: + return 
self.__current_message_snapshot + + def close(self) -> None: + """ + Close the response and release the connection. + + Automatically called when the context manager exits. + """ + if self.__stream: + self.__stream.close() + + def until_done(self) -> None: + """Waits until the stream has been consumed""" + consume_sync_iterator(self) + + def get_final_run(self) -> Run: + """Wait for the stream to finish and returns the completed Run object""" + self.until_done() + + if not self.__current_run: + raise RuntimeError("No final run object found") + + return self.__current_run + + def get_final_run_steps(self) -> list[RunStep]: + """Wait for the stream to finish and returns the steps taken in this run""" + self.until_done() + + if not self.__run_step_snapshots: + raise RuntimeError("No run steps found") + + return [step for step in self.__run_step_snapshots.values()] + + def get_final_messages(self) -> list[Message]: + """Wait for the stream to finish and returns the messages emitted in this run""" + self.until_done() + + if not self.__message_snapshots: + raise RuntimeError("No messages found") + + return [message for message in self.__message_snapshots.values()] + + def __text_deltas__(self) -> Iterator[str]: + for event in self: + if event.event != "thread.message.delta": + continue + + for content_delta in event.data.delta.content or []: + if content_delta.type == "text" and content_delta.text and content_delta.text.value: + yield content_delta.text.value + + # event handlers + + def on_end(self) -> None: + """Fires when the stream has finished. + + This happens if the stream is read to completion + or if an exception occurs during iteration. 
+ """ + + def on_event(self, event: AssistantStreamEvent) -> None: + """Callback that is fired for every Server-Sent-Event""" + + def on_run_step_created(self, run_step: RunStep) -> None: + """Callback that is fired when a run step is created""" + + def on_run_step_delta(self, delta: RunStepDelta, snapshot: RunStep) -> None: + """Callback that is fired whenever a run step delta is returned from the API + + The first argument is just the delta as sent by the API and the second argument + is the accumulated snapshot of the run step. For example, a tool calls event may + look like this: + + # delta + tool_calls=[ + RunStepDeltaToolCallsCodeInterpreter( + index=0, + type='code_interpreter', + id=None, + code_interpreter=CodeInterpreter(input=' sympy', outputs=None) + ) + ] + # snapshot + tool_calls=[ + CodeToolCall( + id='call_wKayJlcYV12NiadiZuJXxcfx', + code_interpreter=CodeInterpreter(input='from sympy', outputs=[]), + type='code_interpreter', + index=0 + ) + ], + """ + + def on_run_step_done(self, run_step: RunStep) -> None: + """Callback that is fired when a run step is completed""" + + def on_tool_call_created(self, tool_call: ToolCall) -> None: + """Callback that is fired when a tool call is created""" + + def on_tool_call_delta(self, delta: ToolCallDelta, snapshot: ToolCall) -> None: + """Callback that is fired when a tool call delta is encountered""" + + def on_tool_call_done(self, tool_call: ToolCall) -> None: + """Callback that is fired when a tool call delta is encountered""" + + def on_exception(self, exception: Exception) -> None: + """Fired whenever an exception happens during streaming""" + + def on_timeout(self) -> None: + """Fires if the request times out""" + + def on_message_created(self, message: Message) -> None: + """Callback that is fired when a message is created""" + + def on_message_delta(self, delta: MessageDelta, snapshot: Message) -> None: + """Callback that is fired whenever a message delta is returned from the API + + The first argument 
is just the delta as sent by the API and the second argument + is the accumulated snapshot of the message. For example, a text content event may + look like this: + + # delta + MessageDeltaText( + index=0, + type='text', + text=Text( + value=' Jane' + ), + ) + # snapshot + MessageContentText( + index=0, + type='text', + text=Text( + value='Certainly, Jane' + ), + ) + """ + + def on_message_done(self, message: Message) -> None: + """Callback that is fired when a message is completed""" + + def on_text_created(self, text: Text) -> None: + """Callback that is fired when a text content block is created""" + + def on_text_delta(self, delta: TextDelta, snapshot: Text) -> None: + """Callback that is fired whenever a text content delta is returned + by the API. + + The first argument is just the delta as sent by the API and the second argument + is the accumulated snapshot of the text. For example: + + on_text_delta(TextDelta(value="The"), Text(value="The")), + on_text_delta(TextDelta(value=" solution"), Text(value="The solution")), + on_text_delta(TextDelta(value=" to"), Text(value="The solution to")), + on_text_delta(TextDelta(value=" the"), Text(value="The solution to the")), + on_text_delta(TextDelta(value=" equation"), Text(value="The solution to the equivalent")), + """ + + def on_text_done(self, text: Text) -> None: + """Callback that is fired when a text content block is finished""" + + def on_image_file_done(self, image_file: ImageFile) -> None: + """Callback that is fired when an image file block is finished""" + + def _emit_sse_event(self, event: AssistantStreamEvent) -> None: + self._current_event = event + self.on_event(event) + + self.__current_message_snapshot, new_content = accumulate_event( + event=event, + current_message_snapshot=self.__current_message_snapshot, + ) + if self.__current_message_snapshot is not None: + self.__message_snapshots[self.__current_message_snapshot.id] = self.__current_message_snapshot + + accumulate_run_step( + event=event, + 
run_step_snapshots=self.__run_step_snapshots, + ) + + for content_delta in new_content: + assert self.__current_message_snapshot is not None + + block = self.__current_message_snapshot.content[content_delta.index] + if block.type == "text": + self.on_text_created(block.text) + + if ( + event.event == "thread.run.completed" + or event.event == "thread.run.cancelled" + or event.event == "thread.run.expired" + or event.event == "thread.run.failed" + or event.event == "thread.run.requires_action" + ): + self.__current_run = event.data + if self._current_tool_call: + self.on_tool_call_done(self._current_tool_call) + elif ( + event.event == "thread.run.created" + or event.event == "thread.run.in_progress" + or event.event == "thread.run.cancelling" + or event.event == "thread.run.queued" + ): + self.__current_run = event.data + elif event.event == "thread.message.created": + self.on_message_created(event.data) + elif event.event == "thread.message.delta": + snapshot = self.__current_message_snapshot + assert snapshot is not None + + message_delta = event.data.delta + if message_delta.content is not None: + for content_delta in message_delta.content: + if content_delta.type == "text" and content_delta.text: + snapshot_content = snapshot.content[content_delta.index] + assert snapshot_content.type == "text" + self.on_text_delta(content_delta.text, snapshot_content.text) + + # If the delta is for a new message content: + # - emit on_text_done/on_image_file_done for the previous message content + # - emit on_text_created/on_image_created for the new message content + if content_delta.index != self._current_message_content_index: + if self._current_message_content is not None: + if self._current_message_content.type == "text": + self.on_text_done(self._current_message_content.text) + elif self._current_message_content.type == "image_file": + self.on_image_file_done(self._current_message_content.image_file) + + self._current_message_content_index = content_delta.index + 
self._current_message_content = snapshot.content[content_delta.index] + + # Update the current_message_content (delta event is correctly emitted already) + self._current_message_content = snapshot.content[content_delta.index] + + self.on_message_delta(event.data.delta, snapshot) + elif event.event == "thread.message.completed" or event.event == "thread.message.incomplete": + self.__current_message_snapshot = event.data + self.__message_snapshots[event.data.id] = event.data + + if self._current_message_content_index is not None: + content = event.data.content[self._current_message_content_index] + if content.type == "text": + self.on_text_done(content.text) + elif content.type == "image_file": + self.on_image_file_done(content.image_file) + + self.on_message_done(event.data) + elif event.event == "thread.run.step.created": + self.__current_run_step_id = event.data.id + self.on_run_step_created(event.data) + elif event.event == "thread.run.step.in_progress": + self.__current_run_step_id = event.data.id + elif event.event == "thread.run.step.delta": + step_snapshot = self.__run_step_snapshots[event.data.id] + + run_step_delta = event.data.delta + if ( + run_step_delta.step_details + and run_step_delta.step_details.type == "tool_calls" + and run_step_delta.step_details.tool_calls is not None + ): + assert step_snapshot.step_details.type == "tool_calls" + for tool_call_delta in run_step_delta.step_details.tool_calls: + if tool_call_delta.index == self._current_tool_call_index: + self.on_tool_call_delta( + tool_call_delta, + step_snapshot.step_details.tool_calls[tool_call_delta.index], + ) + + # If the delta is for a new tool call: + # - emit on_tool_call_done for the previous tool_call + # - emit on_tool_call_created for the new tool_call + if tool_call_delta.index != self._current_tool_call_index: + if self._current_tool_call is not None: + self.on_tool_call_done(self._current_tool_call) + + self._current_tool_call_index = tool_call_delta.index + 
self._current_tool_call = step_snapshot.step_details.tool_calls[tool_call_delta.index] + self.on_tool_call_created(self._current_tool_call) + + # Update the current_tool_call (delta event is correctly emitted already) + self._current_tool_call = step_snapshot.step_details.tool_calls[tool_call_delta.index] + + self.on_run_step_delta( + event.data.delta, + step_snapshot, + ) + elif ( + event.event == "thread.run.step.completed" + or event.event == "thread.run.step.cancelled" + or event.event == "thread.run.step.expired" + or event.event == "thread.run.step.failed" + ): + if self._current_tool_call: + self.on_tool_call_done(self._current_tool_call) + + self.on_run_step_done(event.data) + self.__current_run_step_id = None + elif event.event == "thread.created" or event.event == "thread.message.in_progress" or event.event == "error": + # currently no special handling + ... + else: + # we only want to error at build-time + if TYPE_CHECKING: # type: ignore[unreachable] + assert_never(event) + + self._current_event = None + + def __stream__(self) -> Iterator[AssistantStreamEvent]: + stream = self.__stream + if not stream: + raise RuntimeError("Stream has not been started yet") + + try: + for event in stream: + self._emit_sse_event(event) + + yield event + except (httpx.TimeoutException, asyncio.TimeoutError) as exc: + self.on_timeout() + self.on_exception(exc) + raise + except Exception as exc: + self.on_exception(exc) + raise + finally: + self.on_end() + + +AssistantEventHandlerT = TypeVar("AssistantEventHandlerT", bound=AssistantEventHandler) + + +class AssistantStreamManager(Generic[AssistantEventHandlerT]): + """Wrapper over AssistantStreamEventHandler that is returned by `.stream()` + so that a context manager can be used. + + ```py + with client.threads.create_and_run_stream(...) as stream: + for event in stream: + ... 
+ ``` + """ + + def __init__( + self, + api_request: Callable[[], Stream[AssistantStreamEvent]], + *, + event_handler: AssistantEventHandlerT, + ) -> None: + self.__stream: Stream[AssistantStreamEvent] | None = None + self.__event_handler = event_handler + self.__api_request = api_request + + def __enter__(self) -> AssistantEventHandlerT: + self.__stream = self.__api_request() + self.__event_handler._init(self.__stream) + return self.__event_handler + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + if self.__stream is not None: + self.__stream.close() + + +class AsyncAssistantEventHandler: + text_deltas: AsyncIterable[str] + """Iterator over just the text deltas in the stream. + + This corresponds to the `thread.message.delta` event + in the API. + + ```py + async for text in stream.text_deltas: + print(text, end="", flush=True) + print() + ``` + """ + + def __init__(self) -> None: + self._current_event: AssistantStreamEvent | None = None + self._current_message_content_index: int | None = None + self._current_message_content: MessageContent | None = None + self._current_tool_call_index: int | None = None + self._current_tool_call: ToolCall | None = None + self.__current_run_step_id: str | None = None + self.__current_run: Run | None = None + self.__run_step_snapshots: dict[str, RunStep] = {} + self.__message_snapshots: dict[str, Message] = {} + self.__current_message_snapshot: Message | None = None + + self.text_deltas = self.__text_deltas__() + self._iterator = self.__stream__() + self.__stream: AsyncStream[AssistantStreamEvent] | None = None + + def _init(self, stream: AsyncStream[AssistantStreamEvent]) -> None: + if self.__stream: + raise RuntimeError( + "A single event handler cannot be shared between multiple streams; You will need to construct a new event handler instance" + ) + + self.__stream = stream + + async def __anext__(self) -> AssistantStreamEvent: + return 
await self._iterator.__anext__() + + async def __aiter__(self) -> AsyncIterator[AssistantStreamEvent]: + async for item in self._iterator: + yield item + + async def close(self) -> None: + """ + Close the response and release the connection. + + Automatically called when the context manager exits. + """ + if self.__stream: + await self.__stream.close() + + @property + def current_event(self) -> AssistantStreamEvent | None: + return self._current_event + + @property + def current_run(self) -> Run | None: + return self.__current_run + + @property + def current_run_step_snapshot(self) -> RunStep | None: + if not self.__current_run_step_id: + return None + + return self.__run_step_snapshots[self.__current_run_step_id] + + @property + def current_message_snapshot(self) -> Message | None: + return self.__current_message_snapshot + + async def until_done(self) -> None: + """Waits until the stream has been consumed""" + await consume_async_iterator(self) + + async def get_final_run(self) -> Run: + """Wait for the stream to finish and returns the completed Run object""" + await self.until_done() + + if not self.__current_run: + raise RuntimeError("No final run object found") + + return self.__current_run + + async def get_final_run_steps(self) -> list[RunStep]: + """Wait for the stream to finish and returns the steps taken in this run""" + await self.until_done() + + if not self.__run_step_snapshots: + raise RuntimeError("No run steps found") + + return [step for step in self.__run_step_snapshots.values()] + + async def get_final_messages(self) -> list[Message]: + """Wait for the stream to finish and returns the messages emitted in this run""" + await self.until_done() + + if not self.__message_snapshots: + raise RuntimeError("No messages found") + + return [message for message in self.__message_snapshots.values()] + + async def __text_deltas__(self) -> AsyncIterator[str]: + async for event in self: + if event.event != "thread.message.delta": + continue + + for 
content_delta in event.data.delta.content or []: + if content_delta.type == "text" and content_delta.text and content_delta.text.value: + yield content_delta.text.value + + # event handlers + + async def on_end(self) -> None: + """Fires when the stream has finished. + + This happens if the stream is read to completion + or if an exception occurs during iteration. + """ + + async def on_event(self, event: AssistantStreamEvent) -> None: + """Callback that is fired for every Server-Sent-Event""" + + async def on_run_step_created(self, run_step: RunStep) -> None: + """Callback that is fired when a run step is created""" + + async def on_run_step_delta(self, delta: RunStepDelta, snapshot: RunStep) -> None: + """Callback that is fired whenever a run step delta is returned from the API + + The first argument is just the delta as sent by the API and the second argument + is the accumulated snapshot of the run step. For example, a tool calls event may + look like this: + + # delta + tool_calls=[ + RunStepDeltaToolCallsCodeInterpreter( + index=0, + type='code_interpreter', + id=None, + code_interpreter=CodeInterpreter(input=' sympy', outputs=None) + ) + ] + # snapshot + tool_calls=[ + CodeToolCall( + id='call_wKayJlcYV12NiadiZuJXxcfx', + code_interpreter=CodeInterpreter(input='from sympy', outputs=[]), + type='code_interpreter', + index=0 + ) + ], + """ + + async def on_run_step_done(self, run_step: RunStep) -> None: + """Callback that is fired when a run step is completed""" + + async def on_tool_call_created(self, tool_call: ToolCall) -> None: + """Callback that is fired when a tool call is created""" + + async def on_tool_call_delta(self, delta: ToolCallDelta, snapshot: ToolCall) -> None: + """Callback that is fired when a tool call delta is encountered""" + + async def on_tool_call_done(self, tool_call: ToolCall) -> None: + """Callback that is fired when a tool call delta is encountered""" + + async def on_exception(self, exception: Exception) -> None: + """Fired 
whenever an exception happens during streaming""" + + async def on_timeout(self) -> None: + """Fires if the request times out""" + + async def on_message_created(self, message: Message) -> None: + """Callback that is fired when a message is created""" + + async def on_message_delta(self, delta: MessageDelta, snapshot: Message) -> None: + """Callback that is fired whenever a message delta is returned from the API + + The first argument is just the delta as sent by the API and the second argument + is the accumulated snapshot of the message. For example, a text content event may + look like this: + + # delta + MessageDeltaText( + index=0, + type='text', + text=Text( + value=' Jane' + ), + ) + # snapshot + MessageContentText( + index=0, + type='text', + text=Text( + value='Certainly, Jane' + ), + ) + """ + + async def on_message_done(self, message: Message) -> None: + """Callback that is fired when a message is completed""" + + async def on_text_created(self, text: Text) -> None: + """Callback that is fired when a text content block is created""" + + async def on_text_delta(self, delta: TextDelta, snapshot: Text) -> None: + """Callback that is fired whenever a text content delta is returned + by the API. + + The first argument is just the delta as sent by the API and the second argument + is the accumulated snapshot of the text. 
        For example:

        on_text_delta(TextDelta(value="The"), Text(value="The")),
        on_text_delta(TextDelta(value=" solution"), Text(value="The solution")),
        on_text_delta(TextDelta(value=" to"), Text(value="The solution to")),
        on_text_delta(TextDelta(value=" the"), Text(value="The solution to the")),
        on_text_delta(TextDelta(value=" equation"), Text(value="The solution to the equation")),
        """

    async def on_text_done(self, text: Text) -> None:
        """Callback that is fired when a text content block is finished"""

    async def on_image_file_done(self, image_file: ImageFile) -> None:
        """Callback that is fired when an image file block is finished"""

    async def _emit_sse_event(self, event: AssistantStreamEvent) -> None:
        """Accumulate *event* into the local snapshots and dispatch it to the
        user-facing `on_*` callbacks.

        Order matters here: the generic `on_event` hook fires first, then the
        snapshots are updated, then the more specific lifecycle callbacks fire
        against the *updated* snapshots.
        """
        self._current_event = event
        await self.on_event(event)

        # Merge message deltas into the running message snapshot; `new_content`
        # lists content blocks that appeared for the first time in this event.
        self.__current_message_snapshot, new_content = accumulate_event(
            event=event,
            current_message_snapshot=self.__current_message_snapshot,
        )
        if self.__current_message_snapshot is not None:
            self.__message_snapshots[self.__current_message_snapshot.id] = self.__current_message_snapshot

        accumulate_run_step(
            event=event,
            run_step_snapshots=self.__run_step_snapshots,
        )

        # Fire `on_text_created` for brand-new text content blocks.
        for content_delta in new_content:
            assert self.__current_message_snapshot is not None

            block = self.__current_message_snapshot.content[content_delta.index]
            if block.type == "text":
                await self.on_text_created(block.text)

        if (
            event.event == "thread.run.completed"
            or event.event == "thread.run.cancelled"
            or event.event == "thread.run.expired"
            or event.event == "thread.run.failed"
            or event.event == "thread.run.requires_action"
        ):
            # Terminal (or action-required) run states: flush the in-flight
            # tool call, if any, before the run is considered settled.
            self.__current_run = event.data
            if self._current_tool_call:
                await self.on_tool_call_done(self._current_tool_call)
        elif (
            event.event == "thread.run.created"
            or event.event == "thread.run.in_progress"
            or event.event == "thread.run.cancelling"
            or event.event == "thread.run.queued"
        ):
            self.__current_run = event.data
        elif event.event == "thread.message.created":
            await self.on_message_created(event.data)
        elif event.event == "thread.message.delta":
            snapshot = self.__current_message_snapshot
            assert snapshot is not None

            message_delta = event.data.delta
            if message_delta.content is not None:
                for content_delta in message_delta.content:
                    if content_delta.type == "text" and content_delta.text:
                        snapshot_content = snapshot.content[content_delta.index]
                        assert snapshot_content.type == "text"
                        await self.on_text_delta(content_delta.text, snapshot_content.text)

                    # If the delta is for a new message content:
                    # - emit on_text_done/on_image_file_done for the previous message content
                    # - emit on_text_created/on_image_created for the new message content
                    if content_delta.index != self._current_message_content_index:
                        if self._current_message_content is not None:
                            if self._current_message_content.type == "text":
                                await self.on_text_done(self._current_message_content.text)
                            elif self._current_message_content.type == "image_file":
                                await self.on_image_file_done(self._current_message_content.image_file)

                        self._current_message_content_index = content_delta.index
                        self._current_message_content = snapshot.content[content_delta.index]

                    # Update the current_message_content (delta event is correctly emitted already)
                    self._current_message_content = snapshot.content[content_delta.index]

            await self.on_message_delta(event.data.delta, snapshot)
        elif event.event == "thread.message.completed" or event.event == "thread.message.incomplete":
            self.__current_message_snapshot = event.data
            self.__message_snapshots[event.data.id] = event.data

            # The final message event also terminates the in-flight content
            # block, so emit its `*_done` callback here.
            if self._current_message_content_index is not None:
                content = event.data.content[self._current_message_content_index]
                if content.type == "text":
                    await self.on_text_done(content.text)
                elif content.type == "image_file":
                    await self.on_image_file_done(content.image_file)

            await self.on_message_done(event.data)
        elif event.event == "thread.run.step.created":
            self.__current_run_step_id = event.data.id
            await self.on_run_step_created(event.data)
        elif event.event == "thread.run.step.in_progress":
            self.__current_run_step_id = event.data.id
        elif event.event == "thread.run.step.delta":
            step_snapshot = self.__run_step_snapshots[event.data.id]

            run_step_delta = event.data.delta
            if (
                run_step_delta.step_details
                and run_step_delta.step_details.type == "tool_calls"
                and run_step_delta.step_details.tool_calls is not None
            ):
                assert step_snapshot.step_details.type == "tool_calls"
                for tool_call_delta in run_step_delta.step_details.tool_calls:
                    if tool_call_delta.index == self._current_tool_call_index:
                        await self.on_tool_call_delta(
                            tool_call_delta,
                            step_snapshot.step_details.tool_calls[tool_call_delta.index],
                        )

                    # If the delta is for a new tool call:
                    # - emit on_tool_call_done for the previous tool_call
                    # - emit on_tool_call_created for the new tool_call
                    if tool_call_delta.index != self._current_tool_call_index:
                        if self._current_tool_call is not None:
                            await self.on_tool_call_done(self._current_tool_call)

                        self._current_tool_call_index = tool_call_delta.index
                        self._current_tool_call = step_snapshot.step_details.tool_calls[tool_call_delta.index]
                        await self.on_tool_call_created(self._current_tool_call)

                    # Update the current_tool_call (delta event is correctly emitted already)
                    self._current_tool_call = step_snapshot.step_details.tool_calls[tool_call_delta.index]

            await self.on_run_step_delta(
                event.data.delta,
                step_snapshot,
            )
        elif (
            event.event == "thread.run.step.completed"
            or event.event == "thread.run.step.cancelled"
            or event.event == "thread.run.step.expired"
            or event.event == "thread.run.step.failed"
        ):
            if self._current_tool_call:
                await self.on_tool_call_done(self._current_tool_call)

            await self.on_run_step_done(event.data)
            self.__current_run_step_id = None
        elif event.event == "thread.created" or event.event == "thread.message.in_progress" or event.event == "error":
            # currently no special handling
            ...
        else:
            # we only want to error at build-time
            if TYPE_CHECKING:  # type: ignore[unreachable]
                assert_never(event)

        self._current_event = None

    async def __stream__(self) -> AsyncIterator[AssistantStreamEvent]:
        """Drive the underlying SSE stream, emitting callbacks for every event
        before yielding it to the caller."""
        stream = self.__stream
        if not stream:
            raise RuntimeError("Stream has not been started yet")

        try:
            async for event in stream:
                await self._emit_sse_event(event)

                yield event
        except (httpx.TimeoutException, asyncio.TimeoutError) as exc:
            # Timeouts get a dedicated callback in addition to the generic one.
            await self.on_timeout()
            await self.on_exception(exc)
            raise
        except Exception as exc:
            await self.on_exception(exc)
            raise
        finally:
            # `on_end` always fires, whether the stream finished or errored.
            await self.on_end()


AsyncAssistantEventHandlerT = TypeVar("AsyncAssistantEventHandlerT", bound=AsyncAssistantEventHandler)


class AsyncAssistantStreamManager(Generic[AsyncAssistantEventHandlerT]):
    """Wrapper over AsyncAssistantStreamEventHandler that is returned by `.stream()`
    so that an async context manager can be used without `await`ing the
    original client call.

    ```py
    async with client.threads.create_and_run_stream(...) as stream:
        async for event in stream:
            ...
    ```
    """

    def __init__(
        self,
        api_request: Awaitable[AsyncStream[AssistantStreamEvent]],
        *,
        event_handler: AsyncAssistantEventHandlerT,
    ) -> None:
        # The request is stored un-awaited so that `__aenter__` can await it
        # lazily, letting callers use `async with` without an extra `await`.
        self.__stream: AsyncStream[AssistantStreamEvent] | None = None
        self.__event_handler = event_handler
        self.__api_request = api_request

    async def __aenter__(self) -> AsyncAssistantEventHandlerT:
        self.__stream = await self.__api_request
        self.__event_handler._init(self.__stream)
        return self.__event_handler

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        if self.__stream is not None:
            await self.__stream.close()


def accumulate_run_step(
    *,
    event: AssistantStreamEvent,
    run_step_snapshots: dict[str, RunStep],
) -> None:
    """Merge a run-step event into *run_step_snapshots* (keyed by step id), in place."""
    if event.event == "thread.run.step.created":
        run_step_snapshots[event.data.id] = event.data
        return

    if event.event == "thread.run.step.delta":
        data = event.data
        snapshot = run_step_snapshots[data.id]

        if data.delta:
            merged = accumulate_delta(
                cast(
                    "dict[object, object]",
                    snapshot.model_dump(exclude_unset=True),
                ),
                cast(
                    "dict[object, object]",
                    data.delta.model_dump(exclude_unset=True),
                ),
            )
            run_step_snapshots[snapshot.id] = cast(RunStep, construct_type(type_=RunStep, value=merged))

    return None


def accumulate_event(
    *,
    event: AssistantStreamEvent,
    current_message_snapshot: Message | None,
) -> tuple[Message | None, list[MessageContentDelta]]:
    """Returns a tuple of message snapshot and newly created text message deltas"""
    if event.event == "thread.message.created":
        return event.data, []

    new_content: list[MessageContentDelta] = []

    if event.event != "thread.message.delta":
        # Non-message events leave the snapshot untouched.
        return current_message_snapshot, []

    if not current_message_snapshot:
        raise RuntimeError("Encountered a message delta with no previous snapshot")

    data = event.data
    if data.delta.content:
        for content_delta in data.delta.content:
            try:
                block = current_message_snapshot.content[content_delta.index]
            except IndexError:
                # First time we've seen this content index: insert it wholesale
                # and report it as new so callers can fire `*_created` hooks.
                current_message_snapshot.content.insert(
                    content_delta.index,
                    cast(
                        MessageContent,
                        construct_type(
                            # mypy doesn't allow Content for some reason
                            type_=cast(Any, MessageContent),
                            value=content_delta.model_dump(exclude_unset=True),
                        ),
                    ),
                )
                new_content.append(content_delta)
            else:
                # Existing block: deep-merge the delta into the snapshot.
                merged = accumulate_delta(
                    cast(
                        "dict[object, object]",
                        block.model_dump(exclude_unset=True),
                    ),
                    cast(
                        "dict[object, object]",
                        content_delta.model_dump(exclude_unset=True),
                    ),
                )
                current_message_snapshot.content[content_delta.index] = cast(
                    MessageContent,
                    construct_type(
                        # mypy doesn't allow Content for some reason
                        type_=cast(Any, MessageContent),
                        value=merged,
                    ),
                )

    return current_message_snapshot, new_content


def accumulate_delta(acc: dict[object, object], delta: dict[object, object]) -> dict[object, object]:
    """Recursively merge *delta* into *acc*, mutating and returning *acc*.

    Strings are concatenated, numbers are added, dicts are merged recursively,
    and lists of objects are merged entry-by-entry via each entry's `index` key.
    """
    for key, delta_value in delta.items():
        if key not in acc:
            acc[key] = delta_value
            continue

        acc_value = acc[key]
        if acc_value is None:
            acc[key] = delta_value
            continue

        # the `index` property is used in arrays of objects so it should
        # not be accumulated like other values e.g.
        # [{'foo': 'bar', 'index': 0}]
        #
        # the same applies to `type` properties as they're used for
        # discriminated unions
        if key == "index" or key == "type":
            acc[key] = delta_value
            continue

        if isinstance(acc_value, str) and isinstance(delta_value, str):
            acc_value += delta_value
        elif isinstance(acc_value, (int, float)) and isinstance(delta_value, (int, float)):
            acc_value += delta_value
        elif is_dict(acc_value) and is_dict(delta_value):
            acc_value = accumulate_delta(acc_value, delta_value)
        elif is_list(acc_value) and is_list(delta_value):
            # for lists of non-dictionary items we'll only ever get new entries
            # in the array, existing entries will never be changed
            if all(isinstance(x, (str, int, float)) for x in acc_value):
                # NOTE: `extend` mutates in place, so the `continue` below can
                # safely skip the final `acc[key] = acc_value` re-store.
                acc_value.extend(delta_value)
                continue

            for delta_entry in delta_value:
                if not is_dict(delta_entry):
                    raise TypeError(f"Unexpected list delta entry is not a dictionary: {delta_entry}")

                try:
                    index = delta_entry["index"]
                except KeyError as exc:
                    raise RuntimeError(f"Expected list delta entry to have an `index` key; {delta_entry}") from exc

                if not isinstance(index, int):
                    raise TypeError(f"Unexpected, list delta entry `index` value is not an integer; {index}")

                try:
                    acc_entry = acc_value[index]
                except IndexError:
                    # New list entry: insert as-is.
                    acc_value.insert(index, delta_entry)
                else:
                    if not is_dict(acc_entry):
                        raise TypeError("not handled yet")

                    acc_value[index] = accumulate_delta(acc_entry, delta_entry)

        acc[key] = acc_value

    return acc
diff --git a/.venv/Lib/site-packages/openai/pagination.py b/.venv/Lib/site-packages/openai/pagination.py
new file mode 100644
index 00000000..82936382
--- /dev/null
+++ b/.venv/Lib/site-packages/openai/pagination.py
@@ -0,0 +1,107 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Any, List, Generic, TypeVar, Optional, cast
from typing_extensions import Protocol, override, runtime_checkable

from ._base_client import BasePage, PageInfo, BaseSyncPage, BaseAsyncPage

__all__ = ["SyncPage", "AsyncPage", "SyncCursorPage", "AsyncCursorPage"]

_T = TypeVar("_T")


@runtime_checkable
class CursorPageItem(Protocol):
    # Structural type: anything with an optional string `id` attribute counts
    # as a cursor-pageable item.
    id: Optional[str]


class SyncPage(BaseSyncPage[_T], BasePage[_T], Generic[_T]):
    """Note: no pagination actually occurs yet, this is for forwards-compatibility."""

    data: List[_T]
    object: str

    @override
    def _get_page_items(self) -> List[_T]:
        data = self.data
        if not data:
            return []
        return data

    @override
    def next_page_info(self) -> None:
        """
        This page represents a response that isn't actually paginated at the API level
        so there will never be a next page.
        """
        return None


class AsyncPage(BaseAsyncPage[_T], BasePage[_T], Generic[_T]):
    """Note: no pagination actually occurs yet, this is for forwards-compatibility."""

    data: List[_T]
    object: str

    @override
    def _get_page_items(self) -> List[_T]:
        data = self.data
        if not data:
            return []
        return data

    @override
    def next_page_info(self) -> None:
        """
        This page represents a response that isn't actually paginated at the API level
        so there will never be a next page.
        """
        return None


class SyncCursorPage(BaseSyncPage[_T], BasePage[_T], Generic[_T]):
    data: List[_T]

    @override
    def _get_page_items(self) -> List[_T]:
        data = self.data
        if not data:
            return []
        return data

    @override
    def next_page_info(self) -> Optional[PageInfo]:
        data = self.data
        if not data:
            return None

        # Cursor pagination: the `after` cursor is the id of the last item on
        # this page. Items without an id cannot be paginated past.
        item = cast(Any, data[-1])
        if not isinstance(item, CursorPageItem) or item.id is None:
            # TODO emit warning log
            return None

        return PageInfo(params={"after": item.id})


class AsyncCursorPage(BaseAsyncPage[_T], BasePage[_T], Generic[_T]):
    data: List[_T]

    @override
    def _get_page_items(self) -> List[_T]:
        data = self.data
        if not data:
            return []
        return data

    @override
    def next_page_info(self) -> Optional[PageInfo]:
        data = self.data
        if not data:
            return None

        # Cursor pagination: the `after` cursor is the id of the last item on
        # this page. Items without an id cannot be paginated past.
        item = cast(Any, data[-1])
        if not isinstance(item, CursorPageItem) or item.id is None:
            # TODO emit warning log
            return None

        return PageInfo(params={"after": item.id})
diff --git a/.venv/Lib/site-packages/openai/py.typed b/.venv/Lib/site-packages/openai/py.typed
new file mode 100644
index 00000000..e69de29b
diff --git a/.venv/Lib/site-packages/openai/resources/__init__.py b/.venv/Lib/site-packages/openai/resources/__init__.py
new file mode 100644
index 00000000..ecae4243
--- /dev/null
+++ b/.venv/Lib/site-packages/openai/resources/__init__.py
@@ -0,0 +1,159 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+ +from .beta import ( + Beta, + AsyncBeta, + BetaWithRawResponse, + AsyncBetaWithRawResponse, + BetaWithStreamingResponse, + AsyncBetaWithStreamingResponse, +) +from .chat import ( + Chat, + AsyncChat, + ChatWithRawResponse, + AsyncChatWithRawResponse, + ChatWithStreamingResponse, + AsyncChatWithStreamingResponse, +) +from .audio import ( + Audio, + AsyncAudio, + AudioWithRawResponse, + AsyncAudioWithRawResponse, + AudioWithStreamingResponse, + AsyncAudioWithStreamingResponse, +) +from .files import ( + Files, + AsyncFiles, + FilesWithRawResponse, + AsyncFilesWithRawResponse, + FilesWithStreamingResponse, + AsyncFilesWithStreamingResponse, +) +from .images import ( + Images, + AsyncImages, + ImagesWithRawResponse, + AsyncImagesWithRawResponse, + ImagesWithStreamingResponse, + AsyncImagesWithStreamingResponse, +) +from .models import ( + Models, + AsyncModels, + ModelsWithRawResponse, + AsyncModelsWithRawResponse, + ModelsWithStreamingResponse, + AsyncModelsWithStreamingResponse, +) +from .batches import ( + Batches, + AsyncBatches, + BatchesWithRawResponse, + AsyncBatchesWithRawResponse, + BatchesWithStreamingResponse, + AsyncBatchesWithStreamingResponse, +) +from .embeddings import ( + Embeddings, + AsyncEmbeddings, + EmbeddingsWithRawResponse, + AsyncEmbeddingsWithRawResponse, + EmbeddingsWithStreamingResponse, + AsyncEmbeddingsWithStreamingResponse, +) +from .completions import ( + Completions, + AsyncCompletions, + CompletionsWithRawResponse, + AsyncCompletionsWithRawResponse, + CompletionsWithStreamingResponse, + AsyncCompletionsWithStreamingResponse, +) +from .fine_tuning import ( + FineTuning, + AsyncFineTuning, + FineTuningWithRawResponse, + AsyncFineTuningWithRawResponse, + FineTuningWithStreamingResponse, + AsyncFineTuningWithStreamingResponse, +) +from .moderations import ( + Moderations, + AsyncModerations, + ModerationsWithRawResponse, + AsyncModerationsWithRawResponse, + ModerationsWithStreamingResponse, + AsyncModerationsWithStreamingResponse, +) + 
+__all__ = [ + "Completions", + "AsyncCompletions", + "CompletionsWithRawResponse", + "AsyncCompletionsWithRawResponse", + "CompletionsWithStreamingResponse", + "AsyncCompletionsWithStreamingResponse", + "Chat", + "AsyncChat", + "ChatWithRawResponse", + "AsyncChatWithRawResponse", + "ChatWithStreamingResponse", + "AsyncChatWithStreamingResponse", + "Embeddings", + "AsyncEmbeddings", + "EmbeddingsWithRawResponse", + "AsyncEmbeddingsWithRawResponse", + "EmbeddingsWithStreamingResponse", + "AsyncEmbeddingsWithStreamingResponse", + "Files", + "AsyncFiles", + "FilesWithRawResponse", + "AsyncFilesWithRawResponse", + "FilesWithStreamingResponse", + "AsyncFilesWithStreamingResponse", + "Images", + "AsyncImages", + "ImagesWithRawResponse", + "AsyncImagesWithRawResponse", + "ImagesWithStreamingResponse", + "AsyncImagesWithStreamingResponse", + "Audio", + "AsyncAudio", + "AudioWithRawResponse", + "AsyncAudioWithRawResponse", + "AudioWithStreamingResponse", + "AsyncAudioWithStreamingResponse", + "Moderations", + "AsyncModerations", + "ModerationsWithRawResponse", + "AsyncModerationsWithRawResponse", + "ModerationsWithStreamingResponse", + "AsyncModerationsWithStreamingResponse", + "Models", + "AsyncModels", + "ModelsWithRawResponse", + "AsyncModelsWithRawResponse", + "ModelsWithStreamingResponse", + "AsyncModelsWithStreamingResponse", + "FineTuning", + "AsyncFineTuning", + "FineTuningWithRawResponse", + "AsyncFineTuningWithRawResponse", + "FineTuningWithStreamingResponse", + "AsyncFineTuningWithStreamingResponse", + "Beta", + "AsyncBeta", + "BetaWithRawResponse", + "AsyncBetaWithRawResponse", + "BetaWithStreamingResponse", + "AsyncBetaWithStreamingResponse", + "Batches", + "AsyncBatches", + "BatchesWithRawResponse", + "AsyncBatchesWithRawResponse", + "BatchesWithStreamingResponse", + "AsyncBatchesWithStreamingResponse", +] diff --git a/.venv/Lib/site-packages/openai/resources/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/openai/resources/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..a047378f Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/__pycache__/batches.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/__pycache__/batches.cpython-311.pyc new file mode 100644 index 00000000..f9675efd Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/__pycache__/batches.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/__pycache__/completions.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/__pycache__/completions.cpython-311.pyc new file mode 100644 index 00000000..482341be Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/__pycache__/completions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/__pycache__/embeddings.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/__pycache__/embeddings.cpython-311.pyc new file mode 100644 index 00000000..ef7ab8ba Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/__pycache__/embeddings.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/__pycache__/files.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/__pycache__/files.cpython-311.pyc new file mode 100644 index 00000000..0e9890f7 Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/__pycache__/files.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/__pycache__/images.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/__pycache__/images.cpython-311.pyc new file mode 100644 index 00000000..383a98f8 Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/__pycache__/images.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/__pycache__/models.cpython-311.pyc 
b/.venv/Lib/site-packages/openai/resources/__pycache__/models.cpython-311.pyc new file mode 100644 index 00000000..fb5426fe Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/__pycache__/models.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/__pycache__/moderations.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/__pycache__/moderations.cpython-311.pyc new file mode 100644 index 00000000..e6950d77 Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/__pycache__/moderations.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/audio/__init__.py b/.venv/Lib/site-packages/openai/resources/audio/__init__.py new file mode 100644 index 00000000..7da1d2db --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/audio/__init__.py @@ -0,0 +1,61 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from .audio import ( + Audio, + AsyncAudio, + AudioWithRawResponse, + AsyncAudioWithRawResponse, + AudioWithStreamingResponse, + AsyncAudioWithStreamingResponse, +) +from .speech import ( + Speech, + AsyncSpeech, + SpeechWithRawResponse, + AsyncSpeechWithRawResponse, + SpeechWithStreamingResponse, + AsyncSpeechWithStreamingResponse, +) +from .translations import ( + Translations, + AsyncTranslations, + TranslationsWithRawResponse, + AsyncTranslationsWithRawResponse, + TranslationsWithStreamingResponse, + AsyncTranslationsWithStreamingResponse, +) +from .transcriptions import ( + Transcriptions, + AsyncTranscriptions, + TranscriptionsWithRawResponse, + AsyncTranscriptionsWithRawResponse, + TranscriptionsWithStreamingResponse, + AsyncTranscriptionsWithStreamingResponse, +) + +__all__ = [ + "Transcriptions", + "AsyncTranscriptions", + "TranscriptionsWithRawResponse", + "AsyncTranscriptionsWithRawResponse", + "TranscriptionsWithStreamingResponse", + "AsyncTranscriptionsWithStreamingResponse", + "Translations", + "AsyncTranslations", + 
"TranslationsWithRawResponse", + "AsyncTranslationsWithRawResponse", + "TranslationsWithStreamingResponse", + "AsyncTranslationsWithStreamingResponse", + "Speech", + "AsyncSpeech", + "SpeechWithRawResponse", + "AsyncSpeechWithRawResponse", + "SpeechWithStreamingResponse", + "AsyncSpeechWithStreamingResponse", + "Audio", + "AsyncAudio", + "AudioWithRawResponse", + "AsyncAudioWithRawResponse", + "AudioWithStreamingResponse", + "AsyncAudioWithStreamingResponse", +] diff --git a/.venv/Lib/site-packages/openai/resources/audio/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/audio/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..880a2595 Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/audio/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/audio/__pycache__/audio.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/audio/__pycache__/audio.cpython-311.pyc new file mode 100644 index 00000000..cf767c57 Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/audio/__pycache__/audio.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/audio/__pycache__/speech.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/audio/__pycache__/speech.cpython-311.pyc new file mode 100644 index 00000000..b6a9011c Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/audio/__pycache__/speech.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/audio/__pycache__/transcriptions.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/audio/__pycache__/transcriptions.cpython-311.pyc new file mode 100644 index 00000000..bf5dcfcd Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/audio/__pycache__/transcriptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/audio/__pycache__/translations.cpython-311.pyc 
b/.venv/Lib/site-packages/openai/resources/audio/__pycache__/translations.cpython-311.pyc
new file mode 100644
index 00000000..e42ec5e9
Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/audio/__pycache__/translations.cpython-311.pyc differ
diff --git a/.venv/Lib/site-packages/openai/resources/audio/audio.py b/.venv/Lib/site-packages/openai/resources/audio/audio.py
new file mode 100644
index 00000000..537ad573
--- /dev/null
+++ b/.venv/Lib/site-packages/openai/resources/audio/audio.py
@@ -0,0 +1,144 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

from .speech import (
    Speech,
    AsyncSpeech,
    SpeechWithRawResponse,
    AsyncSpeechWithRawResponse,
    SpeechWithStreamingResponse,
    AsyncSpeechWithStreamingResponse,
)
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
from .translations import (
    Translations,
    AsyncTranslations,
    TranslationsWithRawResponse,
    AsyncTranslationsWithRawResponse,
    TranslationsWithStreamingResponse,
    AsyncTranslationsWithStreamingResponse,
)
from .transcriptions import (
    Transcriptions,
    AsyncTranscriptions,
    TranscriptionsWithRawResponse,
    AsyncTranscriptionsWithRawResponse,
    TranscriptionsWithStreamingResponse,
    AsyncTranscriptionsWithStreamingResponse,
)

__all__ = ["Audio", "AsyncAudio"]


class Audio(SyncAPIResource):
    # Namespace resource: groups the transcriptions/translations/speech
    # sub-resources under `client.audio.*`; each accessor is cached per client.
    @cached_property
    def transcriptions(self) -> Transcriptions:
        return Transcriptions(self._client)

    @cached_property
    def translations(self) -> Translations:
        return Translations(self._client)

    @cached_property
    def speech(self) -> Speech:
        return Speech(self._client)

    @cached_property
    def with_raw_response(self) -> AudioWithRawResponse:
        return AudioWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> AudioWithStreamingResponse:
        return AudioWithStreamingResponse(self)


class AsyncAudio(AsyncAPIResource):
    # Async counterpart of `Audio`; same sub-resource layout.
    @cached_property
    def transcriptions(self) -> AsyncTranscriptions:
        return AsyncTranscriptions(self._client)

    @cached_property
    def translations(self) -> AsyncTranslations:
        return AsyncTranslations(self._client)

    @cached_property
    def speech(self) -> AsyncSpeech:
        return AsyncSpeech(self._client)

    @cached_property
    def with_raw_response(self) -> AsyncAudioWithRawResponse:
        return AsyncAudioWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> AsyncAudioWithStreamingResponse:
        return AsyncAudioWithStreamingResponse(self)


class AudioWithRawResponse:
    # View of `Audio` whose sub-resources return raw HTTP responses.
    def __init__(self, audio: Audio) -> None:
        self._audio = audio

    @cached_property
    def transcriptions(self) -> TranscriptionsWithRawResponse:
        return TranscriptionsWithRawResponse(self._audio.transcriptions)

    @cached_property
    def translations(self) -> TranslationsWithRawResponse:
        return TranslationsWithRawResponse(self._audio.translations)

    @cached_property
    def speech(self) -> SpeechWithRawResponse:
        return SpeechWithRawResponse(self._audio.speech)


class AsyncAudioWithRawResponse:
    # Async view of `Audio` whose sub-resources return raw HTTP responses.
    def __init__(self, audio: AsyncAudio) -> None:
        self._audio = audio

    @cached_property
    def transcriptions(self) -> AsyncTranscriptionsWithRawResponse:
        return AsyncTranscriptionsWithRawResponse(self._audio.transcriptions)

    @cached_property
    def translations(self) -> AsyncTranslationsWithRawResponse:
        return AsyncTranslationsWithRawResponse(self._audio.translations)

    @cached_property
    def speech(self) -> AsyncSpeechWithRawResponse:
        return AsyncSpeechWithRawResponse(self._audio.speech)


class AudioWithStreamingResponse:
    # View of `Audio` whose sub-resources return streamed responses.
    def __init__(self, audio: Audio) -> None:
        self._audio = audio

    @cached_property
    def transcriptions(self) -> TranscriptionsWithStreamingResponse:
        return TranscriptionsWithStreamingResponse(self._audio.transcriptions)

    @cached_property
    def translations(self) -> TranslationsWithStreamingResponse:
        return TranslationsWithStreamingResponse(self._audio.translations)

    @cached_property
    def speech(self) -> SpeechWithStreamingResponse:
        return SpeechWithStreamingResponse(self._audio.speech)


class AsyncAudioWithStreamingResponse:
    # Async view of `Audio` whose sub-resources return streamed responses.
    def __init__(self, audio: AsyncAudio) -> None:
        self._audio = audio

    @cached_property
    def transcriptions(self) -> AsyncTranscriptionsWithStreamingResponse:
        return AsyncTranscriptionsWithStreamingResponse(self._audio.transcriptions)

    @cached_property
    def translations(self) -> AsyncTranslationsWithStreamingResponse:
        return AsyncTranslationsWithStreamingResponse(self._audio.translations)

    @cached_property
    def speech(self) -> AsyncSpeechWithStreamingResponse:
        return AsyncSpeechWithStreamingResponse(self._audio.speech)
diff --git a/.venv/Lib/site-packages/openai/resources/audio/speech.py b/.venv/Lib/site-packages/openai/resources/audio/speech.py
new file mode 100644
index 00000000..e26c5805
--- /dev/null
+++ b/.venv/Lib/site-packages/openai/resources/audio/speech.py
@@ -0,0 +1,213 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

from typing import Union
from typing_extensions import Literal

import httpx

from ...
import _legacy_response
from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
from ..._utils import (
    maybe_transform,
    async_maybe_transform,
)
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
from ..._response import (
    StreamedBinaryAPIResponse,
    AsyncStreamedBinaryAPIResponse,
    to_custom_streamed_response_wrapper,
    async_to_custom_streamed_response_wrapper,
)
from ...types.audio import speech_create_params
from ..._base_client import (
    make_request_options,
)

__all__ = ["Speech", "AsyncSpeech"]


class Speech(SyncAPIResource):
    # Synchronous text-to-speech resource (`client.audio.speech`).
    @cached_property
    def with_raw_response(self) -> SpeechWithRawResponse:
        return SpeechWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> SpeechWithStreamingResponse:
        return SpeechWithStreamingResponse(self)

    def create(
        self,
        *,
        input: str,
        model: Union[str, Literal["tts-1", "tts-1-hd"]],
        voice: Literal["alloy", "echo", "fable", "onyx", "nova", "shimmer"],
        response_format: Literal["mp3", "opus", "aac", "flac", "wav", "pcm"] | NotGiven = NOT_GIVEN,
        speed: float | NotGiven = NOT_GIVEN,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> _legacy_response.HttpxBinaryResponseContent:
        """
        Generates audio from the input text.

        Args:
          input: The text to generate audio for. The maximum length is 4096 characters.

          model:
              One of the available [TTS models](https://platform.openai.com/docs/models/tts):
              `tts-1` or `tts-1-hd`

          voice: The voice to use when generating the audio. Supported voices are `alloy`,
              `echo`, `fable`, `onyx`, `nova`, and `shimmer`. Previews of the voices are
              available in the
              [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech/voice-options).

          response_format: The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`,
              `wav`, and `pcm`.

          speed: The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is
              the default.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        # Response body is raw audio bytes, not JSON.
        extra_headers = {"Accept": "application/octet-stream", **(extra_headers or {})}
        return self._post(
            "/audio/speech",
            body=maybe_transform(
                {
                    "input": input,
                    "model": model,
                    "voice": voice,
                    "response_format": response_format,
                    "speed": speed,
                },
                speech_create_params.SpeechCreateParams,
            ),
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=_legacy_response.HttpxBinaryResponseContent,
        )


class AsyncSpeech(AsyncAPIResource):
    # Asynchronous text-to-speech resource; mirrors `Speech`.
    @cached_property
    def with_raw_response(self) -> AsyncSpeechWithRawResponse:
        return AsyncSpeechWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> AsyncSpeechWithStreamingResponse:
        return AsyncSpeechWithStreamingResponse(self)

    async def create(
        self,
        *,
        input: str,
        model: Union[str, Literal["tts-1", "tts-1-hd"]],
        voice: Literal["alloy", "echo", "fable", "onyx", "nova", "shimmer"],
        response_format: Literal["mp3", "opus", "aac", "flac", "wav", "pcm"] | NotGiven = NOT_GIVEN,
        speed: float | NotGiven = NOT_GIVEN,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> _legacy_response.HttpxBinaryResponseContent:
        """
        Generates audio from the input text.

        Args:
          input: The text to generate audio for. The maximum length is 4096 characters.

          model:
              One of the available [TTS models](https://platform.openai.com/docs/models/tts):
              `tts-1` or `tts-1-hd`

          voice: The voice to use when generating the audio. Supported voices are `alloy`,
              `echo`, `fable`, `onyx`, `nova`, and `shimmer`. Previews of the voices are
              available in the
              [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech/voice-options).

          response_format: The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`,
              `wav`, and `pcm`.

          speed: The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is
              the default.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        # Response body is raw audio bytes, not JSON.
        extra_headers = {"Accept": "application/octet-stream", **(extra_headers or {})}
        return await self._post(
            "/audio/speech",
            body=await async_maybe_transform(
                {
                    "input": input,
                    "model": model,
                    "voice": voice,
                    "response_format": response_format,
                    "speed": speed,
                },
                speech_create_params.SpeechCreateParams,
            ),
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=_legacy_response.HttpxBinaryResponseContent,
        )


class SpeechWithRawResponse:
    # Wraps `Speech.create` to return the raw HTTP response.
    def __init__(self, speech: Speech) -> None:
        self._speech = speech

        self.create = _legacy_response.to_raw_response_wrapper(
            speech.create,
        )


class AsyncSpeechWithRawResponse:
    # Wraps `AsyncSpeech.create` to return the raw HTTP response.
    def __init__(self, speech: AsyncSpeech) -> None:
        self._speech = speech

        self.create = _legacy_response.async_to_raw_response_wrapper(
            speech.create,
        )


class SpeechWithStreamingResponse:
    # Wraps `Speech.create` to stream the binary audio response.
    def __init__(self, speech: Speech) -> None:
        self._speech = speech

        self.create = to_custom_streamed_response_wrapper(
            speech.create,
            StreamedBinaryAPIResponse,
        )


class AsyncSpeechWithStreamingResponse:
    # Wraps `AsyncSpeech.create` to stream the binary audio response.
    def __init__(self, speech: AsyncSpeech) -> None:
        self._speech = speech

        self.create = async_to_custom_streamed_response_wrapper(
            speech.create,
            AsyncStreamedBinaryAPIResponse,
        )
diff --git a/.venv/Lib/site-packages/openai/resources/audio/transcriptions.py b/.venv/Lib/site-packages/openai/resources/audio/transcriptions.py
new file mode 100644
index 00000000..99568018
--- /dev/null
+++ b/.venv/Lib/site-packages/openai/resources/audio/transcriptions.py
@@ -0,0 +1,257 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+ +from __future__ import annotations + +from typing import List, Union, Mapping, cast +from typing_extensions import Literal + +import httpx + +from ... import _legacy_response +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, FileTypes +from ..._utils import ( + extract_files, + maybe_transform, + deepcopy_minimal, + async_maybe_transform, +) +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper +from ...types.audio import transcription_create_params +from ..._base_client import ( + make_request_options, +) +from ...types.audio.transcription import Transcription + +__all__ = ["Transcriptions", "AsyncTranscriptions"] + + +class Transcriptions(SyncAPIResource): + @cached_property + def with_raw_response(self) -> TranscriptionsWithRawResponse: + return TranscriptionsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> TranscriptionsWithStreamingResponse: + return TranscriptionsWithStreamingResponse(self) + + def create( + self, + *, + file: FileTypes, + model: Union[str, Literal["whisper-1"]], + language: str | NotGiven = NOT_GIVEN, + prompt: str | NotGiven = NOT_GIVEN, + response_format: Literal["json", "text", "srt", "verbose_json", "vtt"] | NotGiven = NOT_GIVEN, + temperature: float | NotGiven = NOT_GIVEN, + timestamp_granularities: List[Literal["word", "segment"]] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Transcription: + """ + Transcribes audio into the input language. 
+ + Args: + file: + The audio file object (not file name) to transcribe, in one of these formats: + flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. + + model: ID of the model to use. Only `whisper-1` (which is powered by our open source + Whisper V2 model) is currently available. + + language: The language of the input audio. Supplying the input language in + [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) format will + improve accuracy and latency. + + prompt: An optional text to guide the model's style or continue a previous audio + segment. The + [prompt](https://platform.openai.com/docs/guides/speech-to-text/prompting) + should match the audio language. + + response_format: The format of the transcript output, in one of these options: `json`, `text`, + `srt`, `verbose_json`, or `vtt`. + + temperature: The sampling temperature, between 0 and 1. Higher values like 0.8 will make the + output more random, while lower values like 0.2 will make it more focused and + deterministic. If set to 0, the model will use + [log probability](https://en.wikipedia.org/wiki/Log_probability) to + automatically increase the temperature until certain thresholds are hit. + + timestamp_granularities: The timestamp granularities to populate for this transcription. + `response_format` must be set `verbose_json` to use timestamp granularities. + Either or both of these options are supported: `word`, or `segment`. Note: There + is no additional latency for segment timestamps, but generating word timestamps + incurs additional latency. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + body = deepcopy_minimal( + { + "file": file, + "model": model, + "language": language, + "prompt": prompt, + "response_format": response_format, + "temperature": temperature, + "timestamp_granularities": timestamp_granularities, + } + ) + files = extract_files(cast(Mapping[str, object], body), paths=[["file"]]) + if files: + # It should be noted that the actual Content-Type header that will be + # sent to the server will contain a `boundary` parameter, e.g. + # multipart/form-data; boundary=---abc-- + extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})} + return self._post( + "/audio/transcriptions", + body=maybe_transform(body, transcription_create_params.TranscriptionCreateParams), + files=files, + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Transcription, + ) + + +class AsyncTranscriptions(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncTranscriptionsWithRawResponse: + return AsyncTranscriptionsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncTranscriptionsWithStreamingResponse: + return AsyncTranscriptionsWithStreamingResponse(self) + + async def create( + self, + *, + file: FileTypes, + model: Union[str, Literal["whisper-1"]], + language: str | NotGiven = NOT_GIVEN, + prompt: str | NotGiven = NOT_GIVEN, + response_format: Literal["json", "text", "srt", "verbose_json", "vtt"] | NotGiven = NOT_GIVEN, + temperature: float | NotGiven = NOT_GIVEN, + timestamp_granularities: List[Literal["word", "segment"]] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't 
available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Transcription: + """ + Transcribes audio into the input language. + + Args: + file: + The audio file object (not file name) to transcribe, in one of these formats: + flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. + + model: ID of the model to use. Only `whisper-1` (which is powered by our open source + Whisper V2 model) is currently available. + + language: The language of the input audio. Supplying the input language in + [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) format will + improve accuracy and latency. + + prompt: An optional text to guide the model's style or continue a previous audio + segment. The + [prompt](https://platform.openai.com/docs/guides/speech-to-text/prompting) + should match the audio language. + + response_format: The format of the transcript output, in one of these options: `json`, `text`, + `srt`, `verbose_json`, or `vtt`. + + temperature: The sampling temperature, between 0 and 1. Higher values like 0.8 will make the + output more random, while lower values like 0.2 will make it more focused and + deterministic. If set to 0, the model will use + [log probability](https://en.wikipedia.org/wiki/Log_probability) to + automatically increase the temperature until certain thresholds are hit. + + timestamp_granularities: The timestamp granularities to populate for this transcription. + `response_format` must be set `verbose_json` to use timestamp granularities. + Either or both of these options are supported: `word`, or `segment`. Note: There + is no additional latency for segment timestamps, but generating word timestamps + incurs additional latency. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + body = deepcopy_minimal( + { + "file": file, + "model": model, + "language": language, + "prompt": prompt, + "response_format": response_format, + "temperature": temperature, + "timestamp_granularities": timestamp_granularities, + } + ) + files = extract_files(cast(Mapping[str, object], body), paths=[["file"]]) + if files: + # It should be noted that the actual Content-Type header that will be + # sent to the server will contain a `boundary` parameter, e.g. + # multipart/form-data; boundary=---abc-- + extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})} + return await self._post( + "/audio/transcriptions", + body=await async_maybe_transform(body, transcription_create_params.TranscriptionCreateParams), + files=files, + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Transcription, + ) + + +class TranscriptionsWithRawResponse: + def __init__(self, transcriptions: Transcriptions) -> None: + self._transcriptions = transcriptions + + self.create = _legacy_response.to_raw_response_wrapper( + transcriptions.create, + ) + + +class AsyncTranscriptionsWithRawResponse: + def __init__(self, transcriptions: AsyncTranscriptions) -> None: + self._transcriptions = transcriptions + + self.create = _legacy_response.async_to_raw_response_wrapper( + transcriptions.create, + ) + + +class TranscriptionsWithStreamingResponse: + def __init__(self, transcriptions: Transcriptions) -> None: + self._transcriptions = transcriptions + + self.create = to_streamed_response_wrapper( + transcriptions.create, + ) + + +class AsyncTranscriptionsWithStreamingResponse: + def __init__(self, transcriptions: AsyncTranscriptions) -> None: 
+ self._transcriptions = transcriptions + + self.create = async_to_streamed_response_wrapper( + transcriptions.create, + ) diff --git a/.venv/Lib/site-packages/openai/resources/audio/translations.py b/.venv/Lib/site-packages/openai/resources/audio/translations.py new file mode 100644 index 00000000..d711ee2f --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/audio/translations.py @@ -0,0 +1,227 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union, Mapping, cast +from typing_extensions import Literal + +import httpx + +from ... import _legacy_response +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, FileTypes +from ..._utils import ( + extract_files, + maybe_transform, + deepcopy_minimal, + async_maybe_transform, +) +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper +from ...types.audio import translation_create_params +from ..._base_client import ( + make_request_options, +) +from ...types.audio.translation import Translation + +__all__ = ["Translations", "AsyncTranslations"] + + +class Translations(SyncAPIResource): + @cached_property + def with_raw_response(self) -> TranslationsWithRawResponse: + return TranslationsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> TranslationsWithStreamingResponse: + return TranslationsWithStreamingResponse(self) + + def create( + self, + *, + file: FileTypes, + model: Union[str, Literal["whisper-1"]], + prompt: str | NotGiven = NOT_GIVEN, + response_format: str | NotGiven = NOT_GIVEN, + temperature: float | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Translation: + """ + Translates audio into English. + + Args: + file: The audio file object (not file name) translate, in one of these formats: flac, + mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. + + model: ID of the model to use. Only `whisper-1` (which is powered by our open source + Whisper V2 model) is currently available. + + prompt: An optional text to guide the model's style or continue a previous audio + segment. The + [prompt](https://platform.openai.com/docs/guides/speech-to-text/prompting) + should be in English. + + response_format: The format of the transcript output, in one of these options: `json`, `text`, + `srt`, `verbose_json`, or `vtt`. + + temperature: The sampling temperature, between 0 and 1. Higher values like 0.8 will make the + output more random, while lower values like 0.2 will make it more focused and + deterministic. If set to 0, the model will use + [log probability](https://en.wikipedia.org/wiki/Log_probability) to + automatically increase the temperature until certain thresholds are hit. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + body = deepcopy_minimal( + { + "file": file, + "model": model, + "prompt": prompt, + "response_format": response_format, + "temperature": temperature, + } + ) + files = extract_files(cast(Mapping[str, object], body), paths=[["file"]]) + if files: + # It should be noted that the actual Content-Type header that will be + # sent to the server will contain a `boundary` parameter, e.g. 
+ # multipart/form-data; boundary=---abc-- + extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})} + return self._post( + "/audio/translations", + body=maybe_transform(body, translation_create_params.TranslationCreateParams), + files=files, + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Translation, + ) + + +class AsyncTranslations(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncTranslationsWithRawResponse: + return AsyncTranslationsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncTranslationsWithStreamingResponse: + return AsyncTranslationsWithStreamingResponse(self) + + async def create( + self, + *, + file: FileTypes, + model: Union[str, Literal["whisper-1"]], + prompt: str | NotGiven = NOT_GIVEN, + response_format: str | NotGiven = NOT_GIVEN, + temperature: float | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Translation: + """ + Translates audio into English. + + Args: + file: The audio file object (not file name) translate, in one of these formats: flac, + mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. + + model: ID of the model to use. Only `whisper-1` (which is powered by our open source + Whisper V2 model) is currently available. + + prompt: An optional text to guide the model's style or continue a previous audio + segment. The + [prompt](https://platform.openai.com/docs/guides/speech-to-text/prompting) + should be in English. 
+ + response_format: The format of the transcript output, in one of these options: `json`, `text`, + `srt`, `verbose_json`, or `vtt`. + + temperature: The sampling temperature, between 0 and 1. Higher values like 0.8 will make the + output more random, while lower values like 0.2 will make it more focused and + deterministic. If set to 0, the model will use + [log probability](https://en.wikipedia.org/wiki/Log_probability) to + automatically increase the temperature until certain thresholds are hit. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + body = deepcopy_minimal( + { + "file": file, + "model": model, + "prompt": prompt, + "response_format": response_format, + "temperature": temperature, + } + ) + files = extract_files(cast(Mapping[str, object], body), paths=[["file"]]) + if files: + # It should be noted that the actual Content-Type header that will be + # sent to the server will contain a `boundary` parameter, e.g. 
+ # multipart/form-data; boundary=---abc-- + extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})} + return await self._post( + "/audio/translations", + body=await async_maybe_transform(body, translation_create_params.TranslationCreateParams), + files=files, + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Translation, + ) + + +class TranslationsWithRawResponse: + def __init__(self, translations: Translations) -> None: + self._translations = translations + + self.create = _legacy_response.to_raw_response_wrapper( + translations.create, + ) + + +class AsyncTranslationsWithRawResponse: + def __init__(self, translations: AsyncTranslations) -> None: + self._translations = translations + + self.create = _legacy_response.async_to_raw_response_wrapper( + translations.create, + ) + + +class TranslationsWithStreamingResponse: + def __init__(self, translations: Translations) -> None: + self._translations = translations + + self.create = to_streamed_response_wrapper( + translations.create, + ) + + +class AsyncTranslationsWithStreamingResponse: + def __init__(self, translations: AsyncTranslations) -> None: + self._translations = translations + + self.create = async_to_streamed_response_wrapper( + translations.create, + ) diff --git a/.venv/Lib/site-packages/openai/resources/batches.py b/.venv/Lib/site-packages/openai/resources/batches.py new file mode 100644 index 00000000..64a3014c --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/batches.py @@ -0,0 +1,475 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict, Optional +from typing_extensions import Literal + +import httpx + +from .. 
import _legacy_response +from ..types import batch_list_params, batch_create_params +from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from .._utils import ( + maybe_transform, + async_maybe_transform, +) +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper +from ..pagination import SyncCursorPage, AsyncCursorPage +from ..types.batch import Batch +from .._base_client import ( + AsyncPaginator, + make_request_options, +) + +__all__ = ["Batches", "AsyncBatches"] + + +class Batches(SyncAPIResource): + @cached_property + def with_raw_response(self) -> BatchesWithRawResponse: + return BatchesWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> BatchesWithStreamingResponse: + return BatchesWithStreamingResponse(self) + + def create( + self, + *, + completion_window: Literal["24h"], + endpoint: Literal["/v1/chat/completions", "/v1/embeddings"], + input_file_id: str, + metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Batch: + """ + Creates and executes a batch from an uploaded file of requests + + Args: + completion_window: The time frame within which the batch should be processed. Currently only `24h` + is supported. + + endpoint: The endpoint to be used for all requests in the batch. Currently + `/v1/chat/completions` and `/v1/embeddings` are supported. + + input_file_id: The ID of an uploaded file that contains requests for the new batch. 
+ + See [upload file](https://platform.openai.com/docs/api-reference/files/create) + for how to upload a file. + + Your input file must be formatted as a + [JSONL file](https://platform.openai.com/docs/api-reference/batch/requestInput), + and must be uploaded with the purpose `batch`. + + metadata: Optional custom metadata for the batch. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/batches", + body=maybe_transform( + { + "completion_window": completion_window, + "endpoint": endpoint, + "input_file_id": input_file_id, + "metadata": metadata, + }, + batch_create_params.BatchCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Batch, + ) + + def retrieve( + self, + batch_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Batch: + """ + Retrieves a batch. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not batch_id: + raise ValueError(f"Expected a non-empty value for `batch_id` but received {batch_id!r}") + return self._get( + f"/batches/{batch_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Batch, + ) + + def list( + self, + *, + after: str | NotGiven = NOT_GIVEN, + limit: int | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> SyncCursorPage[Batch]: + """List your organization's batches. + + Args: + after: A cursor for use in pagination. + + `after` is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include after=obj_foo in order to + fetch the next page of the list. + + limit: A limit on the number of objects to be returned. Limit can range between 1 and + 100, and the default is 20. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get_api_list( + "/batches", + page=SyncCursorPage[Batch], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "after": after, + "limit": limit, + }, + batch_list_params.BatchListParams, + ), + ), + model=Batch, + ) + + def cancel( + self, + batch_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Batch: + """ + Cancels an in-progress batch. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not batch_id: + raise ValueError(f"Expected a non-empty value for `batch_id` but received {batch_id!r}") + return self._post( + f"/batches/{batch_id}/cancel", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Batch, + ) + + +class AsyncBatches(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncBatchesWithRawResponse: + return AsyncBatchesWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncBatchesWithStreamingResponse: + return AsyncBatchesWithStreamingResponse(self) + + async def create( + self, + *, + completion_window: Literal["24h"], + endpoint: Literal["/v1/chat/completions", "/v1/embeddings"], + input_file_id: str, + metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Batch: + """ + Creates and executes a batch from an uploaded file of requests + + Args: + completion_window: The time frame within which the batch should be processed. Currently only `24h` + is supported. + + endpoint: The endpoint to be used for all requests in the batch. Currently + `/v1/chat/completions` and `/v1/embeddings` are supported. + + input_file_id: The ID of an uploaded file that contains requests for the new batch. 
+ + See [upload file](https://platform.openai.com/docs/api-reference/files/create) + for how to upload a file. + + Your input file must be formatted as a + [JSONL file](https://platform.openai.com/docs/api-reference/batch/requestInput), + and must be uploaded with the purpose `batch`. + + metadata: Optional custom metadata for the batch. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/batches", + body=await async_maybe_transform( + { + "completion_window": completion_window, + "endpoint": endpoint, + "input_file_id": input_file_id, + "metadata": metadata, + }, + batch_create_params.BatchCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Batch, + ) + + async def retrieve( + self, + batch_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Batch: + """ + Retrieves a batch. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not batch_id: + raise ValueError(f"Expected a non-empty value for `batch_id` but received {batch_id!r}") + return await self._get( + f"/batches/{batch_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Batch, + ) + + def list( + self, + *, + after: str | NotGiven = NOT_GIVEN, + limit: int | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncPaginator[Batch, AsyncCursorPage[Batch]]: + """List your organization's batches. + + Args: + after: A cursor for use in pagination. + + `after` is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include after=obj_foo in order to + fetch the next page of the list. + + limit: A limit on the number of objects to be returned. Limit can range between 1 and + 100, and the default is 20. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get_api_list( + "/batches", + page=AsyncCursorPage[Batch], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "after": after, + "limit": limit, + }, + batch_list_params.BatchListParams, + ), + ), + model=Batch, + ) + + async def cancel( + self, + batch_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Batch: + """ + Cancels an in-progress batch. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not batch_id: + raise ValueError(f"Expected a non-empty value for `batch_id` but received {batch_id!r}") + return await self._post( + f"/batches/{batch_id}/cancel", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Batch, + ) + + +class BatchesWithRawResponse: + def __init__(self, batches: Batches) -> None: + self._batches = batches + + self.create = _legacy_response.to_raw_response_wrapper( + batches.create, + ) + self.retrieve = _legacy_response.to_raw_response_wrapper( + batches.retrieve, + ) + self.list = _legacy_response.to_raw_response_wrapper( + batches.list, + ) + self.cancel = _legacy_response.to_raw_response_wrapper( + batches.cancel, + ) + + +class AsyncBatchesWithRawResponse: + def __init__(self, batches: AsyncBatches) -> None: + self._batches = batches + + self.create = _legacy_response.async_to_raw_response_wrapper( + batches.create, + ) + self.retrieve = _legacy_response.async_to_raw_response_wrapper( + batches.retrieve, + ) + self.list = _legacy_response.async_to_raw_response_wrapper( + batches.list, + ) + self.cancel = _legacy_response.async_to_raw_response_wrapper( + batches.cancel, + ) + + +class BatchesWithStreamingResponse: + def __init__(self, batches: Batches) -> None: + self._batches = batches + + self.create = to_streamed_response_wrapper( + batches.create, + ) + self.retrieve = to_streamed_response_wrapper( + batches.retrieve, + ) + self.list = to_streamed_response_wrapper( + batches.list, + ) + self.cancel = to_streamed_response_wrapper( + batches.cancel, + ) + + +class AsyncBatchesWithStreamingResponse: + def __init__(self, batches: AsyncBatches) -> None: + self._batches = batches + + 
self.create = async_to_streamed_response_wrapper( + batches.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + batches.retrieve, + ) + self.list = async_to_streamed_response_wrapper( + batches.list, + ) + self.cancel = async_to_streamed_response_wrapper( + batches.cancel, + ) diff --git a/.venv/Lib/site-packages/openai/resources/beta/__init__.py b/.venv/Lib/site-packages/openai/resources/beta/__init__.py new file mode 100644 index 00000000..01f53387 --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/beta/__init__.py @@ -0,0 +1,61 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from .beta import ( + Beta, + AsyncBeta, + BetaWithRawResponse, + AsyncBetaWithRawResponse, + BetaWithStreamingResponse, + AsyncBetaWithStreamingResponse, +) +from .threads import ( + Threads, + AsyncThreads, + ThreadsWithRawResponse, + AsyncThreadsWithRawResponse, + ThreadsWithStreamingResponse, + AsyncThreadsWithStreamingResponse, +) +from .assistants import ( + Assistants, + AsyncAssistants, + AssistantsWithRawResponse, + AsyncAssistantsWithRawResponse, + AssistantsWithStreamingResponse, + AsyncAssistantsWithStreamingResponse, +) +from .vector_stores import ( + VectorStores, + AsyncVectorStores, + VectorStoresWithRawResponse, + AsyncVectorStoresWithRawResponse, + VectorStoresWithStreamingResponse, + AsyncVectorStoresWithStreamingResponse, +) + +__all__ = [ + "VectorStores", + "AsyncVectorStores", + "VectorStoresWithRawResponse", + "AsyncVectorStoresWithRawResponse", + "VectorStoresWithStreamingResponse", + "AsyncVectorStoresWithStreamingResponse", + "Assistants", + "AsyncAssistants", + "AssistantsWithRawResponse", + "AsyncAssistantsWithRawResponse", + "AssistantsWithStreamingResponse", + "AsyncAssistantsWithStreamingResponse", + "Threads", + "AsyncThreads", + "ThreadsWithRawResponse", + "AsyncThreadsWithRawResponse", + "ThreadsWithStreamingResponse", + "AsyncThreadsWithStreamingResponse", + "Beta", + "AsyncBeta", + 
"BetaWithRawResponse", + "AsyncBetaWithRawResponse", + "BetaWithStreamingResponse", + "AsyncBetaWithStreamingResponse", +] diff --git a/.venv/Lib/site-packages/openai/resources/beta/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/beta/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..e0bbc4a9 Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/beta/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/beta/__pycache__/assistants.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/beta/__pycache__/assistants.cpython-311.pyc new file mode 100644 index 00000000..e63b8298 Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/beta/__pycache__/assistants.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/beta/__pycache__/beta.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/beta/__pycache__/beta.cpython-311.pyc new file mode 100644 index 00000000..67f9c547 Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/beta/__pycache__/beta.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/beta/assistants.py b/.venv/Lib/site-packages/openai/resources/beta/assistants.py new file mode 100644 index 00000000..923ad95a --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/beta/assistants.py @@ -0,0 +1,888 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union, Iterable, Optional +from typing_extensions import Literal + +import httpx + +from ... 
import _legacy_response +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._utils import ( + maybe_transform, + async_maybe_transform, +) +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper +from ...pagination import SyncCursorPage, AsyncCursorPage +from ...types.beta import ( + assistant_list_params, + assistant_create_params, + assistant_update_params, +) +from ..._base_client import ( + AsyncPaginator, + make_request_options, +) +from ...types.beta.assistant import Assistant +from ...types.beta.assistant_deleted import AssistantDeleted +from ...types.beta.assistant_tool_param import AssistantToolParam +from ...types.beta.assistant_response_format_option_param import AssistantResponseFormatOptionParam + +__all__ = ["Assistants", "AsyncAssistants"] + + +class Assistants(SyncAPIResource): + @cached_property + def with_raw_response(self) -> AssistantsWithRawResponse: + return AssistantsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AssistantsWithStreamingResponse: + return AssistantsWithStreamingResponse(self) + + def create( + self, + *, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + ], + description: Optional[str] | NotGiven = NOT_GIVEN, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + name: Optional[str] | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: 
Optional[float] | NotGiven = NOT_GIVEN, + tool_resources: Optional[assistant_create_params.ToolResources] | NotGiven = NOT_GIVEN, + tools: Iterable[AssistantToolParam] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Assistant: + """ + Create an assistant with a model and instructions. + + Args: + model: ID of the model to use. You can use the + [List models](https://platform.openai.com/docs/api-reference/models/list) API to + see all of your available models, or see our + [Model overview](https://platform.openai.com/docs/models/overview) for + descriptions of them. + + description: The description of the assistant. The maximum length is 512 characters. + + instructions: The system instructions that the assistant uses. The maximum length is 256,000 + characters. + + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + name: The name of the assistant. The maximum length is 256 characters. + + response_format: Specifies the format that the model must output. Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. 
+ + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + tool_resources: A set of resources that are used by the assistant's tools. The resources are + specific to the type of tool. For example, the `code_interpreter` tool requires + a list of file IDs, while the `file_search` tool requires a list of vector store + IDs. + + tools: A list of tool enabled on the assistant. There can be a maximum of 128 tools per + assistant. Tools can be of types `code_interpreter`, `file_search`, or + `function`. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or temperature but not both. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._post( + "/assistants", + body=maybe_transform( + { + "model": model, + "description": description, + "instructions": instructions, + "metadata": metadata, + "name": name, + "response_format": response_format, + "temperature": temperature, + "tool_resources": tool_resources, + "tools": tools, + "top_p": top_p, + }, + assistant_create_params.AssistantCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Assistant, + ) + + def retrieve( + self, + assistant_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Assistant: + """ + Retrieves an assistant. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not assistant_id: + raise ValueError(f"Expected a non-empty value for `assistant_id` but received {assistant_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._get( + f"/assistants/{assistant_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Assistant, + ) + + def update( + self, + assistant_id: str, + *, + description: Optional[str] | NotGiven = NOT_GIVEN, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: str | NotGiven = NOT_GIVEN, + name: Optional[str] | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_resources: Optional[assistant_update_params.ToolResources] | NotGiven = NOT_GIVEN, + tools: Iterable[AssistantToolParam] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Assistant: + """Modifies an assistant. + + Args: + description: The description of the assistant. + + The maximum length is 512 characters. + + instructions: The system instructions that the assistant uses. The maximum length is 256,000 + characters. 
+ + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + model: ID of the model to use. You can use the + [List models](https://platform.openai.com/docs/api-reference/models/list) API to + see all of your available models, or see our + [Model overview](https://platform.openai.com/docs/models/overview) for + descriptions of them. + + name: The name of the assistant. The maximum length is 256 characters. + + response_format: Specifies the format that the model must output. Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + tool_resources: A set of resources that are used by the assistant's tools. The resources are + specific to the type of tool. For example, the `code_interpreter` tool requires + a list of file IDs, while the `file_search` tool requires a list of vector store + IDs. 
+ + tools: A list of tool enabled on the assistant. There can be a maximum of 128 tools per + assistant. Tools can be of types `code_interpreter`, `file_search`, or + `function`. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or temperature but not both. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not assistant_id: + raise ValueError(f"Expected a non-empty value for `assistant_id` but received {assistant_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._post( + f"/assistants/{assistant_id}", + body=maybe_transform( + { + "description": description, + "instructions": instructions, + "metadata": metadata, + "model": model, + "name": name, + "response_format": response_format, + "temperature": temperature, + "tool_resources": tool_resources, + "tools": tools, + "top_p": top_p, + }, + assistant_update_params.AssistantUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Assistant, + ) + + def list( + self, + *, + after: str | NotGiven = NOT_GIVEN, + before: str | NotGiven = NOT_GIVEN, + limit: int | NotGiven = NOT_GIVEN, + order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> SyncCursorPage[Assistant]: + """Returns a list of assistants. + + Args: + after: A cursor for use in pagination. + + `after` is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include after=obj_foo in order to + fetch the next page of the list. + + before: A cursor for use in pagination. `before` is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include before=obj_foo in order to + fetch the previous page of the list. + + limit: A limit on the number of objects to be returned. Limit can range between 1 and + 100, and the default is 20. + + order: Sort order by the `created_at` timestamp of the objects. `asc` for ascending + order and `desc` for descending order. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._get_api_list( + "/assistants", + page=SyncCursorPage[Assistant], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "after": after, + "before": before, + "limit": limit, + "order": order, + }, + assistant_list_params.AssistantListParams, + ), + ), + model=Assistant, + ) + + def delete( + self, + assistant_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AssistantDeleted: + """ + Delete an assistant. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not assistant_id: + raise ValueError(f"Expected a non-empty value for `assistant_id` but received {assistant_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._delete( + f"/assistants/{assistant_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=AssistantDeleted, + ) + + +class AsyncAssistants(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncAssistantsWithRawResponse: + return AsyncAssistantsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncAssistantsWithStreamingResponse: + return AsyncAssistantsWithStreamingResponse(self) + + async def create( + self, + *, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + ], + description: Optional[str] | NotGiven = NOT_GIVEN, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + name: Optional[str] | NotGiven = 
NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_resources: Optional[assistant_create_params.ToolResources] | NotGiven = NOT_GIVEN, + tools: Iterable[AssistantToolParam] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Assistant: + """ + Create an assistant with a model and instructions. + + Args: + model: ID of the model to use. You can use the + [List models](https://platform.openai.com/docs/api-reference/models/list) API to + see all of your available models, or see our + [Model overview](https://platform.openai.com/docs/models/overview) for + descriptions of them. + + description: The description of the assistant. The maximum length is 512 characters. + + instructions: The system instructions that the assistant uses. The maximum length is 256,000 + characters. + + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + name: The name of the assistant. The maximum length is 256 characters. + + response_format: Specifies the format that the model must output. Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. 
+ + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + tool_resources: A set of resources that are used by the assistant's tools. The resources are + specific to the type of tool. For example, the `code_interpreter` tool requires + a list of file IDs, while the `file_search` tool requires a list of vector store + IDs. + + tools: A list of tool enabled on the assistant. There can be a maximum of 128 tools per + assistant. Tools can be of types `code_interpreter`, `file_search`, or + `function`. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or temperature but not both. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return await self._post( + "/assistants", + body=await async_maybe_transform( + { + "model": model, + "description": description, + "instructions": instructions, + "metadata": metadata, + "name": name, + "response_format": response_format, + "temperature": temperature, + "tool_resources": tool_resources, + "tools": tools, + "top_p": top_p, + }, + assistant_create_params.AssistantCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Assistant, + ) + + async def retrieve( + self, + assistant_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Assistant: + """ + Retrieves an assistant. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not assistant_id: + raise ValueError(f"Expected a non-empty value for `assistant_id` but received {assistant_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return await self._get( + f"/assistants/{assistant_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Assistant, + ) + + async def update( + self, + assistant_id: str, + *, + description: Optional[str] | NotGiven = NOT_GIVEN, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: str | NotGiven = NOT_GIVEN, + name: Optional[str] | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_resources: Optional[assistant_update_params.ToolResources] | NotGiven = NOT_GIVEN, + tools: Iterable[AssistantToolParam] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Assistant: + """Modifies an assistant. + + Args: + description: The description of the assistant. + + The maximum length is 512 characters. + + instructions: The system instructions that the assistant uses. The maximum length is 256,000 + characters. 
+ + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + model: ID of the model to use. You can use the + [List models](https://platform.openai.com/docs/api-reference/models/list) API to + see all of your available models, or see our + [Model overview](https://platform.openai.com/docs/models/overview) for + descriptions of them. + + name: The name of the assistant. The maximum length is 256 characters. + + response_format: Specifies the format that the model must output. Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + tool_resources: A set of resources that are used by the assistant's tools. The resources are + specific to the type of tool. For example, the `code_interpreter` tool requires + a list of file IDs, while the `file_search` tool requires a list of vector store + IDs. 
+ + tools: A list of tool enabled on the assistant. There can be a maximum of 128 tools per + assistant. Tools can be of types `code_interpreter`, `file_search`, or + `function`. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or temperature but not both. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not assistant_id: + raise ValueError(f"Expected a non-empty value for `assistant_id` but received {assistant_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return await self._post( + f"/assistants/{assistant_id}", + body=await async_maybe_transform( + { + "description": description, + "instructions": instructions, + "metadata": metadata, + "model": model, + "name": name, + "response_format": response_format, + "temperature": temperature, + "tool_resources": tool_resources, + "tools": tools, + "top_p": top_p, + }, + assistant_update_params.AssistantUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Assistant, + ) + + def list( + self, + *, + after: str | NotGiven = NOT_GIVEN, + before: str | NotGiven = NOT_GIVEN, + limit: int | NotGiven = NOT_GIVEN, + order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncPaginator[Assistant, AsyncCursorPage[Assistant]]: + """Returns a list of assistants. + + Args: + after: A cursor for use in pagination. + + `after` is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include after=obj_foo in order to + fetch the next page of the list. + + before: A cursor for use in pagination. `before` is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include before=obj_foo in order to + fetch the previous page of the list. + + limit: A limit on the number of objects to be returned. Limit can range between 1 and + 100, and the default is 20. + + order: Sort order by the `created_at` timestamp of the objects. `asc` for ascending + order and `desc` for descending order. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._get_api_list( + "/assistants", + page=AsyncCursorPage[Assistant], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "after": after, + "before": before, + "limit": limit, + "order": order, + }, + assistant_list_params.AssistantListParams, + ), + ), + model=Assistant, + ) + + async def delete( + self, + assistant_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AssistantDeleted: + """ + Delete an assistant. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not assistant_id: + raise ValueError(f"Expected a non-empty value for `assistant_id` but received {assistant_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return await self._delete( + f"/assistants/{assistant_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=AssistantDeleted, + ) + + +class AssistantsWithRawResponse: + def __init__(self, assistants: Assistants) -> None: + self._assistants = assistants + + self.create = _legacy_response.to_raw_response_wrapper( + assistants.create, + ) + self.retrieve = _legacy_response.to_raw_response_wrapper( + assistants.retrieve, + ) + self.update = _legacy_response.to_raw_response_wrapper( + assistants.update, + ) + self.list = _legacy_response.to_raw_response_wrapper( + assistants.list, + ) + self.delete = _legacy_response.to_raw_response_wrapper( + assistants.delete, + ) + + +class AsyncAssistantsWithRawResponse: + def __init__(self, assistants: AsyncAssistants) -> None: + self._assistants = assistants + + self.create = _legacy_response.async_to_raw_response_wrapper( + assistants.create, + ) + self.retrieve = _legacy_response.async_to_raw_response_wrapper( + assistants.retrieve, + ) + self.update = _legacy_response.async_to_raw_response_wrapper( + assistants.update, + ) + self.list = _legacy_response.async_to_raw_response_wrapper( + assistants.list, + ) + self.delete = _legacy_response.async_to_raw_response_wrapper( + assistants.delete, + ) + + +class AssistantsWithStreamingResponse: + def __init__(self, assistants: Assistants) -> None: + self._assistants = assistants + + self.create = to_streamed_response_wrapper( + 
assistants.create, + ) + self.retrieve = to_streamed_response_wrapper( + assistants.retrieve, + ) + self.update = to_streamed_response_wrapper( + assistants.update, + ) + self.list = to_streamed_response_wrapper( + assistants.list, + ) + self.delete = to_streamed_response_wrapper( + assistants.delete, + ) + + +class AsyncAssistantsWithStreamingResponse: + def __init__(self, assistants: AsyncAssistants) -> None: + self._assistants = assistants + + self.create = async_to_streamed_response_wrapper( + assistants.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + assistants.retrieve, + ) + self.update = async_to_streamed_response_wrapper( + assistants.update, + ) + self.list = async_to_streamed_response_wrapper( + assistants.list, + ) + self.delete = async_to_streamed_response_wrapper( + assistants.delete, + ) diff --git a/.venv/Lib/site-packages/openai/resources/beta/beta.py b/.venv/Lib/site-packages/openai/resources/beta/beta.py new file mode 100644 index 00000000..0d980667 --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/beta/beta.py @@ -0,0 +1,146 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from .threads import ( + Threads, + AsyncThreads, + ThreadsWithRawResponse, + AsyncThreadsWithRawResponse, + ThreadsWithStreamingResponse, + AsyncThreadsWithStreamingResponse, +) +from ..._compat import cached_property +from .assistants import ( + Assistants, + AsyncAssistants, + AssistantsWithRawResponse, + AsyncAssistantsWithRawResponse, + AssistantsWithStreamingResponse, + AsyncAssistantsWithStreamingResponse, +) +from ..._resource import SyncAPIResource, AsyncAPIResource +from .vector_stores import ( + VectorStores, + AsyncVectorStores, + VectorStoresWithRawResponse, + AsyncVectorStoresWithRawResponse, + VectorStoresWithStreamingResponse, + AsyncVectorStoresWithStreamingResponse, +) +from .threads.threads import Threads, AsyncThreads +from .vector_stores.vector_stores import VectorStores, AsyncVectorStores + +__all__ = ["Beta", "AsyncBeta"] + + +class Beta(SyncAPIResource): + @cached_property + def vector_stores(self) -> VectorStores: + return VectorStores(self._client) + + @cached_property + def assistants(self) -> Assistants: + return Assistants(self._client) + + @cached_property + def threads(self) -> Threads: + return Threads(self._client) + + @cached_property + def with_raw_response(self) -> BetaWithRawResponse: + return BetaWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> BetaWithStreamingResponse: + return BetaWithStreamingResponse(self) + + +class AsyncBeta(AsyncAPIResource): + @cached_property + def vector_stores(self) -> AsyncVectorStores: + return AsyncVectorStores(self._client) + + @cached_property + def assistants(self) -> AsyncAssistants: + return AsyncAssistants(self._client) + + @cached_property + def threads(self) -> AsyncThreads: + return AsyncThreads(self._client) + + @cached_property + def with_raw_response(self) -> AsyncBetaWithRawResponse: + return AsyncBetaWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> 
AsyncBetaWithStreamingResponse: + return AsyncBetaWithStreamingResponse(self) + + +class BetaWithRawResponse: + def __init__(self, beta: Beta) -> None: + self._beta = beta + + @cached_property + def vector_stores(self) -> VectorStoresWithRawResponse: + return VectorStoresWithRawResponse(self._beta.vector_stores) + + @cached_property + def assistants(self) -> AssistantsWithRawResponse: + return AssistantsWithRawResponse(self._beta.assistants) + + @cached_property + def threads(self) -> ThreadsWithRawResponse: + return ThreadsWithRawResponse(self._beta.threads) + + +class AsyncBetaWithRawResponse: + def __init__(self, beta: AsyncBeta) -> None: + self._beta = beta + + @cached_property + def vector_stores(self) -> AsyncVectorStoresWithRawResponse: + return AsyncVectorStoresWithRawResponse(self._beta.vector_stores) + + @cached_property + def assistants(self) -> AsyncAssistantsWithRawResponse: + return AsyncAssistantsWithRawResponse(self._beta.assistants) + + @cached_property + def threads(self) -> AsyncThreadsWithRawResponse: + return AsyncThreadsWithRawResponse(self._beta.threads) + + +class BetaWithStreamingResponse: + def __init__(self, beta: Beta) -> None: + self._beta = beta + + @cached_property + def vector_stores(self) -> VectorStoresWithStreamingResponse: + return VectorStoresWithStreamingResponse(self._beta.vector_stores) + + @cached_property + def assistants(self) -> AssistantsWithStreamingResponse: + return AssistantsWithStreamingResponse(self._beta.assistants) + + @cached_property + def threads(self) -> ThreadsWithStreamingResponse: + return ThreadsWithStreamingResponse(self._beta.threads) + + +class AsyncBetaWithStreamingResponse: + def __init__(self, beta: AsyncBeta) -> None: + self._beta = beta + + @cached_property + def vector_stores(self) -> AsyncVectorStoresWithStreamingResponse: + return AsyncVectorStoresWithStreamingResponse(self._beta.vector_stores) + + @cached_property + def assistants(self) -> AsyncAssistantsWithStreamingResponse: + return 
AsyncAssistantsWithStreamingResponse(self._beta.assistants) + + @cached_property + def threads(self) -> AsyncThreadsWithStreamingResponse: + return AsyncThreadsWithStreamingResponse(self._beta.threads) diff --git a/.venv/Lib/site-packages/openai/resources/beta/threads/__init__.py b/.venv/Lib/site-packages/openai/resources/beta/threads/__init__.py new file mode 100644 index 00000000..a66e445b --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/beta/threads/__init__.py @@ -0,0 +1,47 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from .runs import ( + Runs, + AsyncRuns, + RunsWithRawResponse, + AsyncRunsWithRawResponse, + RunsWithStreamingResponse, + AsyncRunsWithStreamingResponse, +) +from .threads import ( + Threads, + AsyncThreads, + ThreadsWithRawResponse, + AsyncThreadsWithRawResponse, + ThreadsWithStreamingResponse, + AsyncThreadsWithStreamingResponse, +) +from .messages import ( + Messages, + AsyncMessages, + MessagesWithRawResponse, + AsyncMessagesWithRawResponse, + MessagesWithStreamingResponse, + AsyncMessagesWithStreamingResponse, +) + +__all__ = [ + "Runs", + "AsyncRuns", + "RunsWithRawResponse", + "AsyncRunsWithRawResponse", + "RunsWithStreamingResponse", + "AsyncRunsWithStreamingResponse", + "Messages", + "AsyncMessages", + "MessagesWithRawResponse", + "AsyncMessagesWithRawResponse", + "MessagesWithStreamingResponse", + "AsyncMessagesWithStreamingResponse", + "Threads", + "AsyncThreads", + "ThreadsWithRawResponse", + "AsyncThreadsWithRawResponse", + "ThreadsWithStreamingResponse", + "AsyncThreadsWithStreamingResponse", +] diff --git a/.venv/Lib/site-packages/openai/resources/beta/threads/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/beta/threads/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..aeb13484 Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/beta/threads/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/openai/resources/beta/threads/__pycache__/messages.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/beta/threads/__pycache__/messages.cpython-311.pyc new file mode 100644 index 00000000..e792128d Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/beta/threads/__pycache__/messages.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/beta/threads/__pycache__/threads.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/beta/threads/__pycache__/threads.cpython-311.pyc new file mode 100644 index 00000000..36ea6bd5 Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/beta/threads/__pycache__/threads.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/beta/threads/messages.py b/.venv/Lib/site-packages/openai/resources/beta/threads/messages.py new file mode 100644 index 00000000..0799feed --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/beta/threads/messages.py @@ -0,0 +1,638 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Iterable, Optional +from typing_extensions import Literal + +import httpx + +from .... 
import _legacy_response +from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ...._utils import ( + maybe_transform, + async_maybe_transform, +) +from ...._compat import cached_property +from ...._resource import SyncAPIResource, AsyncAPIResource +from ...._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper +from ....pagination import SyncCursorPage, AsyncCursorPage +from ...._base_client import ( + AsyncPaginator, + make_request_options, +) +from ....types.beta.threads import message_list_params, message_create_params, message_update_params +from ....types.beta.threads.message import Message +from ....types.beta.threads.message_deleted import MessageDeleted + +__all__ = ["Messages", "AsyncMessages"] + + +class Messages(SyncAPIResource): + @cached_property + def with_raw_response(self) -> MessagesWithRawResponse: + return MessagesWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> MessagesWithStreamingResponse: + return MessagesWithStreamingResponse(self) + + def create( + self, + thread_id: str, + *, + content: str, + role: Literal["user", "assistant"], + attachments: Optional[Iterable[message_create_params.Attachment]] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Message: + """ + Create a message. + + Args: + content: The content of the message. + + role: + The role of the entity that is creating the message. 
Allowed values include: + + - `user`: Indicates the message is sent by an actual user and should be used in + most cases to represent user-generated messages. + - `assistant`: Indicates the message is generated by the assistant. Use this + value to insert messages from the assistant into the conversation. + + attachments: A list of files attached to the message, and the tools they should be added to. + + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._post( + f"/threads/{thread_id}/messages", + body=maybe_transform( + { + "content": content, + "role": role, + "attachments": attachments, + "metadata": metadata, + }, + message_create_params.MessageCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Message, + ) + + def retrieve( + self, + message_id: str, + *, + thread_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Message: + """ + Retrieve a message. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + if not message_id: + raise ValueError(f"Expected a non-empty value for `message_id` but received {message_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._get( + f"/threads/{thread_id}/messages/{message_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Message, + ) + + def update( + self, + message_id: str, + *, + thread_id: str, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Message: + """ + Modifies a message. + + Args: + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + if not message_id: + raise ValueError(f"Expected a non-empty value for `message_id` but received {message_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._post( + f"/threads/{thread_id}/messages/{message_id}", + body=maybe_transform({"metadata": metadata}, message_update_params.MessageUpdateParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Message, + ) + + def list( + self, + thread_id: str, + *, + after: str | NotGiven = NOT_GIVEN, + before: str | NotGiven = NOT_GIVEN, + limit: int | NotGiven = NOT_GIVEN, + order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN, + run_id: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> SyncCursorPage[Message]: + """ + Returns a list of messages for a given thread. + + Args: + after: A cursor for use in pagination. `after` is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include after=obj_foo in order to + fetch the next page of the list. + + before: A cursor for use in pagination. 
`before` is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include before=obj_foo in order to + fetch the previous page of the list. + + limit: A limit on the number of objects to be returned. Limit can range between 1 and + 100, and the default is 20. + + order: Sort order by the `created_at` timestamp of the objects. `asc` for ascending + order and `desc` for descending order. + + run_id: Filter messages by the run ID that generated them. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._get_api_list( + f"/threads/{thread_id}/messages", + page=SyncCursorPage[Message], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "after": after, + "before": before, + "limit": limit, + "order": order, + "run_id": run_id, + }, + message_list_params.MessageListParams, + ), + ), + model=Message, + ) + + def delete( + self, + message_id: str, + *, + thread_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> MessageDeleted: + """ + Deletes a message. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + if not message_id: + raise ValueError(f"Expected a non-empty value for `message_id` but received {message_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._delete( + f"/threads/{thread_id}/messages/{message_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=MessageDeleted, + ) + + +class AsyncMessages(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncMessagesWithRawResponse: + return AsyncMessagesWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncMessagesWithStreamingResponse: + return AsyncMessagesWithStreamingResponse(self) + + async def create( + self, + thread_id: str, + *, + content: str, + role: Literal["user", "assistant"], + attachments: Optional[Iterable[message_create_params.Attachment]] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Message: + """ + Create a message. + + Args: + content: The content of the message. + + role: + The role of the entity that is creating the message. 
Allowed values include: + + - `user`: Indicates the message is sent by an actual user and should be used in + most cases to represent user-generated messages. + - `assistant`: Indicates the message is generated by the assistant. Use this + value to insert messages from the assistant into the conversation. + + attachments: A list of files attached to the message, and the tools they should be added to. + + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return await self._post( + f"/threads/{thread_id}/messages", + body=await async_maybe_transform( + { + "content": content, + "role": role, + "attachments": attachments, + "metadata": metadata, + }, + message_create_params.MessageCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Message, + ) + + async def retrieve( + self, + message_id: str, + *, + thread_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Message: + """ + Retrieve a message. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + if not message_id: + raise ValueError(f"Expected a non-empty value for `message_id` but received {message_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return await self._get( + f"/threads/{thread_id}/messages/{message_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Message, + ) + + async def update( + self, + message_id: str, + *, + thread_id: str, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Message: + """ + Modifies a message. + + Args: + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + if not message_id: + raise ValueError(f"Expected a non-empty value for `message_id` but received {message_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return await self._post( + f"/threads/{thread_id}/messages/{message_id}", + body=await async_maybe_transform({"metadata": metadata}, message_update_params.MessageUpdateParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Message, + ) + + def list( + self, + thread_id: str, + *, + after: str | NotGiven = NOT_GIVEN, + before: str | NotGiven = NOT_GIVEN, + limit: int | NotGiven = NOT_GIVEN, + order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN, + run_id: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncPaginator[Message, AsyncCursorPage[Message]]: + """ + Returns a list of messages for a given thread. + + Args: + after: A cursor for use in pagination. `after` is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include after=obj_foo in order to + fetch the next page of the list. + + before: A cursor for use in pagination. 
`before` is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include before=obj_foo in order to + fetch the previous page of the list. + + limit: A limit on the number of objects to be returned. Limit can range between 1 and + 100, and the default is 20. + + order: Sort order by the `created_at` timestamp of the objects. `asc` for ascending + order and `desc` for descending order. + + run_id: Filter messages by the run ID that generated them. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._get_api_list( + f"/threads/{thread_id}/messages", + page=AsyncCursorPage[Message], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "after": after, + "before": before, + "limit": limit, + "order": order, + "run_id": run_id, + }, + message_list_params.MessageListParams, + ), + ), + model=Message, + ) + + async def delete( + self, + message_id: str, + *, + thread_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> MessageDeleted: + """ + Deletes a message. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + if not message_id: + raise ValueError(f"Expected a non-empty value for `message_id` but received {message_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return await self._delete( + f"/threads/{thread_id}/messages/{message_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=MessageDeleted, + ) + + +class MessagesWithRawResponse: + def __init__(self, messages: Messages) -> None: + self._messages = messages + + self.create = _legacy_response.to_raw_response_wrapper( + messages.create, + ) + self.retrieve = _legacy_response.to_raw_response_wrapper( + messages.retrieve, + ) + self.update = _legacy_response.to_raw_response_wrapper( + messages.update, + ) + self.list = _legacy_response.to_raw_response_wrapper( + messages.list, + ) + self.delete = _legacy_response.to_raw_response_wrapper( + messages.delete, + ) + + +class AsyncMessagesWithRawResponse: + def __init__(self, messages: AsyncMessages) -> None: + self._messages = messages + + self.create = _legacy_response.async_to_raw_response_wrapper( + messages.create, + ) + self.retrieve = _legacy_response.async_to_raw_response_wrapper( + messages.retrieve, + ) + self.update = _legacy_response.async_to_raw_response_wrapper( + messages.update, + ) + self.list = _legacy_response.async_to_raw_response_wrapper( + messages.list, + ) + self.delete = _legacy_response.async_to_raw_response_wrapper( + messages.delete, + ) + + +class MessagesWithStreamingResponse: + def __init__(self, messages: Messages) -> None: + self._messages = 
messages + + self.create = to_streamed_response_wrapper( + messages.create, + ) + self.retrieve = to_streamed_response_wrapper( + messages.retrieve, + ) + self.update = to_streamed_response_wrapper( + messages.update, + ) + self.list = to_streamed_response_wrapper( + messages.list, + ) + self.delete = to_streamed_response_wrapper( + messages.delete, + ) + + +class AsyncMessagesWithStreamingResponse: + def __init__(self, messages: AsyncMessages) -> None: + self._messages = messages + + self.create = async_to_streamed_response_wrapper( + messages.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + messages.retrieve, + ) + self.update = async_to_streamed_response_wrapper( + messages.update, + ) + self.list = async_to_streamed_response_wrapper( + messages.list, + ) + self.delete = async_to_streamed_response_wrapper( + messages.delete, + ) diff --git a/.venv/Lib/site-packages/openai/resources/beta/threads/runs/__init__.py b/.venv/Lib/site-packages/openai/resources/beta/threads/runs/__init__.py new file mode 100644 index 00000000..50aa9fae --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/beta/threads/runs/__init__.py @@ -0,0 +1,33 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from .runs import ( + Runs, + AsyncRuns, + RunsWithRawResponse, + AsyncRunsWithRawResponse, + RunsWithStreamingResponse, + AsyncRunsWithStreamingResponse, +) +from .steps import ( + Steps, + AsyncSteps, + StepsWithRawResponse, + AsyncStepsWithRawResponse, + StepsWithStreamingResponse, + AsyncStepsWithStreamingResponse, +) + +__all__ = [ + "Steps", + "AsyncSteps", + "StepsWithRawResponse", + "AsyncStepsWithRawResponse", + "StepsWithStreamingResponse", + "AsyncStepsWithStreamingResponse", + "Runs", + "AsyncRuns", + "RunsWithRawResponse", + "AsyncRunsWithRawResponse", + "RunsWithStreamingResponse", + "AsyncRunsWithStreamingResponse", +] diff --git a/.venv/Lib/site-packages/openai/resources/beta/threads/runs/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/beta/threads/runs/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..1be45dbc Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/beta/threads/runs/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/beta/threads/runs/__pycache__/runs.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/beta/threads/runs/__pycache__/runs.cpython-311.pyc new file mode 100644 index 00000000..2c485a27 Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/beta/threads/runs/__pycache__/runs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/beta/threads/runs/__pycache__/steps.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/beta/threads/runs/__pycache__/steps.cpython-311.pyc new file mode 100644 index 00000000..f9f635f8 Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/beta/threads/runs/__pycache__/steps.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/beta/threads/runs/runs.py b/.venv/Lib/site-packages/openai/resources/beta/threads/runs/runs.py new file mode 100644 index 00000000..4268d413 --- /dev/null +++ 
b/.venv/Lib/site-packages/openai/resources/beta/threads/runs/runs.py @@ -0,0 +1,3240 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import time +import typing_extensions +from typing import Union, Iterable, Optional, overload +from functools import partial +from typing_extensions import Literal + +import httpx + +from ..... import _legacy_response +from .steps import ( + Steps, + AsyncSteps, + StepsWithRawResponse, + AsyncStepsWithRawResponse, + StepsWithStreamingResponse, + AsyncStepsWithStreamingResponse, +) +from ....._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ....._utils import ( + is_given, + required_args, + maybe_transform, + async_maybe_transform, +) +from ....._compat import cached_property +from ....._resource import SyncAPIResource, AsyncAPIResource +from ....._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper +from ....._streaming import Stream, AsyncStream +from .....pagination import SyncCursorPage, AsyncCursorPage +from ....._base_client import ( + AsyncPaginator, + make_request_options, +) +from .....lib.streaming import ( + AssistantEventHandler, + AssistantEventHandlerT, + AssistantStreamManager, + AsyncAssistantEventHandler, + AsyncAssistantEventHandlerT, + AsyncAssistantStreamManager, +) +from .....types.beta.threads import ( + run_list_params, + run_create_params, + run_update_params, + run_submit_tool_outputs_params, +) +from .....types.beta.threads.run import Run +from .....types.beta.assistant_tool_param import AssistantToolParam +from .....types.beta.assistant_stream_event import AssistantStreamEvent +from .....types.beta.assistant_tool_choice_option_param import AssistantToolChoiceOptionParam +from .....types.beta.assistant_response_format_option_param import AssistantResponseFormatOptionParam + +__all__ = ["Runs", "AsyncRuns"] + + +class Runs(SyncAPIResource): + @cached_property + def steps(self) -> Steps: 
+ return Steps(self._client) + + @cached_property + def with_raw_response(self) -> RunsWithRawResponse: + return RunsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> RunsWithStreamingResponse: + return RunsWithStreamingResponse(self) + + @overload + def create( + self, + thread_id: str, + *, + assistant_id: str, + additional_instructions: Optional[str] | NotGiven = NOT_GIVEN, + additional_messages: Optional[Iterable[run_create_params.AdditionalMessage]] | NotGiven = NOT_GIVEN, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[run_create_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run: + """ + Create a run. + + Args: + assistant_id: The ID of the + [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + execute this run. + + additional_instructions: Appends additional instructions at the end of the instructions for the run. This + is useful for modifying the behavior on a per-run basis without overriding other + instructions. + + additional_messages: Adds additional messages to the thread before creating the run. + + instructions: Overrides the + [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + of the assistant. This is useful for modifying the behavior on a per-run basis. + + max_completion_tokens: The maximum number of completion tokens that may be used over the course of the + run. The run will make a best effort to use only the number of completion tokens + specified, across multiple turns of the run. If the run exceeds the number of + completion tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + max_prompt_tokens: The maximum number of prompt tokens that may be used over the course of the run. + The run will make a best effort to use only the number of prompt tokens + specified, across multiple turns of the run. If the run exceeds the number of + prompt tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. 
+ + model: The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + be used to execute this run. If a value is provided here, it will override the + model associated with the assistant. If not, the model associated with the + assistant will be used. + + response_format: Specifies the format that the model must output. Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + + stream: If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + tool_choice: Controls which (if any) tool is called by the model. `none` means the model will + not call any tools and instead generates a message. `auto` is the default value + and means the model can pick between generating a message or calling one or more + tools. `required` means the model must call one or more tools before responding + to the user. 
Specifying a particular tool like `{"type": "file_search"}` or + `{"type": "function", "function": {"name": "my_function"}}` forces the model to + call that tool. + + tools: Override the tools the assistant can use for this run. This is useful for + modifying the behavior on a per-run basis. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or temperature but not both. + + truncation_strategy: Controls for how a thread will be truncated prior to the run. Use this to + control the intial context window of the run. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... 
+ + @overload + def create( + self, + thread_id: str, + *, + assistant_id: str, + stream: Literal[True], + additional_instructions: Optional[str] | NotGiven = NOT_GIVEN, + additional_messages: Optional[Iterable[run_create_params.AdditionalMessage]] | NotGiven = NOT_GIVEN, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[run_create_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Stream[AssistantStreamEvent]: + """ + Create a run. 
+ + Args: + assistant_id: The ID of the + [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + execute this run. + + stream: If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + + additional_instructions: Appends additional instructions at the end of the instructions for the run. This + is useful for modifying the behavior on a per-run basis without overriding other + instructions. + + additional_messages: Adds additional messages to the thread before creating the run. + + instructions: Overrides the + [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + of the assistant. This is useful for modifying the behavior on a per-run basis. + + max_completion_tokens: The maximum number of completion tokens that may be used over the course of the + run. The run will make a best effort to use only the number of completion tokens + specified, across multiple turns of the run. If the run exceeds the number of + completion tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + max_prompt_tokens: The maximum number of prompt tokens that may be used over the course of the run. + The run will make a best effort to use only the number of prompt tokens + specified, across multiple turns of the run. If the run exceeds the number of + prompt tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + model: The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + be used to execute this run. 
If a value is provided here, it will override the + model associated with the assistant. If not, the model associated with the + assistant will be used. + + response_format: Specifies the format that the model must output. Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + tool_choice: Controls which (if any) tool is called by the model. `none` means the model will + not call any tools and instead generates a message. `auto` is the default value + and means the model can pick between generating a message or calling one or more + tools. `required` means the model must call one or more tools before responding + to the user. Specifying a particular tool like `{"type": "file_search"}` or + `{"type": "function", "function": {"name": "my_function"}}` forces the model to + call that tool. + + tools: Override the tools the assistant can use for this run. This is useful for + modifying the behavior on a per-run basis. 
+ + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or temperature but not both. + + truncation_strategy: Controls for how a thread will be truncated prior to the run. Use this to + control the intial context window of the run. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... + + @overload + def create( + self, + thread_id: str, + *, + assistant_id: str, + stream: bool, + additional_instructions: Optional[str] | NotGiven = NOT_GIVEN, + additional_messages: Optional[Iterable[run_create_params.AdditionalMessage]] | NotGiven = NOT_GIVEN, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN, + top_p: 
Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[run_create_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run | Stream[AssistantStreamEvent]: + """ + Create a run. + + Args: + assistant_id: The ID of the + [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + execute this run. + + stream: If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + + additional_instructions: Appends additional instructions at the end of the instructions for the run. This + is useful for modifying the behavior on a per-run basis without overriding other + instructions. + + additional_messages: Adds additional messages to the thread before creating the run. + + instructions: Overrides the + [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + of the assistant. This is useful for modifying the behavior on a per-run basis. + + max_completion_tokens: The maximum number of completion tokens that may be used over the course of the + run. The run will make a best effort to use only the number of completion tokens + specified, across multiple turns of the run. If the run exceeds the number of + completion tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + max_prompt_tokens: The maximum number of prompt tokens that may be used over the course of the run. 
+ The run will make a best effort to use only the number of prompt tokens + specified, across multiple turns of the run. If the run exceeds the number of + prompt tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + model: The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + be used to execute this run. If a value is provided here, it will override the + model associated with the assistant. If not, the model associated with the + assistant will be used. + + response_format: Specifies the format that the model must output. Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + tool_choice: Controls which (if any) tool is called by the model. 
`none` means the model will + not call any tools and instead generates a message. `auto` is the default value + and means the model can pick between generating a message or calling one or more + tools. `required` means the model must call one or more tools before responding + to the user. Specifying a particular tool like `{"type": "file_search"}` or + `{"type": "function", "function": {"name": "my_function"}}` forces the model to + call that tool. + + tools: Override the tools the assistant can use for this run. This is useful for + modifying the behavior on a per-run basis. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or temperature but not both. + + truncation_strategy: Controls for how a thread will be truncated prior to the run. Use this to + control the intial context window of the run. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... 
+ + @required_args(["assistant_id"], ["assistant_id", "stream"]) + def create( + self, + thread_id: str, + *, + assistant_id: str, + additional_instructions: Optional[str] | NotGiven = NOT_GIVEN, + additional_messages: Optional[Iterable[run_create_params.AdditionalMessage]] | NotGiven = NOT_GIVEN, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[run_create_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run | Stream[AssistantStreamEvent]: + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._post( + f"/threads/{thread_id}/runs", + body=maybe_transform( + { + "assistant_id": assistant_id, + "additional_instructions": additional_instructions, + "additional_messages": additional_messages, + "instructions": instructions, + "max_completion_tokens": max_completion_tokens, + "max_prompt_tokens": max_prompt_tokens, + "metadata": metadata, + "model": model, + "response_format": response_format, + "stream": stream, + "temperature": temperature, + "tool_choice": tool_choice, + "tools": tools, + "top_p": top_p, + "truncation_strategy": truncation_strategy, + }, + run_create_params.RunCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Run, + stream=stream or False, + stream_cls=Stream[AssistantStreamEvent], + ) + + def retrieve( + self, + run_id: str, + *, + thread_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run: + """ + Retrieves a run. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + if not run_id: + raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._get( + f"/threads/{thread_id}/runs/{run_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Run, + ) + + def update( + self, + run_id: str, + *, + thread_id: str, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run: + """ + Modifies a run. + + Args: + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + if not run_id: + raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._post( + f"/threads/{thread_id}/runs/{run_id}", + body=maybe_transform({"metadata": metadata}, run_update_params.RunUpdateParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Run, + ) + + def list( + self, + thread_id: str, + *, + after: str | NotGiven = NOT_GIVEN, + before: str | NotGiven = NOT_GIVEN, + limit: int | NotGiven = NOT_GIVEN, + order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> SyncCursorPage[Run]: + """ + Returns a list of runs belonging to a thread. + + Args: + after: A cursor for use in pagination. `after` is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include after=obj_foo in order to + fetch the next page of the list. + + before: A cursor for use in pagination. `before` is an object ID that defines your place + in the list. 
For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include before=obj_foo in order to + fetch the previous page of the list. + + limit: A limit on the number of objects to be returned. Limit can range between 1 and + 100, and the default is 20. + + order: Sort order by the `created_at` timestamp of the objects. `asc` for ascending + order and `desc` for descending order. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._get_api_list( + f"/threads/{thread_id}/runs", + page=SyncCursorPage[Run], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "after": after, + "before": before, + "limit": limit, + "order": order, + }, + run_list_params.RunListParams, + ), + ), + model=Run, + ) + + def cancel( + self, + run_id: str, + *, + thread_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run: + """ + Cancels a run that is `in_progress`. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + if not run_id: + raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._post( + f"/threads/{thread_id}/runs/{run_id}/cancel", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Run, + ) + + def create_and_poll( + self, + *, + assistant_id: str, + additional_instructions: Optional[str] | NotGiven = NOT_GIVEN, + additional_messages: Optional[Iterable[run_create_params.AdditionalMessage]] | NotGiven = NOT_GIVEN, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN, + top_p: 
Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[run_create_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + poll_interval_ms: int | NotGiven = NOT_GIVEN, + thread_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run: + """ + A helper to create a run an poll for a terminal state. More information on Run + lifecycles can be found here: + https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + """ + run = self.create( + thread_id=thread_id, + assistant_id=assistant_id, + additional_instructions=additional_instructions, + additional_messages=additional_messages, + instructions=instructions, + max_completion_tokens=max_completion_tokens, + max_prompt_tokens=max_prompt_tokens, + metadata=metadata, + model=model, + response_format=response_format, + temperature=temperature, + tool_choice=tool_choice, + # We assume we are not streaming when polling + stream=False, + tools=tools, + truncation_strategy=truncation_strategy, + top_p=top_p, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + return self.poll( + run.id, + thread_id=thread_id, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + poll_interval_ms=poll_interval_ms, + timeout=timeout, + ) + + @overload + @typing_extensions.deprecated("use `stream` instead") + def create_and_stream( + self, + *, + assistant_id: str, + additional_instructions: Optional[str] | NotGiven = NOT_GIVEN, + additional_messages: Optional[Iterable[run_create_params.AdditionalMessage]] | NotGiven = NOT_GIVEN, + instructions: Optional[str] | NotGiven 
= NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[run_create_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + thread_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AssistantStreamManager[AssistantEventHandler]: + """Create a Run stream""" + ... 
+ + @overload + @typing_extensions.deprecated("use `stream` instead") + def create_and_stream( + self, + *, + assistant_id: str, + additional_instructions: Optional[str] | NotGiven = NOT_GIVEN, + additional_messages: Optional[Iterable[run_create_params.AdditionalMessage]] | NotGiven = NOT_GIVEN, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[run_create_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + thread_id: str, + event_handler: AssistantEventHandlerT, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AssistantStreamManager[AssistantEventHandlerT]: + """Create a Run stream""" + ... 
+ + @typing_extensions.deprecated("use `stream` instead") + def create_and_stream( + self, + *, + assistant_id: str, + additional_instructions: Optional[str] | NotGiven = NOT_GIVEN, + additional_messages: Optional[Iterable[run_create_params.AdditionalMessage]] | NotGiven = NOT_GIVEN, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[run_create_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + thread_id: str, + event_handler: AssistantEventHandlerT | None = None, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AssistantStreamManager[AssistantEventHandler] | AssistantStreamManager[AssistantEventHandlerT]: + """Create a Run stream""" + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + + extra_headers = { + "OpenAI-Beta": "assistants=v2", + "X-Stainless-Stream-Helper": "threads.runs.create_and_stream", + "X-Stainless-Custom-Event-Handler": "true" if event_handler else "false", + **(extra_headers or {}), + } + make_request = partial( + self._post, + f"/threads/{thread_id}/runs", + body=maybe_transform( + { + "assistant_id": assistant_id, + "additional_instructions": additional_instructions, + "additional_messages": additional_messages, + "instructions": instructions, + "max_completion_tokens": max_completion_tokens, + "max_prompt_tokens": max_prompt_tokens, + "metadata": metadata, + "model": model, + "response_format": response_format, + "temperature": temperature, + "tool_choice": tool_choice, + "stream": True, + "tools": tools, + "truncation_strategy": truncation_strategy, + "top_p": top_p, + }, + run_create_params.RunCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Run, + stream=True, + stream_cls=Stream[AssistantStreamEvent], + ) + return AssistantStreamManager(make_request, event_handler=event_handler or AssistantEventHandler()) + + def poll( + self, + run_id: str, + thread_id: str, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + poll_interval_ms: int | NotGiven = NOT_GIVEN, + ) -> Run: + """ + A helper to poll a run status until it reaches a terminal state. 
More + information on Run lifecycles can be found here: + https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + """ + extra_headers = {"X-Stainless-Poll-Helper": "true", **(extra_headers or {})} + + if is_given(poll_interval_ms): + extra_headers["X-Stainless-Custom-Poll-Interval"] = str(poll_interval_ms) + + terminal_states = {"requires_action", "cancelled", "completed", "failed", "expired"} + while True: + response = self.with_raw_response.retrieve( + thread_id=thread_id, + run_id=run_id, + extra_headers=extra_headers, + extra_body=extra_body, + extra_query=extra_query, + timeout=timeout, + ) + + run = response.parse() + # Return if we reached a terminal state + if run.status in terminal_states: + return run + + if not is_given(poll_interval_ms): + from_header = response.headers.get("openai-poll-after-ms") + if from_header is not None: + poll_interval_ms = int(from_header) + else: + poll_interval_ms = 1000 + + time.sleep(poll_interval_ms / 1000) + + @overload + def stream( + self, + *, + assistant_id: str, + additional_instructions: Optional[str] | NotGiven = NOT_GIVEN, + additional_messages: Optional[Iterable[run_create_params.AdditionalMessage]] | NotGiven = NOT_GIVEN, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + 
temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[run_create_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + thread_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AssistantStreamManager[AssistantEventHandler]: + """Create a Run stream""" + ... + + @overload + def stream( + self, + *, + assistant_id: str, + additional_instructions: Optional[str] | NotGiven = NOT_GIVEN, + additional_messages: Optional[Iterable[run_create_params.AdditionalMessage]] | NotGiven = NOT_GIVEN, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + 
tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[run_create_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + thread_id: str, + event_handler: AssistantEventHandlerT, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AssistantStreamManager[AssistantEventHandlerT]: + """Create a Run stream""" + ... + + def stream( + self, + *, + assistant_id: str, + additional_instructions: Optional[str] | NotGiven = NOT_GIVEN, + additional_messages: Optional[Iterable[run_create_params.AdditionalMessage]] | NotGiven = NOT_GIVEN, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | 
NotGiven = NOT_GIVEN, + truncation_strategy: Optional[run_create_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + thread_id: str, + event_handler: AssistantEventHandlerT | None = None, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AssistantStreamManager[AssistantEventHandler] | AssistantStreamManager[AssistantEventHandlerT]: + """Create a Run stream""" + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + + extra_headers = { + "OpenAI-Beta": "assistants=v2", + "X-Stainless-Stream-Helper": "threads.runs.create_and_stream", + "X-Stainless-Custom-Event-Handler": "true" if event_handler else "false", + **(extra_headers or {}), + } + make_request = partial( + self._post, + f"/threads/{thread_id}/runs", + body=maybe_transform( + { + "assistant_id": assistant_id, + "additional_instructions": additional_instructions, + "additional_messages": additional_messages, + "instructions": instructions, + "max_completion_tokens": max_completion_tokens, + "max_prompt_tokens": max_prompt_tokens, + "metadata": metadata, + "model": model, + "response_format": response_format, + "temperature": temperature, + "tool_choice": tool_choice, + "stream": True, + "tools": tools, + "truncation_strategy": truncation_strategy, + "top_p": top_p, + }, + run_create_params.RunCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Run, + stream=True, + stream_cls=Stream[AssistantStreamEvent], + ) + return AssistantStreamManager(make_request, event_handler=event_handler or 
AssistantEventHandler()) + + @overload + def submit_tool_outputs( + self, + run_id: str, + *, + thread_id: str, + tool_outputs: Iterable[run_submit_tool_outputs_params.ToolOutput], + stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run: + """ + When a run has the `status: "requires_action"` and `required_action.type` is + `submit_tool_outputs`, this endpoint can be used to submit the outputs from the + tool calls once they're all completed. All outputs must be submitted in a single + request. + + Args: + tool_outputs: A list of tools for which the outputs are being submitted. + + stream: If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... + + @overload + def submit_tool_outputs( + self, + run_id: str, + *, + thread_id: str, + stream: Literal[True], + tool_outputs: Iterable[run_submit_tool_outputs_params.ToolOutput], + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Stream[AssistantStreamEvent]: + """ + When a run has the `status: "requires_action"` and `required_action.type` is + `submit_tool_outputs`, this endpoint can be used to submit the outputs from the + tool calls once they're all completed. All outputs must be submitted in a single + request. + + Args: + stream: If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + + tool_outputs: A list of tools for which the outputs are being submitted. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... + + @overload + def submit_tool_outputs( + self, + run_id: str, + *, + thread_id: str, + stream: bool, + tool_outputs: Iterable[run_submit_tool_outputs_params.ToolOutput], + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run | Stream[AssistantStreamEvent]: + """ + When a run has the `status: "requires_action"` and `required_action.type` is + `submit_tool_outputs`, this endpoint can be used to submit the outputs from the + tool calls once they're all completed. All outputs must be submitted in a single + request. 
+ + Args: + stream: If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + + tool_outputs: A list of tools for which the outputs are being submitted. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... + + @required_args(["thread_id", "tool_outputs"], ["thread_id", "stream", "tool_outputs"]) + def submit_tool_outputs( + self, + run_id: str, + *, + thread_id: str, + tool_outputs: Iterable[run_submit_tool_outputs_params.ToolOutput], + stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run | Stream[AssistantStreamEvent]: + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + if not run_id: + raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._post( + f"/threads/{thread_id}/runs/{run_id}/submit_tool_outputs", + body=maybe_transform( + { + "tool_outputs": tool_outputs, + "stream": stream, + }, + run_submit_tool_outputs_params.RunSubmitToolOutputsParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Run, + stream=stream or False, + stream_cls=Stream[AssistantStreamEvent], + ) + + def submit_tool_outputs_and_poll( + self, + *, + tool_outputs: Iterable[run_submit_tool_outputs_params.ToolOutput], + run_id: str, + thread_id: str, + poll_interval_ms: int | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run: + """ + A helper to submit a tool output to a run and poll for a terminal run state. 
+ More information on Run lifecycles can be found here: + https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + """ + run = self.submit_tool_outputs( + run_id=run_id, + thread_id=thread_id, + tool_outputs=tool_outputs, + stream=False, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + return self.poll( + run_id=run.id, + thread_id=thread_id, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + poll_interval_ms=poll_interval_ms, + ) + + @overload + def submit_tool_outputs_stream( + self, + *, + tool_outputs: Iterable[run_submit_tool_outputs_params.ToolOutput], + run_id: str, + thread_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AssistantStreamManager[AssistantEventHandler]: + """ + Submit the tool outputs from a previous run and stream the run to a terminal + state. More information on Run lifecycles can be found here: + https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + """ + ... + + @overload + def submit_tool_outputs_stream( + self, + *, + tool_outputs: Iterable[run_submit_tool_outputs_params.ToolOutput], + run_id: str, + thread_id: str, + event_handler: AssistantEventHandlerT, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AssistantStreamManager[AssistantEventHandlerT]: + """ + Submit the tool outputs from a previous run and stream the run to a terminal + state. More information on Run lifecycles can be found here: + https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + """ + ... + + def submit_tool_outputs_stream( + self, + *, + tool_outputs: Iterable[run_submit_tool_outputs_params.ToolOutput], + run_id: str, + thread_id: str, + event_handler: AssistantEventHandlerT | None = None, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AssistantStreamManager[AssistantEventHandler] | AssistantStreamManager[AssistantEventHandlerT]: + """ + Submit the tool outputs from a previous run and stream the run to a terminal + state. 
More information on Run lifecycles can be found here: + https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + """ + if not run_id: + raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") + + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + + extra_headers = { + "OpenAI-Beta": "assistants=v2", + "X-Stainless-Stream-Helper": "threads.runs.submit_tool_outputs_stream", + "X-Stainless-Custom-Event-Handler": "true" if event_handler else "false", + **(extra_headers or {}), + } + request = partial( + self._post, + f"/threads/{thread_id}/runs/{run_id}/submit_tool_outputs", + body=maybe_transform( + { + "tool_outputs": tool_outputs, + "stream": True, + }, + run_submit_tool_outputs_params.RunSubmitToolOutputsParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Run, + stream=True, + stream_cls=Stream[AssistantStreamEvent], + ) + return AssistantStreamManager(request, event_handler=event_handler or AssistantEventHandler()) + + +class AsyncRuns(AsyncAPIResource): + @cached_property + def steps(self) -> AsyncSteps: + return AsyncSteps(self._client) + + @cached_property + def with_raw_response(self) -> AsyncRunsWithRawResponse: + return AsyncRunsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncRunsWithStreamingResponse: + return AsyncRunsWithStreamingResponse(self) + + @overload + async def create( + self, + thread_id: str, + *, + assistant_id: str, + additional_instructions: Optional[str] | NotGiven = NOT_GIVEN, + additional_messages: Optional[Iterable[run_create_params.AdditionalMessage]] | NotGiven = NOT_GIVEN, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven 
= NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[run_create_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run: + """ + Create a run. + + Args: + assistant_id: The ID of the + [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + execute this run. + + additional_instructions: Appends additional instructions at the end of the instructions for the run. This + is useful for modifying the behavior on a per-run basis without overriding other + instructions. + + additional_messages: Adds additional messages to the thread before creating the run. + + instructions: Overrides the + [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + of the assistant. 
This is useful for modifying the behavior on a per-run basis. + + max_completion_tokens: The maximum number of completion tokens that may be used over the course of the + run. The run will make a best effort to use only the number of completion tokens + specified, across multiple turns of the run. If the run exceeds the number of + completion tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + max_prompt_tokens: The maximum number of prompt tokens that may be used over the course of the run. + The run will make a best effort to use only the number of prompt tokens + specified, across multiple turns of the run. If the run exceeds the number of + prompt tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + model: The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + be used to execute this run. If a value is provided here, it will override the + model associated with the assistant. If not, the model associated with the + assistant will be used. + + response_format: Specifies the format that the model must output. Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. 
Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + + stream: If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + tool_choice: Controls which (if any) tool is called by the model. `none` means the model will + not call any tools and instead generates a message. `auto` is the default value + and means the model can pick between generating a message or calling one or more + tools. `required` means the model must call one or more tools before responding + to the user. Specifying a particular tool like `{"type": "file_search"}` or + `{"type": "function", "function": {"name": "my_function"}}` forces the model to + call that tool. + + tools: Override the tools the assistant can use for this run. This is useful for + modifying the behavior on a per-run basis. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or temperature but not both. + + truncation_strategy: Controls for how a thread will be truncated prior to the run. Use this to + control the intial context window of the run. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... + + @overload + async def create( + self, + thread_id: str, + *, + assistant_id: str, + stream: Literal[True], + additional_instructions: Optional[str] | NotGiven = NOT_GIVEN, + additional_messages: Optional[Iterable[run_create_params.AdditionalMessage]] | NotGiven = NOT_GIVEN, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[run_create_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncStream[AssistantStreamEvent]: + """ + Create a run. + + Args: + assistant_id: The ID of the + [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + execute this run. + + stream: If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + + additional_instructions: Appends additional instructions at the end of the instructions for the run. This + is useful for modifying the behavior on a per-run basis without overriding other + instructions. + + additional_messages: Adds additional messages to the thread before creating the run. + + instructions: Overrides the + [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + of the assistant. This is useful for modifying the behavior on a per-run basis. + + max_completion_tokens: The maximum number of completion tokens that may be used over the course of the + run. The run will make a best effort to use only the number of completion tokens + specified, across multiple turns of the run. If the run exceeds the number of + completion tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + max_prompt_tokens: The maximum number of prompt tokens that may be used over the course of the run. + The run will make a best effort to use only the number of prompt tokens + specified, across multiple turns of the run. If the run exceeds the number of + prompt tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. 
Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + model: The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + be used to execute this run. If a value is provided here, it will override the + model associated with the assistant. If not, the model associated with the + assistant will be used. + + response_format: Specifies the format that the model must output. Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + tool_choice: Controls which (if any) tool is called by the model. `none` means the model will + not call any tools and instead generates a message. `auto` is the default value + and means the model can pick between generating a message or calling one or more + tools. `required` means the model must call one or more tools before responding + to the user. Specifying a particular tool like `{"type": "file_search"}` or + `{"type": "function", "function": {"name": "my_function"}}` forces the model to + call that tool. 
+ + tools: Override the tools the assistant can use for this run. This is useful for + modifying the behavior on a per-run basis. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or temperature but not both. + + truncation_strategy: Controls for how a thread will be truncated prior to the run. Use this to + control the intial context window of the run. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... + + @overload + async def create( + self, + thread_id: str, + *, + assistant_id: str, + stream: bool, + additional_instructions: Optional[str] | NotGiven = NOT_GIVEN, + additional_messages: Optional[Iterable[run_create_params.AdditionalMessage]] | NotGiven = NOT_GIVEN, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: 
Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[run_create_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run | AsyncStream[AssistantStreamEvent]: + """ + Create a run. + + Args: + assistant_id: The ID of the + [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + execute this run. + + stream: If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + + additional_instructions: Appends additional instructions at the end of the instructions for the run. This + is useful for modifying the behavior on a per-run basis without overriding other + instructions. + + additional_messages: Adds additional messages to the thread before creating the run. + + instructions: Overrides the + [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + of the assistant. This is useful for modifying the behavior on a per-run basis. + + max_completion_tokens: The maximum number of completion tokens that may be used over the course of the + run. The run will make a best effort to use only the number of completion tokens + specified, across multiple turns of the run. If the run exceeds the number of + completion tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. 
+ + max_prompt_tokens: The maximum number of prompt tokens that may be used over the course of the run. + The run will make a best effort to use only the number of prompt tokens + specified, across multiple turns of the run. If the run exceeds the number of + prompt tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + model: The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + be used to execute this run. If a value is provided here, it will override the + model associated with the assistant. If not, the model associated with the + assistant will be used. + + response_format: Specifies the format that the model must output. Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. 
+ + tool_choice: Controls which (if any) tool is called by the model. `none` means the model will + not call any tools and instead generates a message. `auto` is the default value + and means the model can pick between generating a message or calling one or more + tools. `required` means the model must call one or more tools before responding + to the user. Specifying a particular tool like `{"type": "file_search"}` or + `{"type": "function", "function": {"name": "my_function"}}` forces the model to + call that tool. + + tools: Override the tools the assistant can use for this run. This is useful for + modifying the behavior on a per-run basis. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or temperature but not both. + + truncation_strategy: Controls for how a thread will be truncated prior to the run. Use this to + control the intial context window of the run. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... 
    @required_args(["assistant_id"], ["assistant_id", "stream"])
    async def create(
        self,
        thread_id: str,
        *,
        assistant_id: str,
        additional_instructions: Optional[str] | NotGiven = NOT_GIVEN,
        additional_messages: Optional[Iterable[run_create_params.AdditionalMessage]] | NotGiven = NOT_GIVEN,
        instructions: Optional[str] | NotGiven = NOT_GIVEN,
        max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
        max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
        metadata: Optional[object] | NotGiven = NOT_GIVEN,
        model: Union[
            str,
            Literal[
                "gpt-4-turbo",
                "gpt-4-turbo-2024-04-09",
                "gpt-4-0125-preview",
                "gpt-4-turbo-preview",
                "gpt-4-1106-preview",
                "gpt-4-vision-preview",
                "gpt-4",
                "gpt-4-0314",
                "gpt-4-0613",
                "gpt-4-32k",
                "gpt-4-32k-0314",
                "gpt-4-32k-0613",
                "gpt-3.5-turbo",
                "gpt-3.5-turbo-16k",
                "gpt-3.5-turbo-0613",
                "gpt-3.5-turbo-1106",
                "gpt-3.5-turbo-0125",
                "gpt-3.5-turbo-16k-0613",
            ],
            None,
        ]
        | NotGiven = NOT_GIVEN,
        response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
        stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
        temperature: Optional[float] | NotGiven = NOT_GIVEN,
        tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN,
        tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN,
        top_p: Optional[float] | NotGiven = NOT_GIVEN,
        truncation_strategy: Optional[run_create_params.TruncationStrategy] | NotGiven = NOT_GIVEN,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> Run | AsyncStream[AssistantStreamEvent]:
        """Create a run on thread `thread_id`.

        Implementation behind the `create` overloads above; see those overloads
        for full per-parameter documentation. Returns a `Run` when `stream` is
        falsy, otherwise an `AsyncStream[AssistantStreamEvent]` of server-sent
        events (note `stream=stream or False` below).
        """
        if not thread_id:
            raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}")
        # Beta header set on every Assistants endpoint call in this module.
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return await self._post(
            f"/threads/{thread_id}/runs",
            body=await async_maybe_transform(
                {
                    "assistant_id": assistant_id,
                    "additional_instructions": additional_instructions,
                    "additional_messages": additional_messages,
                    "instructions": instructions,
                    "max_completion_tokens": max_completion_tokens,
                    "max_prompt_tokens": max_prompt_tokens,
                    "metadata": metadata,
                    "model": model,
                    "response_format": response_format,
                    "stream": stream,
                    "temperature": temperature,
                    "tool_choice": tool_choice,
                    "tools": tools,
                    "top_p": top_p,
                    "truncation_strategy": truncation_strategy,
                },
                run_create_params.RunCreateParams,
            ),
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=Run,
            stream=stream or False,
            stream_cls=AsyncStream[AssistantStreamEvent],
        )

    async def retrieve(
        self,
        run_id: str,
        *,
        thread_id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> Run:
        """
        Retrieves a run.

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not thread_id:
            raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}")
        if not run_id:
            raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}")
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return await self._get(
            f"/threads/{thread_id}/runs/{run_id}",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=Run,
        )

    async def update(
        self,
        run_id: str,
        *,
        thread_id: str,
        metadata: Optional[object] | NotGiven = NOT_GIVEN,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> Run:
        """
        Modifies a run.

        Args:
          metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful
              for storing additional information about the object in a structured format. Keys
              can be a maximum of 64 characters long and values can be a maximum of 512
              characters long.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not thread_id:
            raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}")
        if not run_id:
            raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}")
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return await self._post(
            f"/threads/{thread_id}/runs/{run_id}",
            body=await async_maybe_transform({"metadata": metadata}, run_update_params.RunUpdateParams),
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=Run,
        )

    def list(
        self,
        thread_id: str,
        *,
        after: str | NotGiven = NOT_GIVEN,
        before: str | NotGiven = NOT_GIVEN,
        limit: int | NotGiven = NOT_GIVEN,
        order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> AsyncPaginator[Run, AsyncCursorPage[Run]]:
        """
        Returns a list of runs belonging to a thread.

        Args:
          after: A cursor for use in pagination. `after` is an object ID that defines your place
              in the list. For instance, if you make a list request and receive 100 objects,
              ending with obj_foo, your subsequent call can include after=obj_foo in order to
              fetch the next page of the list.

          before: A cursor for use in pagination. `before` is an object ID that defines your place
              in the list. For instance, if you make a list request and receive 100 objects,
              ending with obj_foo, your subsequent call can include before=obj_foo in order to
              fetch the previous page of the list.

          limit: A limit on the number of objects to be returned. Limit can range between 1 and
              100, and the default is 20.

          order: Sort order by the `created_at` timestamp of the objects. `asc` for ascending
              order and `desc` for descending order.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not thread_id:
            raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}")
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        # NOTE: not `async def` — returns a lazy AsyncPaginator that is awaited/iterated by the caller.
        return self._get_api_list(
            f"/threads/{thread_id}/runs",
            page=AsyncCursorPage[Run],
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                query=maybe_transform(
                    {
                        "after": after,
                        "before": before,
                        "limit": limit,
                        "order": order,
                    },
                    run_list_params.RunListParams,
                ),
            ),
            model=Run,
        )

    async def cancel(
        self,
        run_id: str,
        *,
        thread_id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> Run:
        """
        Cancels a run that is `in_progress`.

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not thread_id:
            raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}")
        if not run_id:
            raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}")
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return await self._post(
            f"/threads/{thread_id}/runs/{run_id}/cancel",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=Run,
        )

    async def create_and_poll(
        self,
        *,
        assistant_id: str,
        additional_instructions: Optional[str] | NotGiven = NOT_GIVEN,
        additional_messages: Optional[Iterable[run_create_params.AdditionalMessage]] | NotGiven = NOT_GIVEN,
        instructions: Optional[str] | NotGiven = NOT_GIVEN,
        max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
        max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
        metadata: Optional[object] | NotGiven = NOT_GIVEN,
        model: Union[
            str,
            Literal[
                "gpt-4-turbo",
                "gpt-4-turbo-2024-04-09",
                "gpt-4-0125-preview",
                "gpt-4-turbo-preview",
                "gpt-4-1106-preview",
                "gpt-4-vision-preview",
                "gpt-4",
                "gpt-4-0314",
                "gpt-4-0613",
                "gpt-4-32k",
                "gpt-4-32k-0314",
                "gpt-4-32k-0613",
                "gpt-3.5-turbo",
                "gpt-3.5-turbo-16k",
                "gpt-3.5-turbo-0613",
                "gpt-3.5-turbo-1106",
                "gpt-3.5-turbo-0125",
                "gpt-3.5-turbo-16k-0613",
            ],
            None,
        ]
        | NotGiven = NOT_GIVEN,
        response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
        temperature: Optional[float] | NotGiven = NOT_GIVEN,
        tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN,
        tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN,
        top_p: Optional[float] | NotGiven = NOT_GIVEN,
        truncation_strategy: Optional[run_create_params.TruncationStrategy] | NotGiven = NOT_GIVEN,
        poll_interval_ms: int | NotGiven = NOT_GIVEN,
        thread_id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> Run:
        """
        A helper to create a run and poll for a terminal state. More information on Run
        lifecycles can be found here:
        https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps
        """
        run = await self.create(
            thread_id=thread_id,
            assistant_id=assistant_id,
            additional_instructions=additional_instructions,
            additional_messages=additional_messages,
            instructions=instructions,
            max_completion_tokens=max_completion_tokens,
            max_prompt_tokens=max_prompt_tokens,
            metadata=metadata,
            model=model,
            response_format=response_format,
            temperature=temperature,
            tool_choice=tool_choice,
            # We assume we are not streaming when polling
            stream=False,
            tools=tools,
            truncation_strategy=truncation_strategy,
            top_p=top_p,
            extra_headers=extra_headers,
            extra_query=extra_query,
            extra_body=extra_body,
            timeout=timeout,
        )
        return await self.poll(
            run.id,
            thread_id=thread_id,
            extra_headers=extra_headers,
            extra_query=extra_query,
            extra_body=extra_body,
            poll_interval_ms=poll_interval_ms,
            timeout=timeout,
        )

    @overload
    @typing_extensions.deprecated("use `stream` instead")
    def create_and_stream(
        self,
        *,
        assistant_id: str,
        additional_instructions: Optional[str] | NotGiven = NOT_GIVEN,
        additional_messages: Optional[Iterable[run_create_params.AdditionalMessage]] | NotGiven = NOT_GIVEN,
        instructions: Optional[str] | NotGiven = NOT_GIVEN,
        max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
        max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
        metadata: Optional[object] | NotGiven = NOT_GIVEN,
        model: Union[
            str,
            Literal[
                "gpt-4-turbo",
                "gpt-4-turbo-2024-04-09",
                "gpt-4-0125-preview",
                "gpt-4-turbo-preview",
                "gpt-4-1106-preview",
                "gpt-4-vision-preview",
                "gpt-4",
                "gpt-4-0314",
                "gpt-4-0613",
                "gpt-4-32k",
                "gpt-4-32k-0314",
                "gpt-4-32k-0613",
                "gpt-3.5-turbo",
                "gpt-3.5-turbo-16k",
                "gpt-3.5-turbo-0613",
                "gpt-3.5-turbo-1106",
                "gpt-3.5-turbo-0125",
                "gpt-3.5-turbo-16k-0613",
            ],
            None,
        ]
        | NotGiven = NOT_GIVEN,
        response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
        temperature: Optional[float] | NotGiven = NOT_GIVEN,
        tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN,
        tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN,
        top_p: Optional[float] | NotGiven = NOT_GIVEN,
        truncation_strategy: Optional[run_create_params.TruncationStrategy] | NotGiven = NOT_GIVEN,
        thread_id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> AsyncAssistantStreamManager[AsyncAssistantEventHandler]:
        """Create a Run stream (no custom event handler; a default `AsyncAssistantEventHandler` is used)."""
        ...

    @overload
    @typing_extensions.deprecated("use `stream` instead")
    def create_and_stream(
        self,
        *,
        assistant_id: str,
        additional_instructions: Optional[str] | NotGiven = NOT_GIVEN,
        additional_messages: Optional[Iterable[run_create_params.AdditionalMessage]] | NotGiven = NOT_GIVEN,
        instructions: Optional[str] | NotGiven = NOT_GIVEN,
        max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
        max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
        metadata: Optional[object] | NotGiven = NOT_GIVEN,
        model: Union[
            str,
            Literal[
                "gpt-4-turbo",
                "gpt-4-turbo-2024-04-09",
                "gpt-4-0125-preview",
                "gpt-4-turbo-preview",
                "gpt-4-1106-preview",
                "gpt-4-vision-preview",
                "gpt-4",
                "gpt-4-0314",
                "gpt-4-0613",
                "gpt-4-32k",
                "gpt-4-32k-0314",
                "gpt-4-32k-0613",
                "gpt-3.5-turbo",
                "gpt-3.5-turbo-16k",
                "gpt-3.5-turbo-0613",
                "gpt-3.5-turbo-1106",
                "gpt-3.5-turbo-0125",
                "gpt-3.5-turbo-16k-0613",
            ],
            None,
        ]
        | NotGiven = NOT_GIVEN,
        response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
        temperature: Optional[float] | NotGiven = NOT_GIVEN,
        tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN,
        tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN,
        top_p: Optional[float] | NotGiven = NOT_GIVEN,
        truncation_strategy: Optional[run_create_params.TruncationStrategy] | NotGiven = NOT_GIVEN,
        thread_id: str,
        event_handler: AsyncAssistantEventHandlerT,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> AsyncAssistantStreamManager[AsyncAssistantEventHandlerT]:
        """Create a Run stream, dispatching events to the caller-supplied `event_handler`."""
        ...
+ + @typing_extensions.deprecated("use `stream` instead") + def create_and_stream( + self, + *, + assistant_id: str, + additional_instructions: Optional[str] | NotGiven = NOT_GIVEN, + additional_messages: Optional[Iterable[run_create_params.AdditionalMessage]] | NotGiven = NOT_GIVEN, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[run_create_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + thread_id: str, + event_handler: AsyncAssistantEventHandlerT | None = None, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ( + AsyncAssistantStreamManager[AsyncAssistantEventHandler] + | AsyncAssistantStreamManager[AsyncAssistantEventHandlerT] + ): + """Create a Run stream""" + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + + extra_headers = { + "OpenAI-Beta": "assistants=v2", + "X-Stainless-Stream-Helper": "threads.runs.create_and_stream", + "X-Stainless-Custom-Event-Handler": "true" if event_handler else "false", + **(extra_headers or {}), + } + request = self._post( + f"/threads/{thread_id}/runs", + body=maybe_transform( + { + "assistant_id": assistant_id, + "additional_instructions": additional_instructions, + "additional_messages": additional_messages, + "instructions": instructions, + "max_completion_tokens": max_completion_tokens, + "max_prompt_tokens": max_prompt_tokens, + "metadata": metadata, + "model": model, + "response_format": response_format, + "temperature": temperature, + "tool_choice": tool_choice, + "stream": True, + "tools": tools, + "truncation_strategy": truncation_strategy, + "top_p": top_p, + }, + run_create_params.RunCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Run, + stream=True, + stream_cls=AsyncStream[AssistantStreamEvent], + ) + return AsyncAssistantStreamManager(request, event_handler=event_handler or AsyncAssistantEventHandler()) + + async def poll( + self, + run_id: str, + thread_id: str, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + poll_interval_ms: int | NotGiven = NOT_GIVEN, + ) -> Run: + """ + A helper to poll a run status until it reaches a terminal state. 
More + information on Run lifecycles can be found here: + https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + """ + extra_headers = {"X-Stainless-Poll-Helper": "true", **(extra_headers or {})} + + if is_given(poll_interval_ms): + extra_headers["X-Stainless-Custom-Poll-Interval"] = str(poll_interval_ms) + + terminal_states = {"requires_action", "cancelled", "completed", "failed", "expired"} + while True: + response = await self.with_raw_response.retrieve( + thread_id=thread_id, + run_id=run_id, + extra_headers=extra_headers, + extra_body=extra_body, + extra_query=extra_query, + timeout=timeout, + ) + + run = response.parse() + # Return if we reached a terminal state + if run.status in terminal_states: + return run + + if not is_given(poll_interval_ms): + from_header = response.headers.get("openai-poll-after-ms") + if from_header is not None: + poll_interval_ms = int(from_header) + else: + poll_interval_ms = 1000 + + time.sleep(poll_interval_ms / 1000) + + @overload + def stream( + self, + *, + assistant_id: str, + additional_instructions: Optional[str] | NotGiven = NOT_GIVEN, + additional_messages: Optional[Iterable[run_create_params.AdditionalMessage]] | NotGiven = NOT_GIVEN, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, 
+ temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[run_create_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + thread_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncAssistantStreamManager[AsyncAssistantEventHandler]: + """Create a Run stream""" + ... + + @overload + def stream( + self, + *, + assistant_id: str, + additional_instructions: Optional[str] | NotGiven = NOT_GIVEN, + additional_messages: Optional[Iterable[run_create_params.AdditionalMessage]] | NotGiven = NOT_GIVEN, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = 
NOT_GIVEN, + tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[run_create_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + thread_id: str, + event_handler: AsyncAssistantEventHandlerT, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncAssistantStreamManager[AsyncAssistantEventHandlerT]: + """Create a Run stream""" + ... + + def stream( + self, + *, + assistant_id: str, + additional_instructions: Optional[str] | NotGiven = NOT_GIVEN, + additional_messages: Optional[Iterable[run_create_params.AdditionalMessage]] | NotGiven = NOT_GIVEN, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN, + 
top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[run_create_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + thread_id: str, + event_handler: AsyncAssistantEventHandlerT | None = None, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ( + AsyncAssistantStreamManager[AsyncAssistantEventHandler] + | AsyncAssistantStreamManager[AsyncAssistantEventHandlerT] + ): + """Create a Run stream""" + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + + extra_headers = { + "OpenAI-Beta": "assistants=v2", + "X-Stainless-Stream-Helper": "threads.runs.create_and_stream", + "X-Stainless-Custom-Event-Handler": "true" if event_handler else "false", + **(extra_headers or {}), + } + request = self._post( + f"/threads/{thread_id}/runs", + body=maybe_transform( + { + "assistant_id": assistant_id, + "additional_instructions": additional_instructions, + "additional_messages": additional_messages, + "instructions": instructions, + "max_completion_tokens": max_completion_tokens, + "max_prompt_tokens": max_prompt_tokens, + "metadata": metadata, + "model": model, + "response_format": response_format, + "temperature": temperature, + "tool_choice": tool_choice, + "stream": True, + "tools": tools, + "truncation_strategy": truncation_strategy, + "top_p": top_p, + }, + run_create_params.RunCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Run, + stream=True, + stream_cls=AsyncStream[AssistantStreamEvent], + ) + return 
AsyncAssistantStreamManager(request, event_handler=event_handler or AsyncAssistantEventHandler()) + + @overload + async def submit_tool_outputs( + self, + run_id: str, + *, + thread_id: str, + tool_outputs: Iterable[run_submit_tool_outputs_params.ToolOutput], + stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run: + """ + When a run has the `status: "requires_action"` and `required_action.type` is + `submit_tool_outputs`, this endpoint can be used to submit the outputs from the + tool calls once they're all completed. All outputs must be submitted in a single + request. + + Args: + tool_outputs: A list of tools for which the outputs are being submitted. + + stream: If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... + + @overload + async def submit_tool_outputs( + self, + run_id: str, + *, + thread_id: str, + stream: Literal[True], + tool_outputs: Iterable[run_submit_tool_outputs_params.ToolOutput], + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncStream[AssistantStreamEvent]: + """ + When a run has the `status: "requires_action"` and `required_action.type` is + `submit_tool_outputs`, this endpoint can be used to submit the outputs from the + tool calls once they're all completed. All outputs must be submitted in a single + request. + + Args: + stream: If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + + tool_outputs: A list of tools for which the outputs are being submitted. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... + + @overload + async def submit_tool_outputs( + self, + run_id: str, + *, + thread_id: str, + stream: bool, + tool_outputs: Iterable[run_submit_tool_outputs_params.ToolOutput], + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run | AsyncStream[AssistantStreamEvent]: + """ + When a run has the `status: "requires_action"` and `required_action.type` is + `submit_tool_outputs`, this endpoint can be used to submit the outputs from the + tool calls once they're all completed. All outputs must be submitted in a single + request. 
+ + Args: + stream: If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + + tool_outputs: A list of tools for which the outputs are being submitted. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... + + @required_args(["thread_id", "tool_outputs"], ["thread_id", "stream", "tool_outputs"]) + async def submit_tool_outputs( + self, + run_id: str, + *, + thread_id: str, + tool_outputs: Iterable[run_submit_tool_outputs_params.ToolOutput], + stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run | AsyncStream[AssistantStreamEvent]: + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + if not run_id: + raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return await self._post( + f"/threads/{thread_id}/runs/{run_id}/submit_tool_outputs", + body=await async_maybe_transform( + { + "tool_outputs": tool_outputs, + "stream": stream, + }, + run_submit_tool_outputs_params.RunSubmitToolOutputsParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Run, + stream=stream or False, + stream_cls=AsyncStream[AssistantStreamEvent], + ) + + async def submit_tool_outputs_and_poll( + self, + *, + tool_outputs: Iterable[run_submit_tool_outputs_params.ToolOutput], + run_id: str, + thread_id: str, + poll_interval_ms: int | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run: + """ + A helper to submit a tool output to a run and poll for a terminal run state. 
+ More information on Run lifecycles can be found here: + https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + """ + run = await self.submit_tool_outputs( + run_id=run_id, + thread_id=thread_id, + tool_outputs=tool_outputs, + stream=False, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + return await self.poll( + run_id=run.id, + thread_id=thread_id, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + poll_interval_ms=poll_interval_ms, + ) + + @overload + def submit_tool_outputs_stream( + self, + *, + tool_outputs: Iterable[run_submit_tool_outputs_params.ToolOutput], + run_id: str, + thread_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncAssistantStreamManager[AsyncAssistantEventHandler]: + """ + Submit the tool outputs from a previous run and stream the run to a terminal + state. More information on Run lifecycles can be found here: + https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + """ + ... + + @overload + def submit_tool_outputs_stream( + self, + *, + tool_outputs: Iterable[run_submit_tool_outputs_params.ToolOutput], + run_id: str, + thread_id: str, + event_handler: AsyncAssistantEventHandlerT, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncAssistantStreamManager[AsyncAssistantEventHandlerT]: + """ + Submit the tool outputs from a previous run and stream the run to a terminal + state. More information on Run lifecycles can be found here: + https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + """ + ... + + def submit_tool_outputs_stream( + self, + *, + tool_outputs: Iterable[run_submit_tool_outputs_params.ToolOutput], + run_id: str, + thread_id: str, + event_handler: AsyncAssistantEventHandlerT | None = None, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ( + AsyncAssistantStreamManager[AsyncAssistantEventHandler] + | AsyncAssistantStreamManager[AsyncAssistantEventHandlerT] + ): + """ + Submit the tool outputs from a previous run and stream the run to a terminal + state. 
More information on Run lifecycles can be found here: + https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + """ + if not run_id: + raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") + + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + + extra_headers = { + "OpenAI-Beta": "assistants=v2", + "X-Stainless-Stream-Helper": "threads.runs.submit_tool_outputs_stream", + "X-Stainless-Custom-Event-Handler": "true" if event_handler else "false", + **(extra_headers or {}), + } + request = self._post( + f"/threads/{thread_id}/runs/{run_id}/submit_tool_outputs", + body=maybe_transform( + { + "tool_outputs": tool_outputs, + "stream": True, + }, + run_submit_tool_outputs_params.RunSubmitToolOutputsParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Run, + stream=True, + stream_cls=AsyncStream[AssistantStreamEvent], + ) + return AsyncAssistantStreamManager(request, event_handler=event_handler or AsyncAssistantEventHandler()) + + +class RunsWithRawResponse: + def __init__(self, runs: Runs) -> None: + self._runs = runs + + self.create = _legacy_response.to_raw_response_wrapper( + runs.create, + ) + self.retrieve = _legacy_response.to_raw_response_wrapper( + runs.retrieve, + ) + self.update = _legacy_response.to_raw_response_wrapper( + runs.update, + ) + self.list = _legacy_response.to_raw_response_wrapper( + runs.list, + ) + self.cancel = _legacy_response.to_raw_response_wrapper( + runs.cancel, + ) + self.submit_tool_outputs = _legacy_response.to_raw_response_wrapper( + runs.submit_tool_outputs, + ) + + @cached_property + def steps(self) -> StepsWithRawResponse: + return StepsWithRawResponse(self._runs.steps) + + +class AsyncRunsWithRawResponse: + def __init__(self, runs: AsyncRuns) -> None: + self._runs = runs + + self.create = 
_legacy_response.async_to_raw_response_wrapper( + runs.create, + ) + self.retrieve = _legacy_response.async_to_raw_response_wrapper( + runs.retrieve, + ) + self.update = _legacy_response.async_to_raw_response_wrapper( + runs.update, + ) + self.list = _legacy_response.async_to_raw_response_wrapper( + runs.list, + ) + self.cancel = _legacy_response.async_to_raw_response_wrapper( + runs.cancel, + ) + self.submit_tool_outputs = _legacy_response.async_to_raw_response_wrapper( + runs.submit_tool_outputs, + ) + + @cached_property + def steps(self) -> AsyncStepsWithRawResponse: + return AsyncStepsWithRawResponse(self._runs.steps) + + +class RunsWithStreamingResponse: + def __init__(self, runs: Runs) -> None: + self._runs = runs + + self.create = to_streamed_response_wrapper( + runs.create, + ) + self.retrieve = to_streamed_response_wrapper( + runs.retrieve, + ) + self.update = to_streamed_response_wrapper( + runs.update, + ) + self.list = to_streamed_response_wrapper( + runs.list, + ) + self.cancel = to_streamed_response_wrapper( + runs.cancel, + ) + self.submit_tool_outputs = to_streamed_response_wrapper( + runs.submit_tool_outputs, + ) + + @cached_property + def steps(self) -> StepsWithStreamingResponse: + return StepsWithStreamingResponse(self._runs.steps) + + +class AsyncRunsWithStreamingResponse: + def __init__(self, runs: AsyncRuns) -> None: + self._runs = runs + + self.create = async_to_streamed_response_wrapper( + runs.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + runs.retrieve, + ) + self.update = async_to_streamed_response_wrapper( + runs.update, + ) + self.list = async_to_streamed_response_wrapper( + runs.list, + ) + self.cancel = async_to_streamed_response_wrapper( + runs.cancel, + ) + self.submit_tool_outputs = async_to_streamed_response_wrapper( + runs.submit_tool_outputs, + ) + + @cached_property + def steps(self) -> AsyncStepsWithStreamingResponse: + return AsyncStepsWithStreamingResponse(self._runs.steps) diff --git 
a/.venv/Lib/site-packages/openai/resources/beta/threads/runs/steps.py b/.venv/Lib/site-packages/openai/resources/beta/threads/runs/steps.py new file mode 100644 index 00000000..51200893 --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/beta/threads/runs/steps.py @@ -0,0 +1,311 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal + +import httpx + +from ..... import _legacy_response +from ....._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ....._utils import maybe_transform +from ....._compat import cached_property +from ....._resource import SyncAPIResource, AsyncAPIResource +from ....._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper +from .....pagination import SyncCursorPage, AsyncCursorPage +from ....._base_client import ( + AsyncPaginator, + make_request_options, +) +from .....types.beta.threads.runs import step_list_params +from .....types.beta.threads.runs.run_step import RunStep + +__all__ = ["Steps", "AsyncSteps"] + + +class Steps(SyncAPIResource): + @cached_property + def with_raw_response(self) -> StepsWithRawResponse: + return StepsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> StepsWithStreamingResponse: + return StepsWithStreamingResponse(self) + + def retrieve( + self, + step_id: str, + *, + thread_id: str, + run_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> RunStep: + """ + Retrieves a run step. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + if not run_id: + raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") + if not step_id: + raise ValueError(f"Expected a non-empty value for `step_id` but received {step_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._get( + f"/threads/{thread_id}/runs/{run_id}/steps/{step_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=RunStep, + ) + + def list( + self, + run_id: str, + *, + thread_id: str, + after: str | NotGiven = NOT_GIVEN, + before: str | NotGiven = NOT_GIVEN, + limit: int | NotGiven = NOT_GIVEN, + order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> SyncCursorPage[RunStep]: + """ + Returns a list of run steps belonging to a run. + + Args: + after: A cursor for use in pagination. `after` is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include after=obj_foo in order to + fetch the next page of the list. + + before: A cursor for use in pagination. 
`before` is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include before=obj_foo in order to + fetch the previous page of the list. + + limit: A limit on the number of objects to be returned. Limit can range between 1 and + 100, and the default is 20. + + order: Sort order by the `created_at` timestamp of the objects. `asc` for ascending + order and `desc` for descending order. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + if not run_id: + raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._get_api_list( + f"/threads/{thread_id}/runs/{run_id}/steps", + page=SyncCursorPage[RunStep], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "after": after, + "before": before, + "limit": limit, + "order": order, + }, + step_list_params.StepListParams, + ), + ), + model=RunStep, + ) + + +class AsyncSteps(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncStepsWithRawResponse: + return AsyncStepsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncStepsWithStreamingResponse: + return AsyncStepsWithStreamingResponse(self) + + async def retrieve( + self, + step_id: str, + *, + thread_id: str, + run_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> RunStep: + """ + Retrieves a run step. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + if not run_id: + raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") + if not step_id: + raise ValueError(f"Expected a non-empty value for `step_id` but received {step_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return await self._get( + f"/threads/{thread_id}/runs/{run_id}/steps/{step_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=RunStep, + ) + + def list( + self, + run_id: str, + *, + thread_id: str, + after: str | NotGiven = NOT_GIVEN, + before: str | NotGiven = NOT_GIVEN, + limit: int | NotGiven = NOT_GIVEN, + order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncPaginator[RunStep, AsyncCursorPage[RunStep]]: + """ + Returns a list of run steps belonging to a run. 
+ + Args: + after: A cursor for use in pagination. `after` is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include after=obj_foo in order to + fetch the next page of the list. + + before: A cursor for use in pagination. `before` is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include before=obj_foo in order to + fetch the previous page of the list. + + limit: A limit on the number of objects to be returned. Limit can range between 1 and + 100, and the default is 20. + + order: Sort order by the `created_at` timestamp of the objects. `asc` for ascending + order and `desc` for descending order. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + if not run_id: + raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._get_api_list( + f"/threads/{thread_id}/runs/{run_id}/steps", + page=AsyncCursorPage[RunStep], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "after": after, + "before": before, + "limit": limit, + "order": order, + }, + step_list_params.StepListParams, + ), + ), + model=RunStep, + ) + + +class StepsWithRawResponse: + def __init__(self, steps: Steps) -> None: + self._steps = steps + + self.retrieve = _legacy_response.to_raw_response_wrapper( + steps.retrieve, + ) + 
self.list = _legacy_response.to_raw_response_wrapper( + steps.list, + ) + + +class AsyncStepsWithRawResponse: + def __init__(self, steps: AsyncSteps) -> None: + self._steps = steps + + self.retrieve = _legacy_response.async_to_raw_response_wrapper( + steps.retrieve, + ) + self.list = _legacy_response.async_to_raw_response_wrapper( + steps.list, + ) + + +class StepsWithStreamingResponse: + def __init__(self, steps: Steps) -> None: + self._steps = steps + + self.retrieve = to_streamed_response_wrapper( + steps.retrieve, + ) + self.list = to_streamed_response_wrapper( + steps.list, + ) + + +class AsyncStepsWithStreamingResponse: + def __init__(self, steps: AsyncSteps) -> None: + self._steps = steps + + self.retrieve = async_to_streamed_response_wrapper( + steps.retrieve, + ) + self.list = async_to_streamed_response_wrapper( + steps.list, + ) diff --git a/.venv/Lib/site-packages/openai/resources/beta/threads/threads.py b/.venv/Lib/site-packages/openai/resources/beta/threads/threads.py new file mode 100644 index 00000000..24552726 --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/beta/threads/threads.py @@ -0,0 +1,2130 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union, Iterable, Optional, overload +from functools import partial +from typing_extensions import Literal + +import httpx + +from .... 
import _legacy_response +from .runs import ( + Runs, + AsyncRuns, + RunsWithRawResponse, + AsyncRunsWithRawResponse, + RunsWithStreamingResponse, + AsyncRunsWithStreamingResponse, +) +from .messages import ( + Messages, + AsyncMessages, + MessagesWithRawResponse, + AsyncMessagesWithRawResponse, + MessagesWithStreamingResponse, + AsyncMessagesWithStreamingResponse, +) +from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ...._utils import ( + required_args, + maybe_transform, + async_maybe_transform, +) +from .runs.runs import Runs, AsyncRuns +from ...._compat import cached_property +from ...._resource import SyncAPIResource, AsyncAPIResource +from ...._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper +from ...._streaming import Stream, AsyncStream +from ....types.beta import ( + thread_create_params, + thread_update_params, + thread_create_and_run_params, +) +from ...._base_client import ( + make_request_options, +) +from ....lib.streaming import ( + AssistantEventHandler, + AssistantEventHandlerT, + AssistantStreamManager, + AsyncAssistantEventHandler, + AsyncAssistantEventHandlerT, + AsyncAssistantStreamManager, +) +from ....types.beta.thread import Thread +from ....types.beta.threads.run import Run +from ....types.beta.thread_deleted import ThreadDeleted +from ....types.beta.assistant_stream_event import AssistantStreamEvent +from ....types.beta.assistant_tool_choice_option_param import AssistantToolChoiceOptionParam +from ....types.beta.assistant_response_format_option_param import AssistantResponseFormatOptionParam + +__all__ = ["Threads", "AsyncThreads"] + + +class Threads(SyncAPIResource): + @cached_property + def runs(self) -> Runs: + return Runs(self._client) + + @cached_property + def messages(self) -> Messages: + return Messages(self._client) + + @cached_property + def with_raw_response(self) -> ThreadsWithRawResponse: + return ThreadsWithRawResponse(self) + + @cached_property + def 
with_streaming_response(self) -> ThreadsWithStreamingResponse: + return ThreadsWithStreamingResponse(self) + + def create( + self, + *, + messages: Iterable[thread_create_params.Message] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + tool_resources: Optional[thread_create_params.ToolResources] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Thread: + """ + Create a thread. + + Args: + messages: A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + start the thread with. + + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + tool_resources: A set of resources that are made available to the assistant's tools in this + thread. The resources are specific to the type of tool. For example, the + `code_interpreter` tool requires a list of file IDs, while the `file_search` + tool requires a list of vector store IDs. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._post( + "/threads", + body=maybe_transform( + { + "messages": messages, + "metadata": metadata, + "tool_resources": tool_resources, + }, + thread_create_params.ThreadCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Thread, + ) + + def retrieve( + self, + thread_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Thread: + """ + Retrieves a thread. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._get( + f"/threads/{thread_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Thread, + ) + + def update( + self, + thread_id: str, + *, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + tool_resources: Optional[thread_update_params.ToolResources] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Thread: + """ + Modifies a thread. + + Args: + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + tool_resources: A set of resources that are made available to the assistant's tools in this + thread. The resources are specific to the type of tool. For example, the + `code_interpreter` tool requires a list of file IDs, while the `file_search` + tool requires a list of vector store IDs. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._post( + f"/threads/{thread_id}", + body=maybe_transform( + { + "metadata": metadata, + "tool_resources": tool_resources, + }, + thread_update_params.ThreadUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Thread, + ) + + def delete( + self, + thread_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ThreadDeleted: + """ + Delete a thread. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._delete( + f"/threads/{thread_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ThreadDeleted, + ) + + @overload + def create_and_run( + self, + *, + assistant_id: str, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tool_resources: Optional[thread_create_and_run_params.ToolResources] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = 
NOT_GIVEN, + truncation_strategy: Optional[thread_create_and_run_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run: + """ + Create a thread and run it in one request. + + Args: + assistant_id: The ID of the + [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + execute this run. + + instructions: Override the default system message of the assistant. This is useful for + modifying the behavior on a per-run basis. + + max_completion_tokens: The maximum number of completion tokens that may be used over the course of the + run. The run will make a best effort to use only the number of completion tokens + specified, across multiple turns of the run. If the run exceeds the number of + completion tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + max_prompt_tokens: The maximum number of prompt tokens that may be used over the course of the run. + The run will make a best effort to use only the number of prompt tokens + specified, across multiple turns of the run. If the run exceeds the number of + prompt tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. 
+ + model: The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + be used to execute this run. If a value is provided here, it will override the + model associated with the assistant. If not, the model associated with the + assistant will be used. + + response_format: Specifies the format that the model must output. Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + + stream: If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + thread: If no thread is provided, an empty thread will be created. + + tool_choice: Controls which (if any) tool is called by the model. `none` means the model will + not call any tools and instead generates a message. `auto` is the default value + and means the model can pick between generating a message or calling one or more + tools. `required` means the model must call one or more tools before responding + to the user. 
Specifying a particular tool like `{"type": "file_search"}` or + `{"type": "function", "function": {"name": "my_function"}}` forces the model to + call that tool. + + tool_resources: A set of resources that are used by the assistant's tools. The resources are + specific to the type of tool. For example, the `code_interpreter` tool requires + a list of file IDs, while the `file_search` tool requires a list of vector store + IDs. + + tools: Override the tools the assistant can use for this run. This is useful for + modifying the behavior on a per-run basis. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or temperature but not both. + + truncation_strategy: Controls for how a thread will be truncated prior to the run. Use this to + control the intial context window of the run. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... 
+ + @overload + def create_and_run( + self, + *, + assistant_id: str, + stream: Literal[True], + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tool_resources: Optional[thread_create_and_run_params.ToolResources] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[thread_create_and_run_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Stream[AssistantStreamEvent]: + """ + Create a thread and run it in one request. 
+ + Args: + assistant_id: The ID of the + [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + execute this run. + + stream: If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + + instructions: Override the default system message of the assistant. This is useful for + modifying the behavior on a per-run basis. + + max_completion_tokens: The maximum number of completion tokens that may be used over the course of the + run. The run will make a best effort to use only the number of completion tokens + specified, across multiple turns of the run. If the run exceeds the number of + completion tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + max_prompt_tokens: The maximum number of prompt tokens that may be used over the course of the run. + The run will make a best effort to use only the number of prompt tokens + specified, across multiple turns of the run. If the run exceeds the number of + prompt tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + model: The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + be used to execute this run. If a value is provided here, it will override the + model associated with the assistant. If not, the model associated with the + assistant will be used. + + response_format: Specifies the format that the model must output. Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. 
+ + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + thread: If no thread is provided, an empty thread will be created. + + tool_choice: Controls which (if any) tool is called by the model. `none` means the model will + not call any tools and instead generates a message. `auto` is the default value + and means the model can pick between generating a message or calling one or more + tools. `required` means the model must call one or more tools before responding + to the user. Specifying a particular tool like `{"type": "file_search"}` or + `{"type": "function", "function": {"name": "my_function"}}` forces the model to + call that tool. + + tool_resources: A set of resources that are used by the assistant's tools. The resources are + specific to the type of tool. For example, the `code_interpreter` tool requires + a list of file IDs, while the `file_search` tool requires a list of vector store + IDs. + + tools: Override the tools the assistant can use for this run. This is useful for + modifying the behavior on a per-run basis. 
+ + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or temperature but not both. + + truncation_strategy: Controls for how a thread will be truncated prior to the run. Use this to + control the intial context window of the run. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... + + @overload + def create_and_run( + self, + *, + assistant_id: str, + stream: bool, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tool_resources: Optional[thread_create_and_run_params.ToolResources] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN, + top_p: 
Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[thread_create_and_run_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run | Stream[AssistantStreamEvent]: + """ + Create a thread and run it in one request. + + Args: + assistant_id: The ID of the + [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + execute this run. + + stream: If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + + instructions: Override the default system message of the assistant. This is useful for + modifying the behavior on a per-run basis. + + max_completion_tokens: The maximum number of completion tokens that may be used over the course of the + run. The run will make a best effort to use only the number of completion tokens + specified, across multiple turns of the run. If the run exceeds the number of + completion tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + max_prompt_tokens: The maximum number of prompt tokens that may be used over the course of the run. + The run will make a best effort to use only the number of prompt tokens + specified, across multiple turns of the run. If the run exceeds the number of + prompt tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + metadata: Set of 16 key-value pairs that can be attached to an object. 
This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + model: The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + be used to execute this run. If a value is provided here, it will override the + model associated with the assistant. If not, the model associated with the + assistant will be used. + + response_format: Specifies the format that the model must output. Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + thread: If no thread is provided, an empty thread will be created. + + tool_choice: Controls which (if any) tool is called by the model. `none` means the model will + not call any tools and instead generates a message. `auto` is the default value + and means the model can pick between generating a message or calling one or more + tools. `required` means the model must call one or more tools before responding + to the user. 
Specifying a particular tool like `{"type": "file_search"}` or + `{"type": "function", "function": {"name": "my_function"}}` forces the model to + call that tool. + + tool_resources: A set of resources that are used by the assistant's tools. The resources are + specific to the type of tool. For example, the `code_interpreter` tool requires + a list of file IDs, while the `file_search` tool requires a list of vector store + IDs. + + tools: Override the tools the assistant can use for this run. This is useful for + modifying the behavior on a per-run basis. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or temperature but not both. + + truncation_strategy: Controls for how a thread will be truncated prior to the run. Use this to + control the intial context window of the run. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... 
+ + @required_args(["assistant_id"], ["assistant_id", "stream"]) + def create_and_run( + self, + *, + assistant_id: str, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tool_resources: Optional[thread_create_and_run_params.ToolResources] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[thread_create_and_run_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run | Stream[AssistantStreamEvent]: + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._post( + "/threads/runs", + body=maybe_transform( + { + "assistant_id": assistant_id, + "instructions": instructions, + "max_completion_tokens": max_completion_tokens, + "max_prompt_tokens": max_prompt_tokens, + "metadata": metadata, + "model": model, + "response_format": response_format, + "stream": stream, + "temperature": temperature, + "thread": thread, + "tool_choice": tool_choice, + "tool_resources": tool_resources, + "tools": tools, + "top_p": top_p, + "truncation_strategy": truncation_strategy, + }, + thread_create_and_run_params.ThreadCreateAndRunParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Run, + stream=stream or False, + stream_cls=Stream[AssistantStreamEvent], + ) + + def create_and_run_poll( + self, + *, + assistant_id: str, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + 
thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tool_resources: Optional[thread_create_and_run_params.ToolResources] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[thread_create_and_run_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + poll_interval_ms: int | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run: + """ + A helper to create a thread, start a run and then poll for a terminal state. 
+ More information on Run lifecycles can be found here: + https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + """ + run = self.create_and_run( + assistant_id=assistant_id, + instructions=instructions, + max_completion_tokens=max_completion_tokens, + max_prompt_tokens=max_prompt_tokens, + metadata=metadata, + model=model, + response_format=response_format, + temperature=temperature, + stream=False, + thread=thread, + tool_resources=tool_resources, + tool_choice=tool_choice, + truncation_strategy=truncation_strategy, + top_p=top_p, + tools=tools, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + return self.runs.poll(run.id, run.thread_id, extra_headers, extra_query, extra_body, timeout, poll_interval_ms) + + @overload + def create_and_run_stream( + self, + *, + assistant_id: str, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tool_resources: Optional[thread_create_and_run_params.ToolResources] | NotGiven = NOT_GIVEN, + tools: 
Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[thread_create_and_run_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AssistantStreamManager[AssistantEventHandler]: + """Create a thread and stream the run back""" + ... + + @overload + def create_and_run_stream( + self, + *, + assistant_id: str, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tool_resources: Optional[thread_create_and_run_params.ToolResources] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN, + top_p: 
Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[thread_create_and_run_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + event_handler: AssistantEventHandlerT, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AssistantStreamManager[AssistantEventHandlerT]: + """Create a thread and stream the run back""" + ... + + def create_and_run_stream( + self, + *, + assistant_id: str, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tool_resources: Optional[thread_create_and_run_params.ToolResources] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: 
Optional[thread_create_and_run_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + event_handler: AssistantEventHandlerT | None = None, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AssistantStreamManager[AssistantEventHandler] | AssistantStreamManager[AssistantEventHandlerT]: + """Create a thread and stream the run back""" + extra_headers = { + "OpenAI-Beta": "assistants=v2", + "X-Stainless-Stream-Helper": "threads.create_and_run_stream", + "X-Stainless-Custom-Event-Handler": "true" if event_handler else "false", + **(extra_headers or {}), + } + make_request = partial( + self._post, + "/threads/runs", + body=maybe_transform( + { + "assistant_id": assistant_id, + "instructions": instructions, + "max_completion_tokens": max_completion_tokens, + "max_prompt_tokens": max_prompt_tokens, + "metadata": metadata, + "model": model, + "response_format": response_format, + "temperature": temperature, + "tool_choice": tool_choice, + "stream": True, + "thread": thread, + "tools": tools, + "tool": tool_resources, + "truncation_strategy": truncation_strategy, + "top_p": top_p, + }, + thread_create_and_run_params.ThreadCreateAndRunParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Run, + stream=True, + stream_cls=Stream[AssistantStreamEvent], + ) + return AssistantStreamManager(make_request, event_handler=event_handler or AssistantEventHandler()) + + +class AsyncThreads(AsyncAPIResource): + @cached_property + def runs(self) -> AsyncRuns: + return AsyncRuns(self._client) + + @cached_property + def messages(self) -> 
AsyncMessages: + return AsyncMessages(self._client) + + @cached_property + def with_raw_response(self) -> AsyncThreadsWithRawResponse: + return AsyncThreadsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncThreadsWithStreamingResponse: + return AsyncThreadsWithStreamingResponse(self) + + async def create( + self, + *, + messages: Iterable[thread_create_params.Message] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + tool_resources: Optional[thread_create_params.ToolResources] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Thread: + """ + Create a thread. + + Args: + messages: A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + start the thread with. + + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + tool_resources: A set of resources that are made available to the assistant's tools in this + thread. The resources are specific to the type of tool. For example, the + `code_interpreter` tool requires a list of file IDs, while the `file_search` + tool requires a list of vector store IDs. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return await self._post( + "/threads", + body=await async_maybe_transform( + { + "messages": messages, + "metadata": metadata, + "tool_resources": tool_resources, + }, + thread_create_params.ThreadCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Thread, + ) + + async def retrieve( + self, + thread_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Thread: + """ + Retrieves a thread. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return await self._get( + f"/threads/{thread_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Thread, + ) + + async def update( + self, + thread_id: str, + *, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + tool_resources: Optional[thread_update_params.ToolResources] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Thread: + """ + Modifies a thread. + + Args: + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + tool_resources: A set of resources that are made available to the assistant's tools in this + thread. The resources are specific to the type of tool. For example, the + `code_interpreter` tool requires a list of file IDs, while the `file_search` + tool requires a list of vector store IDs. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return await self._post( + f"/threads/{thread_id}", + body=await async_maybe_transform( + { + "metadata": metadata, + "tool_resources": tool_resources, + }, + thread_update_params.ThreadUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Thread, + ) + + async def delete( + self, + thread_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ThreadDeleted: + """ + Delete a thread. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not thread_id: + raise ValueError(f"Expected a non-empty value for `thread_id` but received {thread_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return await self._delete( + f"/threads/{thread_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ThreadDeleted, + ) + + @overload + async def create_and_run( + self, + *, + assistant_id: str, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tool_resources: Optional[thread_create_and_run_params.ToolResources] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | 
NotGiven = NOT_GIVEN, + truncation_strategy: Optional[thread_create_and_run_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run: + """ + Create a thread and run it in one request. + + Args: + assistant_id: The ID of the + [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + execute this run. + + instructions: Override the default system message of the assistant. This is useful for + modifying the behavior on a per-run basis. + + max_completion_tokens: The maximum number of completion tokens that may be used over the course of the + run. The run will make a best effort to use only the number of completion tokens + specified, across multiple turns of the run. If the run exceeds the number of + completion tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + max_prompt_tokens: The maximum number of prompt tokens that may be used over the course of the run. + The run will make a best effort to use only the number of prompt tokens + specified, across multiple turns of the run. If the run exceeds the number of + prompt tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. 
+ + model: The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + be used to execute this run. If a value is provided here, it will override the + model associated with the assistant. If not, the model associated with the + assistant will be used. + + response_format: Specifies the format that the model must output. Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + + stream: If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + thread: If no thread is provided, an empty thread will be created. + + tool_choice: Controls which (if any) tool is called by the model. `none` means the model will + not call any tools and instead generates a message. `auto` is the default value + and means the model can pick between generating a message or calling one or more + tools. `required` means the model must call one or more tools before responding + to the user. 
Specifying a particular tool like `{"type": "file_search"}` or + `{"type": "function", "function": {"name": "my_function"}}` forces the model to + call that tool. + + tool_resources: A set of resources that are used by the assistant's tools. The resources are + specific to the type of tool. For example, the `code_interpreter` tool requires + a list of file IDs, while the `file_search` tool requires a list of vector store + IDs. + + tools: Override the tools the assistant can use for this run. This is useful for + modifying the behavior on a per-run basis. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or temperature but not both. + + truncation_strategy: Controls for how a thread will be truncated prior to the run. Use this to + control the intial context window of the run. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... 
+ + @overload + async def create_and_run( + self, + *, + assistant_id: str, + stream: Literal[True], + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tool_resources: Optional[thread_create_and_run_params.ToolResources] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[thread_create_and_run_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncStream[AssistantStreamEvent]: + """ + Create a thread and run it in one request. 
+ + Args: + assistant_id: The ID of the + [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + execute this run. + + stream: If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + + instructions: Override the default system message of the assistant. This is useful for + modifying the behavior on a per-run basis. + + max_completion_tokens: The maximum number of completion tokens that may be used over the course of the + run. The run will make a best effort to use only the number of completion tokens + specified, across multiple turns of the run. If the run exceeds the number of + completion tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + max_prompt_tokens: The maximum number of prompt tokens that may be used over the course of the run. + The run will make a best effort to use only the number of prompt tokens + specified, across multiple turns of the run. If the run exceeds the number of + prompt tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + model: The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + be used to execute this run. If a value is provided here, it will override the + model associated with the assistant. If not, the model associated with the + assistant will be used. + + response_format: Specifies the format that the model must output. Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. 
+ + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + thread: If no thread is provided, an empty thread will be created. + + tool_choice: Controls which (if any) tool is called by the model. `none` means the model will + not call any tools and instead generates a message. `auto` is the default value + and means the model can pick between generating a message or calling one or more + tools. `required` means the model must call one or more tools before responding + to the user. Specifying a particular tool like `{"type": "file_search"}` or + `{"type": "function", "function": {"name": "my_function"}}` forces the model to + call that tool. + + tool_resources: A set of resources that are used by the assistant's tools. The resources are + specific to the type of tool. For example, the `code_interpreter` tool requires + a list of file IDs, while the `file_search` tool requires a list of vector store + IDs. + + tools: Override the tools the assistant can use for this run. This is useful for + modifying the behavior on a per-run basis. 
+ + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or temperature but not both. + + truncation_strategy: Controls for how a thread will be truncated prior to the run. Use this to + control the intial context window of the run. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... + + @overload + async def create_and_run( + self, + *, + assistant_id: str, + stream: bool, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tool_resources: Optional[thread_create_and_run_params.ToolResources] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN, + top_p: 
Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[thread_create_and_run_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run | AsyncStream[AssistantStreamEvent]: + """ + Create a thread and run it in one request. + + Args: + assistant_id: The ID of the + [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + execute this run. + + stream: If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + + instructions: Override the default system message of the assistant. This is useful for + modifying the behavior on a per-run basis. + + max_completion_tokens: The maximum number of completion tokens that may be used over the course of the + run. The run will make a best effort to use only the number of completion tokens + specified, across multiple turns of the run. If the run exceeds the number of + completion tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + max_prompt_tokens: The maximum number of prompt tokens that may be used over the course of the run. + The run will make a best effort to use only the number of prompt tokens + specified, across multiple turns of the run. If the run exceeds the number of + prompt tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + + metadata: Set of 16 key-value pairs that can be attached to an object. 
This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + model: The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + be used to execute this run. If a value is provided here, it will override the + model associated with the assistant. If not, the model associated with the + assistant will be used. + + response_format: Specifies the format that the model must output. Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + thread: If no thread is provided, an empty thread will be created. + + tool_choice: Controls which (if any) tool is called by the model. `none` means the model will + not call any tools and instead generates a message. `auto` is the default value + and means the model can pick between generating a message or calling one or more + tools. `required` means the model must call one or more tools before responding + to the user. 
Specifying a particular tool like `{"type": "file_search"}` or + `{"type": "function", "function": {"name": "my_function"}}` forces the model to + call that tool. + + tool_resources: A set of resources that are used by the assistant's tools. The resources are + specific to the type of tool. For example, the `code_interpreter` tool requires + a list of file IDs, while the `file_search` tool requires a list of vector store + IDs. + + tools: Override the tools the assistant can use for this run. This is useful for + modifying the behavior on a per-run basis. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or temperature but not both. + + truncation_strategy: Controls for how a thread will be truncated prior to the run. Use this to + control the intial context window of the run. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... 
+ + @required_args(["assistant_id"], ["assistant_id", "stream"]) + async def create_and_run( + self, + *, + assistant_id: str, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tool_resources: Optional[thread_create_and_run_params.ToolResources] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[thread_create_and_run_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run | AsyncStream[AssistantStreamEvent]: + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return await self._post( + "/threads/runs", + body=await async_maybe_transform( + { + "assistant_id": assistant_id, + "instructions": instructions, + "max_completion_tokens": max_completion_tokens, + "max_prompt_tokens": max_prompt_tokens, + "metadata": metadata, + "model": model, + "response_format": response_format, + "stream": stream, + "temperature": temperature, + "thread": thread, + "tool_choice": tool_choice, + "tool_resources": tool_resources, + "tools": tools, + "top_p": top_p, + "truncation_strategy": truncation_strategy, + }, + thread_create_and_run_params.ThreadCreateAndRunParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Run, + stream=stream or False, + stream_cls=AsyncStream[AssistantStreamEvent], + ) + + async def create_and_run_poll( + self, + *, + assistant_id: str, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: 
Optional[float] | NotGiven = NOT_GIVEN, + thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tool_resources: Optional[thread_create_and_run_params.ToolResources] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[thread_create_and_run_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + poll_interval_ms: int | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Run: + """ + A helper to create a thread, start a run and then poll for a terminal state. 
+ More information on Run lifecycles can be found here: + https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + """ + run = await self.create_and_run( + assistant_id=assistant_id, + instructions=instructions, + max_completion_tokens=max_completion_tokens, + max_prompt_tokens=max_prompt_tokens, + metadata=metadata, + model=model, + response_format=response_format, + temperature=temperature, + stream=False, + thread=thread, + tool_resources=tool_resources, + tool_choice=tool_choice, + truncation_strategy=truncation_strategy, + top_p=top_p, + tools=tools, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + return await self.runs.poll( + run.id, run.thread_id, extra_headers, extra_query, extra_body, timeout, poll_interval_ms + ) + + @overload + def create_and_run_stream( + self, + *, + assistant_id: str, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tool_resources: Optional[thread_create_and_run_params.ToolResources] | NotGiven = NOT_GIVEN, + tools: 
Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[thread_create_and_run_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncAssistantStreamManager[AsyncAssistantEventHandler]: + """Create a thread and stream the run back""" + ... + + @overload + def create_and_run_stream( + self, + *, + assistant_id: str, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tool_resources: Optional[thread_create_and_run_params.ToolResources] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN, + 
top_p: Optional[float] | NotGiven = NOT_GIVEN, + truncation_strategy: Optional[thread_create_and_run_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + event_handler: AsyncAssistantEventHandlerT, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncAssistantStreamManager[AsyncAssistantEventHandlerT]: + """Create a thread and stream the run back""" + ... + + def create_and_run_stream( + self, + *, + assistant_id: str, + instructions: Optional[str] | NotGiven = NOT_GIVEN, + max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN, + max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + | NotGiven = NOT_GIVEN, + response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN, + tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN, + tool_resources: Optional[thread_create_and_run_params.ToolResources] | NotGiven = NOT_GIVEN, + tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + 
truncation_strategy: Optional[thread_create_and_run_params.TruncationStrategy] | NotGiven = NOT_GIVEN, + event_handler: AsyncAssistantEventHandlerT | None = None, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ( + AsyncAssistantStreamManager[AsyncAssistantEventHandler] + | AsyncAssistantStreamManager[AsyncAssistantEventHandlerT] + ): + """Create a thread and stream the run back""" + extra_headers = { + "OpenAI-Beta": "assistants=v2", + "X-Stainless-Stream-Helper": "threads.create_and_run_stream", + "X-Stainless-Custom-Event-Handler": "true" if event_handler else "false", + **(extra_headers or {}), + } + request = self._post( + "/threads/runs", + body=maybe_transform( + { + "assistant_id": assistant_id, + "instructions": instructions, + "max_completion_tokens": max_completion_tokens, + "max_prompt_tokens": max_prompt_tokens, + "metadata": metadata, + "model": model, + "response_format": response_format, + "temperature": temperature, + "tool_choice": tool_choice, + "stream": True, + "thread": thread, + "tools": tools, + "tool": tool_resources, + "truncation_strategy": truncation_strategy, + "top_p": top_p, + }, + thread_create_and_run_params.ThreadCreateAndRunParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Run, + stream=True, + stream_cls=AsyncStream[AssistantStreamEvent], + ) + return AsyncAssistantStreamManager(request, event_handler=event_handler or AsyncAssistantEventHandler()) + + +class ThreadsWithRawResponse: + def __init__(self, threads: Threads) -> None: + self._threads = threads + + self.create = 
_legacy_response.to_raw_response_wrapper( + threads.create, + ) + self.retrieve = _legacy_response.to_raw_response_wrapper( + threads.retrieve, + ) + self.update = _legacy_response.to_raw_response_wrapper( + threads.update, + ) + self.delete = _legacy_response.to_raw_response_wrapper( + threads.delete, + ) + self.create_and_run = _legacy_response.to_raw_response_wrapper( + threads.create_and_run, + ) + + @cached_property + def runs(self) -> RunsWithRawResponse: + return RunsWithRawResponse(self._threads.runs) + + @cached_property + def messages(self) -> MessagesWithRawResponse: + return MessagesWithRawResponse(self._threads.messages) + + +class AsyncThreadsWithRawResponse: + def __init__(self, threads: AsyncThreads) -> None: + self._threads = threads + + self.create = _legacy_response.async_to_raw_response_wrapper( + threads.create, + ) + self.retrieve = _legacy_response.async_to_raw_response_wrapper( + threads.retrieve, + ) + self.update = _legacy_response.async_to_raw_response_wrapper( + threads.update, + ) + self.delete = _legacy_response.async_to_raw_response_wrapper( + threads.delete, + ) + self.create_and_run = _legacy_response.async_to_raw_response_wrapper( + threads.create_and_run, + ) + + @cached_property + def runs(self) -> AsyncRunsWithRawResponse: + return AsyncRunsWithRawResponse(self._threads.runs) + + @cached_property + def messages(self) -> AsyncMessagesWithRawResponse: + return AsyncMessagesWithRawResponse(self._threads.messages) + + +class ThreadsWithStreamingResponse: + def __init__(self, threads: Threads) -> None: + self._threads = threads + + self.create = to_streamed_response_wrapper( + threads.create, + ) + self.retrieve = to_streamed_response_wrapper( + threads.retrieve, + ) + self.update = to_streamed_response_wrapper( + threads.update, + ) + self.delete = to_streamed_response_wrapper( + threads.delete, + ) + self.create_and_run = to_streamed_response_wrapper( + threads.create_and_run, + ) + + @cached_property + def runs(self) -> 
RunsWithStreamingResponse: + return RunsWithStreamingResponse(self._threads.runs) + + @cached_property + def messages(self) -> MessagesWithStreamingResponse: + return MessagesWithStreamingResponse(self._threads.messages) + + +class AsyncThreadsWithStreamingResponse: + def __init__(self, threads: AsyncThreads) -> None: + self._threads = threads + + self.create = async_to_streamed_response_wrapper( + threads.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + threads.retrieve, + ) + self.update = async_to_streamed_response_wrapper( + threads.update, + ) + self.delete = async_to_streamed_response_wrapper( + threads.delete, + ) + self.create_and_run = async_to_streamed_response_wrapper( + threads.create_and_run, + ) + + @cached_property + def runs(self) -> AsyncRunsWithStreamingResponse: + return AsyncRunsWithStreamingResponse(self._threads.runs) + + @cached_property + def messages(self) -> AsyncMessagesWithStreamingResponse: + return AsyncMessagesWithStreamingResponse(self._threads.messages) diff --git a/.venv/Lib/site-packages/openai/resources/beta/vector_stores/__init__.py b/.venv/Lib/site-packages/openai/resources/beta/vector_stores/__init__.py new file mode 100644 index 00000000..96ae16c3 --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/beta/vector_stores/__init__.py @@ -0,0 +1,47 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from .files import ( + Files, + AsyncFiles, + FilesWithRawResponse, + AsyncFilesWithRawResponse, + FilesWithStreamingResponse, + AsyncFilesWithStreamingResponse, +) +from .file_batches import ( + FileBatches, + AsyncFileBatches, + FileBatchesWithRawResponse, + AsyncFileBatchesWithRawResponse, + FileBatchesWithStreamingResponse, + AsyncFileBatchesWithStreamingResponse, +) +from .vector_stores import ( + VectorStores, + AsyncVectorStores, + VectorStoresWithRawResponse, + AsyncVectorStoresWithRawResponse, + VectorStoresWithStreamingResponse, + AsyncVectorStoresWithStreamingResponse, +) + +__all__ = [ + "Files", + "AsyncFiles", + "FilesWithRawResponse", + "AsyncFilesWithRawResponse", + "FilesWithStreamingResponse", + "AsyncFilesWithStreamingResponse", + "FileBatches", + "AsyncFileBatches", + "FileBatchesWithRawResponse", + "AsyncFileBatchesWithRawResponse", + "FileBatchesWithStreamingResponse", + "AsyncFileBatchesWithStreamingResponse", + "VectorStores", + "AsyncVectorStores", + "VectorStoresWithRawResponse", + "AsyncVectorStoresWithRawResponse", + "VectorStoresWithStreamingResponse", + "AsyncVectorStoresWithStreamingResponse", +] diff --git a/.venv/Lib/site-packages/openai/resources/beta/vector_stores/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/beta/vector_stores/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..de8f357c Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/beta/vector_stores/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/beta/vector_stores/__pycache__/file_batches.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/beta/vector_stores/__pycache__/file_batches.cpython-311.pyc new file mode 100644 index 00000000..e36e4b06 Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/beta/vector_stores/__pycache__/file_batches.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/openai/resources/beta/vector_stores/__pycache__/files.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/beta/vector_stores/__pycache__/files.cpython-311.pyc new file mode 100644 index 00000000..41f6f131 Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/beta/vector_stores/__pycache__/files.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/beta/vector_stores/__pycache__/vector_stores.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/beta/vector_stores/__pycache__/vector_stores.cpython-311.pyc new file mode 100644 index 00000000..63433c40 Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/beta/vector_stores/__pycache__/vector_stores.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/beta/vector_stores/file_batches.py b/.venv/Lib/site-packages/openai/resources/beta/vector_stores/file_batches.py new file mode 100644 index 00000000..f1ced517 --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/beta/vector_stores/file_batches.py @@ -0,0 +1,736 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import asyncio +from typing import List, Iterable +from typing_extensions import Literal +from concurrent.futures import Future, ThreadPoolExecutor, as_completed + +import httpx +import sniffio + +from .... 
import _legacy_response +from ....types import FileObject +from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven, FileTypes +from ...._utils import ( + is_given, + maybe_transform, + async_maybe_transform, +) +from ...._compat import cached_property +from ...._resource import SyncAPIResource, AsyncAPIResource +from ...._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper +from ....pagination import SyncCursorPage, AsyncCursorPage +from ...._base_client import ( + AsyncPaginator, + make_request_options, +) +from ....types.beta.vector_stores import file_batch_create_params, file_batch_list_files_params +from ....types.beta.vector_stores.vector_store_file import VectorStoreFile +from ....types.beta.vector_stores.vector_store_file_batch import VectorStoreFileBatch + +__all__ = ["FileBatches", "AsyncFileBatches"] + + +class FileBatches(SyncAPIResource): + @cached_property + def with_raw_response(self) -> FileBatchesWithRawResponse: + return FileBatchesWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> FileBatchesWithStreamingResponse: + return FileBatchesWithStreamingResponse(self) + + def create( + self, + vector_store_id: str, + *, + file_ids: List[str], + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> VectorStoreFileBatch: + """ + Create a vector store file batch. + + Args: + file_ids: A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that + the vector store should use. Useful for tools like `file_search` that can access + files. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not vector_store_id: + raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._post( + f"/vector_stores/{vector_store_id}/file_batches", + body=maybe_transform({"file_ids": file_ids}, file_batch_create_params.FileBatchCreateParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=VectorStoreFileBatch, + ) + + def retrieve( + self, + batch_id: str, + *, + vector_store_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> VectorStoreFileBatch: + """ + Retrieves a vector store file batch. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not vector_store_id: + raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}") + if not batch_id: + raise ValueError(f"Expected a non-empty value for `batch_id` but received {batch_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._get( + f"/vector_stores/{vector_store_id}/file_batches/{batch_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=VectorStoreFileBatch, + ) + + def cancel( + self, + batch_id: str, + *, + vector_store_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> VectorStoreFileBatch: + """Cancel a vector store file batch. + + This attempts to cancel the processing of + files in this batch as soon as possible. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not vector_store_id: + raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}") + if not batch_id: + raise ValueError(f"Expected a non-empty value for `batch_id` but received {batch_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._post( + f"/vector_stores/{vector_store_id}/file_batches/{batch_id}/cancel", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=VectorStoreFileBatch, + ) + + def create_and_poll( + self, + vector_store_id: str, + *, + file_ids: List[str], + poll_interval_ms: int | NotGiven = NOT_GIVEN, + ) -> VectorStoreFileBatch: + """Create a vector store batch and poll until all files have been processed.""" + batch = self.create( + vector_store_id=vector_store_id, + file_ids=file_ids, + ) + # TODO: don't poll unless necessary?? + return self.poll( + batch.id, + vector_store_id=vector_store_id, + poll_interval_ms=poll_interval_ms, + ) + + def list_files( + self, + batch_id: str, + *, + vector_store_id: str, + after: str | NotGiven = NOT_GIVEN, + before: str | NotGiven = NOT_GIVEN, + filter: Literal["in_progress", "completed", "failed", "cancelled"] | NotGiven = NOT_GIVEN, + limit: int | NotGiven = NOT_GIVEN, + order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> SyncCursorPage[VectorStoreFile]: + """ + Returns a list of vector store files in a batch. + + Args: + after: A cursor for use in pagination. `after` is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include after=obj_foo in order to + fetch the next page of the list. + + before: A cursor for use in pagination. `before` is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include before=obj_foo in order to + fetch the previous page of the list. + + filter: Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. + + limit: A limit on the number of objects to be returned. Limit can range between 1 and + 100, and the default is 20. + + order: Sort order by the `created_at` timestamp of the objects. `asc` for ascending + order and `desc` for descending order. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not vector_store_id: + raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}") + if not batch_id: + raise ValueError(f"Expected a non-empty value for `batch_id` but received {batch_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._get_api_list( + f"/vector_stores/{vector_store_id}/file_batches/{batch_id}/files", + page=SyncCursorPage[VectorStoreFile], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "after": after, + "before": before, + "filter": filter, + "limit": limit, + "order": order, + }, + file_batch_list_files_params.FileBatchListFilesParams, + ), + ), + model=VectorStoreFile, + ) + + def poll( + self, + batch_id: str, + *, + vector_store_id: str, + poll_interval_ms: int | NotGiven = NOT_GIVEN, + ) -> VectorStoreFileBatch: + """Wait for the given file batch to be processed. + + Note: this will return even if one of the files failed to process, you need to + check batch.file_counts.failed_count to handle this case. 
+ """ + headers: dict[str, str] = {"X-Stainless-Poll-Helper": "true"} + if is_given(poll_interval_ms): + headers["X-Stainless-Custom-Poll-Interval"] = str(poll_interval_ms) + + while True: + response = self.with_raw_response.retrieve( + batch_id, + vector_store_id=vector_store_id, + extra_headers=headers, + ) + + batch = response.parse() + if batch.file_counts.in_progress > 0: + if not is_given(poll_interval_ms): + from_header = response.headers.get("openai-poll-after-ms") + if from_header is not None: + poll_interval_ms = int(from_header) + else: + poll_interval_ms = 1000 + + self._sleep(poll_interval_ms / 1000) + continue + + return batch + + def upload_and_poll( + self, + vector_store_id: str, + *, + files: Iterable[FileTypes], + max_concurrency: int = 5, + file_ids: List[str] = [], + poll_interval_ms: int | NotGiven = NOT_GIVEN, + ) -> VectorStoreFileBatch: + """Uploads the given files concurrently and then creates a vector store file batch. + + If you've already uploaded certain files that you want to include in this batch + then you can pass their IDs through the `file_ids` argument. + + By default, if any file upload fails then an exception will be eagerly raised. + + The number of concurrency uploads is configurable using the `max_concurrency` + parameter. + + Note: this method only supports `asyncio` or `trio` as the backing async + runtime. 
+ """ + results: list[FileObject] = [] + + with ThreadPoolExecutor(max_workers=max_concurrency) as executor: + futures: list[Future[FileObject]] = [ + executor.submit( + self._client.files.create, + file=file, + purpose="assistants", + ) + for file in files + ] + + for future in as_completed(futures): + exc = future.exception() + if exc: + raise exc + + results.append(future.result()) + + batch = self.create_and_poll( + vector_store_id=vector_store_id, + file_ids=[*file_ids, *(f.id for f in results)], + poll_interval_ms=poll_interval_ms, + ) + return batch + + +class AsyncFileBatches(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncFileBatchesWithRawResponse: + return AsyncFileBatchesWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncFileBatchesWithStreamingResponse: + return AsyncFileBatchesWithStreamingResponse(self) + + async def create( + self, + vector_store_id: str, + *, + file_ids: List[str], + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> VectorStoreFileBatch: + """ + Create a vector store file batch. + + Args: + file_ids: A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that + the vector store should use. Useful for tools like `file_search` that can access + files. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not vector_store_id: + raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return await self._post( + f"/vector_stores/{vector_store_id}/file_batches", + body=await async_maybe_transform({"file_ids": file_ids}, file_batch_create_params.FileBatchCreateParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=VectorStoreFileBatch, + ) + + async def retrieve( + self, + batch_id: str, + *, + vector_store_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> VectorStoreFileBatch: + """ + Retrieves a vector store file batch. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not vector_store_id: + raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}") + if not batch_id: + raise ValueError(f"Expected a non-empty value for `batch_id` but received {batch_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return await self._get( + f"/vector_stores/{vector_store_id}/file_batches/{batch_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=VectorStoreFileBatch, + ) + + async def cancel( + self, + batch_id: str, + *, + vector_store_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> VectorStoreFileBatch: + """Cancel a vector store file batch. + + This attempts to cancel the processing of + files in this batch as soon as possible. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not vector_store_id: + raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}") + if not batch_id: + raise ValueError(f"Expected a non-empty value for `batch_id` but received {batch_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return await self._post( + f"/vector_stores/{vector_store_id}/file_batches/{batch_id}/cancel", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=VectorStoreFileBatch, + ) + + async def create_and_poll( + self, + vector_store_id: str, + *, + file_ids: List[str], + poll_interval_ms: int | NotGiven = NOT_GIVEN, + ) -> VectorStoreFileBatch: + """Create a vector store batch and poll until all files have been processed.""" + batch = await self.create( + vector_store_id=vector_store_id, + file_ids=file_ids, + ) + # TODO: don't poll unless necessary?? + return await self.poll( + batch.id, + vector_store_id=vector_store_id, + poll_interval_ms=poll_interval_ms, + ) + + def list_files( + self, + batch_id: str, + *, + vector_store_id: str, + after: str | NotGiven = NOT_GIVEN, + before: str | NotGiven = NOT_GIVEN, + filter: Literal["in_progress", "completed", "failed", "cancelled"] | NotGiven = NOT_GIVEN, + limit: int | NotGiven = NOT_GIVEN, + order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncPaginator[VectorStoreFile, AsyncCursorPage[VectorStoreFile]]: + """ + Returns a list of vector store files in a batch. + + Args: + after: A cursor for use in pagination. `after` is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include after=obj_foo in order to + fetch the next page of the list. + + before: A cursor for use in pagination. `before` is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include before=obj_foo in order to + fetch the previous page of the list. + + filter: Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. + + limit: A limit on the number of objects to be returned. Limit can range between 1 and + 100, and the default is 20. + + order: Sort order by the `created_at` timestamp of the objects. `asc` for ascending + order and `desc` for descending order. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not vector_store_id: + raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}") + if not batch_id: + raise ValueError(f"Expected a non-empty value for `batch_id` but received {batch_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._get_api_list( + f"/vector_stores/{vector_store_id}/file_batches/{batch_id}/files", + page=AsyncCursorPage[VectorStoreFile], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "after": after, + "before": before, + "filter": filter, + "limit": limit, + "order": order, + }, + file_batch_list_files_params.FileBatchListFilesParams, + ), + ), + model=VectorStoreFile, + ) + + async def poll( + self, + batch_id: str, + *, + vector_store_id: str, + poll_interval_ms: int | NotGiven = NOT_GIVEN, + ) -> VectorStoreFileBatch: + """Wait for the given file batch to be processed. + + Note: this will return even if one of the files failed to process, you need to + check batch.file_counts.failed_count to handle this case. 
+ """ + headers: dict[str, str] = {"X-Stainless-Poll-Helper": "true"} + if is_given(poll_interval_ms): + headers["X-Stainless-Custom-Poll-Interval"] = str(poll_interval_ms) + + while True: + response = await self.with_raw_response.retrieve( + batch_id, + vector_store_id=vector_store_id, + extra_headers=headers, + ) + + batch = response.parse() + if batch.file_counts.in_progress > 0: + if not is_given(poll_interval_ms): + from_header = response.headers.get("openai-poll-after-ms") + if from_header is not None: + poll_interval_ms = int(from_header) + else: + poll_interval_ms = 1000 + + await self._sleep(poll_interval_ms / 1000) + continue + + return batch + + async def upload_and_poll( + self, + vector_store_id: str, + *, + files: Iterable[FileTypes], + max_concurrency: int = 5, + file_ids: List[str] = [], + poll_interval_ms: int | NotGiven = NOT_GIVEN, + ) -> VectorStoreFileBatch: + """Uploads the given files concurrently and then creates a vector store file batch. + + If you've already uploaded certain files that you want to include in this batch + then you can pass their IDs through the `file_ids` argument. + + By default, if any file upload fails then an exception will be eagerly raised. + + The number of concurrency uploads is configurable using the `max_concurrency` + parameter. + + Note: this method only supports `asyncio` or `trio` as the backing async + runtime. + """ + uploaded_files: list[FileObject] = [] + + async_library = sniffio.current_async_library() + + if async_library == "asyncio": + + async def asyncio_upload_file(semaphore: asyncio.Semaphore, file: FileTypes) -> None: + async with semaphore: + file_obj = await self._client.files.create( + file=file, + purpose="assistants", + ) + uploaded_files.append(file_obj) + + semaphore = asyncio.Semaphore(max_concurrency) + + tasks = [asyncio_upload_file(semaphore, file) for file in files] + + await asyncio.gather(*tasks) + elif async_library == "trio": + # We only import if the library is being used. 
+ # We support Python 3.7 so are using an older version of trio that does not have type information + import trio # type: ignore # pyright: ignore[reportMissingTypeStubs] + + async def trio_upload_file(limiter: trio.CapacityLimiter, file: FileTypes) -> None: + async with limiter: + file_obj = await self._client.files.create( + file=file, + purpose="assistants", + ) + uploaded_files.append(file_obj) + + limiter = trio.CapacityLimiter(max_concurrency) + + async with trio.open_nursery() as nursery: + for file in files: + nursery.start_soon(trio_upload_file, limiter, file) # pyright: ignore [reportUnknownMemberType] + else: + raise RuntimeError( + f"Async runtime {async_library} is not supported yet. Only asyncio or trio is supported", + ) + + batch = await self.create_and_poll( + vector_store_id=vector_store_id, + file_ids=[*file_ids, *(f.id for f in uploaded_files)], + poll_interval_ms=poll_interval_ms, + ) + return batch + + +class FileBatchesWithRawResponse: + def __init__(self, file_batches: FileBatches) -> None: + self._file_batches = file_batches + + self.create = _legacy_response.to_raw_response_wrapper( + file_batches.create, + ) + self.retrieve = _legacy_response.to_raw_response_wrapper( + file_batches.retrieve, + ) + self.cancel = _legacy_response.to_raw_response_wrapper( + file_batches.cancel, + ) + self.list_files = _legacy_response.to_raw_response_wrapper( + file_batches.list_files, + ) + + +class AsyncFileBatchesWithRawResponse: + def __init__(self, file_batches: AsyncFileBatches) -> None: + self._file_batches = file_batches + + self.create = _legacy_response.async_to_raw_response_wrapper( + file_batches.create, + ) + self.retrieve = _legacy_response.async_to_raw_response_wrapper( + file_batches.retrieve, + ) + self.cancel = _legacy_response.async_to_raw_response_wrapper( + file_batches.cancel, + ) + self.list_files = _legacy_response.async_to_raw_response_wrapper( + file_batches.list_files, + ) + + +class FileBatchesWithStreamingResponse: + def 
__init__(self, file_batches: FileBatches) -> None: + self._file_batches = file_batches + + self.create = to_streamed_response_wrapper( + file_batches.create, + ) + self.retrieve = to_streamed_response_wrapper( + file_batches.retrieve, + ) + self.cancel = to_streamed_response_wrapper( + file_batches.cancel, + ) + self.list_files = to_streamed_response_wrapper( + file_batches.list_files, + ) + + +class AsyncFileBatchesWithStreamingResponse: + def __init__(self, file_batches: AsyncFileBatches) -> None: + self._file_batches = file_batches + + self.create = async_to_streamed_response_wrapper( + file_batches.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + file_batches.retrieve, + ) + self.cancel = async_to_streamed_response_wrapper( + file_batches.cancel, + ) + self.list_files = async_to_streamed_response_wrapper( + file_batches.list_files, + ) diff --git a/.venv/Lib/site-packages/openai/resources/beta/vector_stores/files.py b/.venv/Lib/site-packages/openai/resources/beta/vector_stores/files.py new file mode 100644 index 00000000..5c3db276 --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/beta/vector_stores/files.py @@ -0,0 +1,675 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import TYPE_CHECKING +from typing_extensions import Literal, assert_never + +import httpx + +from .... 
class Files(SyncAPIResource):
    # Synchronous resource for files attached to a vector store
    # (generated from the OpenAPI spec by Stainless).

    @cached_property
    def with_raw_response(self) -> FilesWithRawResponse:
        return FilesWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> FilesWithStreamingResponse:
        return FilesWithStreamingResponse(self)

    def create(
        self,
        vector_store_id: str,
        *,
        file_id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> VectorStoreFile:
        """
        Create a vector store file by attaching a
        [File](https://platform.openai.com/docs/api-reference/files) to a
        [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object).

        Args:
          file_id: A [File](https://platform.openai.com/docs/api-reference/files) ID that the
              vector store should use. Useful for tools like `file_search` that can access
              files.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not vector_store_id:
            raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
        # Vector stores are part of the Assistants v2 beta surface; this header opts in.
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return self._post(
            f"/vector_stores/{vector_store_id}/files",
            body=maybe_transform({"file_id": file_id}, file_create_params.FileCreateParams),
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=VectorStoreFile,
        )

    def retrieve(
        self,
        file_id: str,
        *,
        vector_store_id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> VectorStoreFile:
        """
        Retrieves a vector store file.

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not vector_store_id:
            raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
        if not file_id:
            raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return self._get(
            f"/vector_stores/{vector_store_id}/files/{file_id}",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=VectorStoreFile,
        )

    def list(
        self,
        vector_store_id: str,
        *,
        after: str | NotGiven = NOT_GIVEN,
        before: str | NotGiven = NOT_GIVEN,
        filter: Literal["in_progress", "completed", "failed", "cancelled"] | NotGiven = NOT_GIVEN,
        limit: int | NotGiven = NOT_GIVEN,
        order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> SyncCursorPage[VectorStoreFile]:
        """
        Returns a list of vector store files.

        Args:
          after: A cursor for use in pagination. `after` is an object ID that defines your place
              in the list. For instance, if you make a list request and receive 100 objects,
              ending with obj_foo, your subsequent call can include after=obj_foo in order to
              fetch the next page of the list.

          before: A cursor for use in pagination. `before` is an object ID that defines your place
              in the list. For instance, if you make a list request and receive 100 objects,
              ending with obj_foo, your subsequent call can include before=obj_foo in order to
              fetch the previous page of the list.

          filter: Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`.

          limit: A limit on the number of objects to be returned. Limit can range between 1 and
              100, and the default is 20.

          order: Sort order by the `created_at` timestamp of the objects. `asc` for ascending
              order and `desc` for descending order.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not vector_store_id:
            raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return self._get_api_list(
            f"/vector_stores/{vector_store_id}/files",
            page=SyncCursorPage[VectorStoreFile],
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                query=maybe_transform(
                    {
                        "after": after,
                        "before": before,
                        "filter": filter,
                        "limit": limit,
                        "order": order,
                    },
                    file_list_params.FileListParams,
                ),
            ),
            model=VectorStoreFile,
        )

    def delete(
        self,
        file_id: str,
        *,
        vector_store_id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> VectorStoreFileDeleted:
        """Delete a vector store file.

        This will remove the file from the vector store but
        the file itself will not be deleted. To delete the file, use the
        [delete file](https://platform.openai.com/docs/api-reference/files/delete)
        endpoint.

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not vector_store_id:
            raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
        if not file_id:
            raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return self._delete(
            f"/vector_stores/{vector_store_id}/files/{file_id}",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=VectorStoreFileDeleted,
        )

    def create_and_poll(
        self,
        file_id: str,
        *,
        vector_store_id: str,
        poll_interval_ms: int | NotGiven = NOT_GIVEN,
    ) -> VectorStoreFile:
        """Attach a file to the given vector store and wait for it to be processed."""
        self.create(vector_store_id=vector_store_id, file_id=file_id)

        return self.poll(
            file_id,
            vector_store_id=vector_store_id,
            poll_interval_ms=poll_interval_ms,
        )

    def poll(
        self,
        file_id: str,
        *,
        vector_store_id: str,
        poll_interval_ms: int | NotGiven = NOT_GIVEN,
    ) -> VectorStoreFile:
        """Wait for the vector store file to finish processing.

        Note: this will return even if the file failed to process, you need to check
        file.last_error and file.status to handle these cases
        """
        headers: dict[str, str] = {"X-Stainless-Poll-Helper": "true"}
        if is_given(poll_interval_ms):
            headers["X-Stainless-Custom-Poll-Interval"] = str(poll_interval_ms)

        while True:
            response = self.with_raw_response.retrieve(
                file_id,
                vector_store_id=vector_store_id,
                extra_headers=headers,
            )

            file = response.parse()
            if file.status == "in_progress":
                if not is_given(poll_interval_ms):
                    # Prefer the interval the server suggests over the hard-coded fallback.
                    from_header = response.headers.get("openai-poll-after-ms")
                    if from_header is not None:
                        poll_interval_ms = int(from_header)
                    else:
                        poll_interval_ms = 1000

                self._sleep(poll_interval_ms / 1000)
            elif file.status == "cancelled" or file.status == "completed" or file.status == "failed":
                return file
            else:
                # Statically assert the match above is exhaustive; at runtime, any
                # status value added by a newer API version is returned as-is.
                if TYPE_CHECKING:  # type: ignore[unreachable]
                    assert_never(file.status)
                else:
                    return file

    def upload(
        self,
        *,
        vector_store_id: str,
        file: FileTypes,
    ) -> VectorStoreFile:
        """Upload a file to the `files` API and then attach it to the given vector store.

        Note the file will be asynchronously processed (you can use the alternative
        polling helper method to wait for processing to complete).
        """
        file_obj = self._client.files.create(file=file, purpose="assistants")
        return self.create(vector_store_id=vector_store_id, file_id=file_obj.id)

    def upload_and_poll(
        self,
        *,
        vector_store_id: str,
        file: FileTypes,
        poll_interval_ms: int | NotGiven = NOT_GIVEN,
    ) -> VectorStoreFile:
        """Add a file to a vector store and poll until processing is complete."""
        file_obj = self._client.files.create(file=file, purpose="assistants")
        return self.create_and_poll(
            vector_store_id=vector_store_id,
            file_id=file_obj.id,
            poll_interval_ms=poll_interval_ms,
        )
    async def retrieve(
        self,
        file_id: str,
        *,
        vector_store_id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> VectorStoreFile:
        """
        Retrieves a vector store file.

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not vector_store_id:
            raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
        if not file_id:
            raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
        # Vector stores are part of the Assistants v2 beta surface; this header opts in.
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return await self._get(
            f"/vector_stores/{vector_store_id}/files/{file_id}",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=VectorStoreFile,
        )

    def list(
        self,
        vector_store_id: str,
        *,
        after: str | NotGiven = NOT_GIVEN,
        before: str | NotGiven = NOT_GIVEN,
        filter: Literal["in_progress", "completed", "failed", "cancelled"] | NotGiven = NOT_GIVEN,
        limit: int | NotGiven = NOT_GIVEN,
        order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> AsyncPaginator[VectorStoreFile, AsyncCursorPage[VectorStoreFile]]:
        """
        Returns a list of vector store files.

        Args:
          after: A cursor for use in pagination. `after` is an object ID that defines your place
              in the list. For instance, if you make a list request and receive 100 objects,
              ending with obj_foo, your subsequent call can include after=obj_foo in order to
              fetch the next page of the list.

          before: A cursor for use in pagination. `before` is an object ID that defines your place
              in the list. For instance, if you make a list request and receive 100 objects,
              ending with obj_foo, your subsequent call can include before=obj_foo in order to
              fetch the previous page of the list.

          filter: Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`.

          limit: A limit on the number of objects to be returned. Limit can range between 1 and
              100, and the default is 20.

          order: Sort order by the `created_at` timestamp of the objects. `asc` for ascending
              order and `desc` for descending order.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not vector_store_id:
            raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return self._get_api_list(
            f"/vector_stores/{vector_store_id}/files",
            page=AsyncCursorPage[VectorStoreFile],
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                query=maybe_transform(
                    {
                        "after": after,
                        "before": before,
                        "filter": filter,
                        "limit": limit,
                        "order": order,
                    },
                    file_list_params.FileListParams,
                ),
            ),
            model=VectorStoreFile,
        )

    async def delete(
        self,
        file_id: str,
        *,
        vector_store_id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> VectorStoreFileDeleted:
        """Delete a vector store file.

        This will remove the file from the vector store but
        the file itself will not be deleted. To delete the file, use the
        [delete file](https://platform.openai.com/docs/api-reference/files/delete)
        endpoint.

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not vector_store_id:
            raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
        if not file_id:
            raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return await self._delete(
            f"/vector_stores/{vector_store_id}/files/{file_id}",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=VectorStoreFileDeleted,
        )

    async def create_and_poll(
        self,
        file_id: str,
        *,
        vector_store_id: str,
        poll_interval_ms: int | NotGiven = NOT_GIVEN,
    ) -> VectorStoreFile:
        """Attach a file to the given vector store and wait for it to be processed."""
        await self.create(vector_store_id=vector_store_id, file_id=file_id)

        return await self.poll(
            file_id,
            vector_store_id=vector_store_id,
            poll_interval_ms=poll_interval_ms,
        )

    async def poll(
        self,
        file_id: str,
        *,
        vector_store_id: str,
        poll_interval_ms: int | NotGiven = NOT_GIVEN,
    ) -> VectorStoreFile:
        """Wait for the vector store file to finish processing.

        Note: this will return even if the file failed to process, you need to check
        file.last_error and file.status to handle these cases
        """
        headers: dict[str, str] = {"X-Stainless-Poll-Helper": "true"}
        if is_given(poll_interval_ms):
            headers["X-Stainless-Custom-Poll-Interval"] = str(poll_interval_ms)

        while True:
            response = await self.with_raw_response.retrieve(
                file_id,
                vector_store_id=vector_store_id,
                extra_headers=headers,
            )

            file = response.parse()
            if file.status == "in_progress":
                if not is_given(poll_interval_ms):
                    # Prefer the interval the server suggests over the hard-coded fallback.
                    from_header = response.headers.get("openai-poll-after-ms")
                    if from_header is not None:
                        poll_interval_ms = int(from_header)
                    else:
                        poll_interval_ms = 1000

                await self._sleep(poll_interval_ms / 1000)
            elif file.status == "cancelled" or file.status == "completed" or file.status == "failed":
                return file
            else:
                # Statically assert the match above is exhaustive; at runtime, any
                # status value added by a newer API version is returned as-is.
                if TYPE_CHECKING:  # type: ignore[unreachable]
                    assert_never(file.status)
                else:
                    return file
+ """ + file_obj = await self._client.files.create(file=file, purpose="assistants") + return await self.create(vector_store_id=vector_store_id, file_id=file_obj.id) + + async def upload_and_poll( + self, + *, + vector_store_id: str, + file: FileTypes, + poll_interval_ms: int | NotGiven = NOT_GIVEN, + ) -> VectorStoreFile: + """Add a file to a vector store and poll until processing is complete.""" + file_obj = await self._client.files.create(file=file, purpose="assistants") + return await self.create_and_poll( + vector_store_id=vector_store_id, + file_id=file_obj.id, + poll_interval_ms=poll_interval_ms, + ) + + +class FilesWithRawResponse: + def __init__(self, files: Files) -> None: + self._files = files + + self.create = _legacy_response.to_raw_response_wrapper( + files.create, + ) + self.retrieve = _legacy_response.to_raw_response_wrapper( + files.retrieve, + ) + self.list = _legacy_response.to_raw_response_wrapper( + files.list, + ) + self.delete = _legacy_response.to_raw_response_wrapper( + files.delete, + ) + + +class AsyncFilesWithRawResponse: + def __init__(self, files: AsyncFiles) -> None: + self._files = files + + self.create = _legacy_response.async_to_raw_response_wrapper( + files.create, + ) + self.retrieve = _legacy_response.async_to_raw_response_wrapper( + files.retrieve, + ) + self.list = _legacy_response.async_to_raw_response_wrapper( + files.list, + ) + self.delete = _legacy_response.async_to_raw_response_wrapper( + files.delete, + ) + + +class FilesWithStreamingResponse: + def __init__(self, files: Files) -> None: + self._files = files + + self.create = to_streamed_response_wrapper( + files.create, + ) + self.retrieve = to_streamed_response_wrapper( + files.retrieve, + ) + self.list = to_streamed_response_wrapper( + files.list, + ) + self.delete = to_streamed_response_wrapper( + files.delete, + ) + + +class AsyncFilesWithStreamingResponse: + def __init__(self, files: AsyncFiles) -> None: + self._files = files + + self.create = 
async_to_streamed_response_wrapper( + files.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + files.retrieve, + ) + self.list = async_to_streamed_response_wrapper( + files.list, + ) + self.delete = async_to_streamed_response_wrapper( + files.delete, + ) diff --git a/.venv/Lib/site-packages/openai/resources/beta/vector_stores/vector_stores.py b/.venv/Lib/site-packages/openai/resources/beta/vector_stores/vector_stores.py new file mode 100644 index 00000000..8a177c28 --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/beta/vector_stores/vector_stores.py @@ -0,0 +1,684 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List, Optional +from typing_extensions import Literal + +import httpx + +from .... import _legacy_response +from .files import ( + Files, + AsyncFiles, + FilesWithRawResponse, + AsyncFilesWithRawResponse, + FilesWithStreamingResponse, + AsyncFilesWithStreamingResponse, +) +from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ...._utils import ( + maybe_transform, + async_maybe_transform, +) +from ...._compat import cached_property +from ...._resource import SyncAPIResource, AsyncAPIResource +from ...._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper +from .file_batches import ( + FileBatches, + AsyncFileBatches, + FileBatchesWithRawResponse, + AsyncFileBatchesWithRawResponse, + FileBatchesWithStreamingResponse, + AsyncFileBatchesWithStreamingResponse, +) +from ....pagination import SyncCursorPage, AsyncCursorPage +from ....types.beta import vector_store_list_params, vector_store_create_params, vector_store_update_params +from ...._base_client import ( + AsyncPaginator, + make_request_options, +) +from ....types.beta.vector_store import VectorStore +from ....types.beta.vector_store_deleted import VectorStoreDeleted + +__all__ = ["VectorStores", "AsyncVectorStores"] + + +class 
VectorStores(SyncAPIResource): + @cached_property + def files(self) -> Files: + return Files(self._client) + + @cached_property + def file_batches(self) -> FileBatches: + return FileBatches(self._client) + + @cached_property + def with_raw_response(self) -> VectorStoresWithRawResponse: + return VectorStoresWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> VectorStoresWithStreamingResponse: + return VectorStoresWithStreamingResponse(self) + + def create( + self, + *, + expires_after: vector_store_create_params.ExpiresAfter | NotGiven = NOT_GIVEN, + file_ids: List[str] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + name: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> VectorStore: + """ + Create a vector store. + + Args: + expires_after: The expiration policy for a vector store. + + file_ids: A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that + the vector store should use. Useful for tools like `file_search` that can access + files. + + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + name: The name of the vector store. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._post( + "/vector_stores", + body=maybe_transform( + { + "expires_after": expires_after, + "file_ids": file_ids, + "metadata": metadata, + "name": name, + }, + vector_store_create_params.VectorStoreCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=VectorStore, + ) + + def retrieve( + self, + vector_store_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> VectorStore: + """ + Retrieves a vector store. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not vector_store_id: + raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._get( + f"/vector_stores/{vector_store_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=VectorStore, + ) + + def update( + self, + vector_store_id: str, + *, + expires_after: Optional[vector_store_update_params.ExpiresAfter] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + name: Optional[str] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> VectorStore: + """ + Modifies a vector store. + + Args: + expires_after: The expiration policy for a vector store. + + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + name: The name of the vector store. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not vector_store_id: + raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._post( + f"/vector_stores/{vector_store_id}", + body=maybe_transform( + { + "expires_after": expires_after, + "metadata": metadata, + "name": name, + }, + vector_store_update_params.VectorStoreUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=VectorStore, + ) + + def list( + self, + *, + after: str | NotGiven = NOT_GIVEN, + before: str | NotGiven = NOT_GIVEN, + limit: int | NotGiven = NOT_GIVEN, + order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> SyncCursorPage[VectorStore]: + """Returns a list of vector stores. + + Args: + after: A cursor for use in pagination. + + `after` is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include after=obj_foo in order to + fetch the next page of the list. + + before: A cursor for use in pagination. `before` is an object ID that defines your place + in the list. 
For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include before=obj_foo in order to + fetch the previous page of the list. + + limit: A limit on the number of objects to be returned. Limit can range between 1 and + 100, and the default is 20. + + order: Sort order by the `created_at` timestamp of the objects. `asc` for ascending + order and `desc` for descending order. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._get_api_list( + "/vector_stores", + page=SyncCursorPage[VectorStore], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "after": after, + "before": before, + "limit": limit, + "order": order, + }, + vector_store_list_params.VectorStoreListParams, + ), + ), + model=VectorStore, + ) + + def delete( + self, + vector_store_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> VectorStoreDeleted: + """ + Delete a vector store. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not vector_store_id: + raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._delete( + f"/vector_stores/{vector_store_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=VectorStoreDeleted, + ) + + +class AsyncVectorStores(AsyncAPIResource): + @cached_property + def files(self) -> AsyncFiles: + return AsyncFiles(self._client) + + @cached_property + def file_batches(self) -> AsyncFileBatches: + return AsyncFileBatches(self._client) + + @cached_property + def with_raw_response(self) -> AsyncVectorStoresWithRawResponse: + return AsyncVectorStoresWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncVectorStoresWithStreamingResponse: + return AsyncVectorStoresWithStreamingResponse(self) + + async def create( + self, + *, + expires_after: vector_store_create_params.ExpiresAfter | NotGiven = NOT_GIVEN, + file_ids: List[str] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + name: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> VectorStore: + """ + Create a vector store. + + Args: + expires_after: The expiration policy for a vector store. 
+ + file_ids: A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that + the vector store should use. Useful for tools like `file_search` that can access + files. + + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + name: The name of the vector store. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return await self._post( + "/vector_stores", + body=await async_maybe_transform( + { + "expires_after": expires_after, + "file_ids": file_ids, + "metadata": metadata, + "name": name, + }, + vector_store_create_params.VectorStoreCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=VectorStore, + ) + + async def retrieve( + self, + vector_store_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> VectorStore: + """ + Retrieves a vector store. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not vector_store_id: + raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return await self._get( + f"/vector_stores/{vector_store_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=VectorStore, + ) + + async def update( + self, + vector_store_id: str, + *, + expires_after: Optional[vector_store_update_params.ExpiresAfter] | NotGiven = NOT_GIVEN, + metadata: Optional[object] | NotGiven = NOT_GIVEN, + name: Optional[str] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> VectorStore: + """ + Modifies a vector store. + + Args: + expires_after: The expiration policy for a vector store. + + metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful + for storing additional information about the object in a structured format. Keys + can be a maximum of 64 characters long and values can be a maxium of 512 + characters long. + + name: The name of the vector store. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not vector_store_id: + raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return await self._post( + f"/vector_stores/{vector_store_id}", + body=await async_maybe_transform( + { + "expires_after": expires_after, + "metadata": metadata, + "name": name, + }, + vector_store_update_params.VectorStoreUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=VectorStore, + ) + + def list( + self, + *, + after: str | NotGiven = NOT_GIVEN, + before: str | NotGiven = NOT_GIVEN, + limit: int | NotGiven = NOT_GIVEN, + order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncPaginator[VectorStore, AsyncCursorPage[VectorStore]]: + """Returns a list of vector stores. + + Args: + after: A cursor for use in pagination. + + `after` is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include after=obj_foo in order to + fetch the next page of the list. + + before: A cursor for use in pagination. `before` is an object ID that defines your place + in the list. 
For instance, if you make a list request and receive 100 objects, + ending with obj_foo, your subsequent call can include before=obj_foo in order to + fetch the previous page of the list. + + limit: A limit on the number of objects to be returned. Limit can range between 1 and + 100, and the default is 20. + + order: Sort order by the `created_at` timestamp of the objects. `asc` for ascending + order and `desc` for descending order. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return self._get_api_list( + "/vector_stores", + page=AsyncCursorPage[VectorStore], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "after": after, + "before": before, + "limit": limit, + "order": order, + }, + vector_store_list_params.VectorStoreListParams, + ), + ), + model=VectorStore, + ) + + async def delete( + self, + vector_store_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> VectorStoreDeleted: + """ + Delete a vector store. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not vector_store_id: + raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}") + extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})} + return await self._delete( + f"/vector_stores/{vector_store_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=VectorStoreDeleted, + ) + + +class VectorStoresWithRawResponse: + def __init__(self, vector_stores: VectorStores) -> None: + self._vector_stores = vector_stores + + self.create = _legacy_response.to_raw_response_wrapper( + vector_stores.create, + ) + self.retrieve = _legacy_response.to_raw_response_wrapper( + vector_stores.retrieve, + ) + self.update = _legacy_response.to_raw_response_wrapper( + vector_stores.update, + ) + self.list = _legacy_response.to_raw_response_wrapper( + vector_stores.list, + ) + self.delete = _legacy_response.to_raw_response_wrapper( + vector_stores.delete, + ) + + @cached_property + def files(self) -> FilesWithRawResponse: + return FilesWithRawResponse(self._vector_stores.files) + + @cached_property + def file_batches(self) -> FileBatchesWithRawResponse: + return FileBatchesWithRawResponse(self._vector_stores.file_batches) + + +class AsyncVectorStoresWithRawResponse: + def __init__(self, vector_stores: AsyncVectorStores) -> None: + self._vector_stores = vector_stores + + self.create = _legacy_response.async_to_raw_response_wrapper( + vector_stores.create, + ) + self.retrieve = _legacy_response.async_to_raw_response_wrapper( + vector_stores.retrieve, + ) + self.update = _legacy_response.async_to_raw_response_wrapper( + vector_stores.update, + ) + self.list = 
_legacy_response.async_to_raw_response_wrapper( + vector_stores.list, + ) + self.delete = _legacy_response.async_to_raw_response_wrapper( + vector_stores.delete, + ) + + @cached_property + def files(self) -> AsyncFilesWithRawResponse: + return AsyncFilesWithRawResponse(self._vector_stores.files) + + @cached_property + def file_batches(self) -> AsyncFileBatchesWithRawResponse: + return AsyncFileBatchesWithRawResponse(self._vector_stores.file_batches) + + +class VectorStoresWithStreamingResponse: + def __init__(self, vector_stores: VectorStores) -> None: + self._vector_stores = vector_stores + + self.create = to_streamed_response_wrapper( + vector_stores.create, + ) + self.retrieve = to_streamed_response_wrapper( + vector_stores.retrieve, + ) + self.update = to_streamed_response_wrapper( + vector_stores.update, + ) + self.list = to_streamed_response_wrapper( + vector_stores.list, + ) + self.delete = to_streamed_response_wrapper( + vector_stores.delete, + ) + + @cached_property + def files(self) -> FilesWithStreamingResponse: + return FilesWithStreamingResponse(self._vector_stores.files) + + @cached_property + def file_batches(self) -> FileBatchesWithStreamingResponse: + return FileBatchesWithStreamingResponse(self._vector_stores.file_batches) + + +class AsyncVectorStoresWithStreamingResponse: + def __init__(self, vector_stores: AsyncVectorStores) -> None: + self._vector_stores = vector_stores + + self.create = async_to_streamed_response_wrapper( + vector_stores.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + vector_stores.retrieve, + ) + self.update = async_to_streamed_response_wrapper( + vector_stores.update, + ) + self.list = async_to_streamed_response_wrapper( + vector_stores.list, + ) + self.delete = async_to_streamed_response_wrapper( + vector_stores.delete, + ) + + @cached_property + def files(self) -> AsyncFilesWithStreamingResponse: + return AsyncFilesWithStreamingResponse(self._vector_stores.files) + + @cached_property + def 
file_batches(self) -> AsyncFileBatchesWithStreamingResponse: + return AsyncFileBatchesWithStreamingResponse(self._vector_stores.file_batches) diff --git a/.venv/Lib/site-packages/openai/resources/chat/__init__.py b/.venv/Lib/site-packages/openai/resources/chat/__init__.py new file mode 100644 index 00000000..52dfdcea --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/chat/__init__.py @@ -0,0 +1,33 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from .chat import ( + Chat, + AsyncChat, + ChatWithRawResponse, + AsyncChatWithRawResponse, + ChatWithStreamingResponse, + AsyncChatWithStreamingResponse, +) +from .completions import ( + Completions, + AsyncCompletions, + CompletionsWithRawResponse, + AsyncCompletionsWithRawResponse, + CompletionsWithStreamingResponse, + AsyncCompletionsWithStreamingResponse, +) + +__all__ = [ + "Completions", + "AsyncCompletions", + "CompletionsWithRawResponse", + "AsyncCompletionsWithRawResponse", + "CompletionsWithStreamingResponse", + "AsyncCompletionsWithStreamingResponse", + "Chat", + "AsyncChat", + "ChatWithRawResponse", + "AsyncChatWithRawResponse", + "ChatWithStreamingResponse", + "AsyncChatWithStreamingResponse", +] diff --git a/.venv/Lib/site-packages/openai/resources/chat/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/chat/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..4a260928 Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/chat/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/chat/__pycache__/chat.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/chat/__pycache__/chat.cpython-311.pyc new file mode 100644 index 00000000..7cbe314c Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/chat/__pycache__/chat.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/openai/resources/chat/__pycache__/completions.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/chat/__pycache__/completions.cpython-311.pyc new file mode 100644 index 00000000..f886aa6c Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/chat/__pycache__/completions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/chat/chat.py b/.venv/Lib/site-packages/openai/resources/chat/chat.py new file mode 100644 index 00000000..d14d0555 --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/chat/chat.py @@ -0,0 +1,80 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from .completions import ( + Completions, + AsyncCompletions, + CompletionsWithRawResponse, + AsyncCompletionsWithRawResponse, + CompletionsWithStreamingResponse, + AsyncCompletionsWithStreamingResponse, +) + +__all__ = ["Chat", "AsyncChat"] + + +class Chat(SyncAPIResource): + @cached_property + def completions(self) -> Completions: + return Completions(self._client) + + @cached_property + def with_raw_response(self) -> ChatWithRawResponse: + return ChatWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> ChatWithStreamingResponse: + return ChatWithStreamingResponse(self) + + +class AsyncChat(AsyncAPIResource): + @cached_property + def completions(self) -> AsyncCompletions: + return AsyncCompletions(self._client) + + @cached_property + def with_raw_response(self) -> AsyncChatWithRawResponse: + return AsyncChatWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncChatWithStreamingResponse: + return AsyncChatWithStreamingResponse(self) + + +class ChatWithRawResponse: + def __init__(self, chat: Chat) -> None: + self._chat = chat + + @cached_property + def completions(self) -> 
CompletionsWithRawResponse: + return CompletionsWithRawResponse(self._chat.completions) + + +class AsyncChatWithRawResponse: + def __init__(self, chat: AsyncChat) -> None: + self._chat = chat + + @cached_property + def completions(self) -> AsyncCompletionsWithRawResponse: + return AsyncCompletionsWithRawResponse(self._chat.completions) + + +class ChatWithStreamingResponse: + def __init__(self, chat: Chat) -> None: + self._chat = chat + + @cached_property + def completions(self) -> CompletionsWithStreamingResponse: + return CompletionsWithStreamingResponse(self._chat.completions) + + +class AsyncChatWithStreamingResponse: + def __init__(self, chat: AsyncChat) -> None: + self._chat = chat + + @cached_property + def completions(self) -> AsyncCompletionsWithStreamingResponse: + return AsyncCompletionsWithStreamingResponse(self._chat.completions) diff --git a/.venv/Lib/site-packages/openai/resources/chat/completions.py b/.venv/Lib/site-packages/openai/resources/chat/completions.py new file mode 100644 index 00000000..5104cd61 --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/chat/completions.py @@ -0,0 +1,1228 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict, List, Union, Iterable, Optional, overload +from typing_extensions import Literal + +import httpx + +from ... 
import _legacy_response +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._utils import ( + required_args, + maybe_transform, + async_maybe_transform, +) +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper +from ..._streaming import Stream, AsyncStream +from ...types.chat import completion_create_params +from ..._base_client import ( + make_request_options, +) +from ...types.chat_model import ChatModel +from ...types.chat.chat_completion import ChatCompletion +from ...types.chat.chat_completion_chunk import ChatCompletionChunk +from ...types.chat.chat_completion_tool_param import ChatCompletionToolParam +from ...types.chat.chat_completion_message_param import ChatCompletionMessageParam +from ...types.chat.chat_completion_tool_choice_option_param import ChatCompletionToolChoiceOptionParam + +__all__ = ["Completions", "AsyncCompletions"] + + +class Completions(SyncAPIResource): + @cached_property + def with_raw_response(self) -> CompletionsWithRawResponse: + return CompletionsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> CompletionsWithStreamingResponse: + return CompletionsWithStreamingResponse(self) + + @overload + def create( + self, + *, + messages: Iterable[ChatCompletionMessageParam], + model: Union[str, ChatModel], + frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, + function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN, + functions: Iterable[completion_create_params.Function] | NotGiven = NOT_GIVEN, + logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, + logprobs: Optional[bool] | NotGiven = NOT_GIVEN, + max_tokens: Optional[int] | NotGiven = NOT_GIVEN, + n: Optional[int] | NotGiven = NOT_GIVEN, + presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, + response_format: completion_create_params.ResponseFormat | NotGiven 
= NOT_GIVEN, + seed: Optional[int] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], List[str]] | NotGiven = NOT_GIVEN, + stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN, + tools: Iterable[ChatCompletionToolParam] | NotGiven = NOT_GIVEN, + top_logprobs: Optional[int] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ChatCompletion: + """ + Creates a model response for the given chat conversation. + + Args: + messages: A list of messages comprising the conversation so far. + [Example Python code](https://cookbook.openai.com/examples/how_to_format_inputs_to_chatgpt_models). + + model: ID of the model to use. See the + [model endpoint compatibility](https://platform.openai.com/docs/models/model-endpoint-compatibility) + table for details on which models work with the Chat API. + + frequency_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on their + existing frequency in the text so far, decreasing the model's likelihood to + repeat the same line verbatim. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + function_call: Deprecated in favor of `tool_choice`. + + Controls which (if any) function is called by the model. `none` means the model + will not call a function and instead generates a message. 
`auto` means the model + can pick between generating a message or calling a function. Specifying a + particular function via `{"name": "my_function"}` forces the model to call that + function. + + `none` is the default when no functions are present. `auto` is the default if + functions are present. + + functions: Deprecated in favor of `tools`. + + A list of functions the model may generate JSON inputs for. + + logit_bias: Modify the likelihood of specified tokens appearing in the completion. + + Accepts a JSON object that maps tokens (specified by their token ID in the + tokenizer) to an associated bias value from -100 to 100. Mathematically, the + bias is added to the logits generated by the model prior to sampling. The exact + effect will vary per model, but values between -1 and 1 should decrease or + increase likelihood of selection; values like -100 or 100 should result in a ban + or exclusive selection of the relevant token. + + logprobs: Whether to return log probabilities of the output tokens or not. If true, + returns the log probabilities of each output token returned in the `content` of + `message`. + + max_tokens: The maximum number of [tokens](/tokenizer) that can be generated in the chat + completion. + + The total length of input tokens and generated tokens is limited by the model's + context length. + [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + for counting tokens. + + n: How many chat completion choices to generate for each input message. Note that + you will be charged based on the number of generated tokens across all of the + choices. Keep `n` as `1` to minimize costs. + + presence_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on + whether they appear in the text so far, increasing the model's likelihood to + talk about new topics. 
+ + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + response_format: An object specifying the format that the model must output. Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models newer than `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + + seed: This feature is in Beta. If specified, our system will make a best effort to + sample deterministically, such that repeated requests with the same `seed` and + parameters should return the same result. Determinism is not guaranteed, and you + should refer to the `system_fingerprint` response parameter to monitor changes + in the backend. + + stop: Up to 4 sequences where the API will stop generating further tokens. + + stream: If set, partial message deltas will be sent, like in ChatGPT. Tokens will be + sent as data-only + [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + as they become available, with the stream terminated by a `data: [DONE]` + message. + [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + + temperature: What sampling temperature to use, between 0 and 2. 
Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + We generally recommend altering this or `top_p` but not both. + + tool_choice: Controls which (if any) tool is called by the model. `none` means the model will + not call any tool and instead generates a message. `auto` means the model can + pick between generating a message or calling one or more tools. `required` means + the model must call one or more tools. Specifying a particular tool via + `{"type": "function", "function": {"name": "my_function"}}` forces the model to + call that tool. + + `none` is the default when no tools are present. `auto` is the default if tools + are present. + + tools: A list of tools the model may call. Currently, only functions are supported as a + tool. Use this to provide a list of functions the model may generate JSON inputs + for. A max of 128 functions are supported. + + top_logprobs: An integer between 0 and 20 specifying the number of most likely tokens to + return at each token position, each with an associated log probability. + `logprobs` must be set to `true` if this parameter is used. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or `temperature` but not both. + + user: A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... 
+ + @overload + def create( + self, + *, + messages: Iterable[ChatCompletionMessageParam], + model: Union[str, ChatModel], + stream: Literal[True], + frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, + function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN, + functions: Iterable[completion_create_params.Function] | NotGiven = NOT_GIVEN, + logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, + logprobs: Optional[bool] | NotGiven = NOT_GIVEN, + max_tokens: Optional[int] | NotGiven = NOT_GIVEN, + n: Optional[int] | NotGiven = NOT_GIVEN, + presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, + response_format: completion_create_params.ResponseFormat | NotGiven = NOT_GIVEN, + seed: Optional[int] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], List[str]] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN, + tools: Iterable[ChatCompletionToolParam] | NotGiven = NOT_GIVEN, + top_logprobs: Optional[int] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Stream[ChatCompletionChunk]: + """ + Creates a model response for the given chat conversation. + + Args: + messages: A list of messages comprising the conversation so far. + [Example Python code](https://cookbook.openai.com/examples/how_to_format_inputs_to_chatgpt_models). + + model: ID of the model to use. 
See the + [model endpoint compatibility](https://platform.openai.com/docs/models/model-endpoint-compatibility) + table for details on which models work with the Chat API. + + stream: If set, partial message deltas will be sent, like in ChatGPT. Tokens will be + sent as data-only + [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + as they become available, with the stream terminated by a `data: [DONE]` + message. + [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + + frequency_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on their + existing frequency in the text so far, decreasing the model's likelihood to + repeat the same line verbatim. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + function_call: Deprecated in favor of `tool_choice`. + + Controls which (if any) function is called by the model. `none` means the model + will not call a function and instead generates a message. `auto` means the model + can pick between generating a message or calling a function. Specifying a + particular function via `{"name": "my_function"}` forces the model to call that + function. + + `none` is the default when no functions are present. `auto` is the default if + functions are present. + + functions: Deprecated in favor of `tools`. + + A list of functions the model may generate JSON inputs for. + + logit_bias: Modify the likelihood of specified tokens appearing in the completion. + + Accepts a JSON object that maps tokens (specified by their token ID in the + tokenizer) to an associated bias value from -100 to 100. Mathematically, the + bias is added to the logits generated by the model prior to sampling. 
The exact + effect will vary per model, but values between -1 and 1 should decrease or + increase likelihood of selection; values like -100 or 100 should result in a ban + or exclusive selection of the relevant token. + + logprobs: Whether to return log probabilities of the output tokens or not. If true, + returns the log probabilities of each output token returned in the `content` of + `message`. + + max_tokens: The maximum number of [tokens](/tokenizer) that can be generated in the chat + completion. + + The total length of input tokens and generated tokens is limited by the model's + context length. + [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + for counting tokens. + + n: How many chat completion choices to generate for each input message. Note that + you will be charged based on the number of generated tokens across all of the + choices. Keep `n` as `1` to minimize costs. + + presence_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on + whether they appear in the text so far, increasing the model's likelihood to + talk about new topics. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + response_format: An object specifying the format that the model must output. Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models newer than `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. 
Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + + seed: This feature is in Beta. If specified, our system will make a best effort to + sample deterministically, such that repeated requests with the same `seed` and + parameters should return the same result. Determinism is not guaranteed, and you + should refer to the `system_fingerprint` response parameter to monitor changes + in the backend. + + stop: Up to 4 sequences where the API will stop generating further tokens. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + We generally recommend altering this or `top_p` but not both. + + tool_choice: Controls which (if any) tool is called by the model. `none` means the model will + not call any tool and instead generates a message. `auto` means the model can + pick between generating a message or calling one or more tools. `required` means + the model must call one or more tools. Specifying a particular tool via + `{"type": "function", "function": {"name": "my_function"}}` forces the model to + call that tool. + + `none` is the default when no tools are present. `auto` is the default if tools + are present. + + tools: A list of tools the model may call. Currently, only functions are supported as a + tool. Use this to provide a list of functions the model may generate JSON inputs + for. A max of 128 functions are supported. + + top_logprobs: An integer between 0 and 20 specifying the number of most likely tokens to + return at each token position, each with an associated log probability. + `logprobs` must be set to `true` if this parameter is used. 
+ + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or `temperature` but not both. + + user: A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... + + @overload + def create( + self, + *, + messages: Iterable[ChatCompletionMessageParam], + model: Union[str, ChatModel], + stream: bool, + frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, + function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN, + functions: Iterable[completion_create_params.Function] | NotGiven = NOT_GIVEN, + logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, + logprobs: Optional[bool] | NotGiven = NOT_GIVEN, + max_tokens: Optional[int] | NotGiven = NOT_GIVEN, + n: Optional[int] | NotGiven = NOT_GIVEN, + presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, + response_format: completion_create_params.ResponseFormat | NotGiven = NOT_GIVEN, + seed: Optional[int] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], List[str]] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN, + tools: Iterable[ChatCompletionToolParam] | NotGiven = NOT_GIVEN, + top_logprobs: Optional[int] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass 
additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ChatCompletion | Stream[ChatCompletionChunk]: + """ + Creates a model response for the given chat conversation. + + Args: + messages: A list of messages comprising the conversation so far. + [Example Python code](https://cookbook.openai.com/examples/how_to_format_inputs_to_chatgpt_models). + + model: ID of the model to use. See the + [model endpoint compatibility](https://platform.openai.com/docs/models/model-endpoint-compatibility) + table for details on which models work with the Chat API. + + stream: If set, partial message deltas will be sent, like in ChatGPT. Tokens will be + sent as data-only + [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + as they become available, with the stream terminated by a `data: [DONE]` + message. + [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + + frequency_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on their + existing frequency in the text so far, decreasing the model's likelihood to + repeat the same line verbatim. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + function_call: Deprecated in favor of `tool_choice`. + + Controls which (if any) function is called by the model. `none` means the model + will not call a function and instead generates a message. `auto` means the model + can pick between generating a message or calling a function. 
Specifying a + particular function via `{"name": "my_function"}` forces the model to call that + function. + + `none` is the default when no functions are present. `auto` is the default if + functions are present. + + functions: Deprecated in favor of `tools`. + + A list of functions the model may generate JSON inputs for. + + logit_bias: Modify the likelihood of specified tokens appearing in the completion. + + Accepts a JSON object that maps tokens (specified by their token ID in the + tokenizer) to an associated bias value from -100 to 100. Mathematically, the + bias is added to the logits generated by the model prior to sampling. The exact + effect will vary per model, but values between -1 and 1 should decrease or + increase likelihood of selection; values like -100 or 100 should result in a ban + or exclusive selection of the relevant token. + + logprobs: Whether to return log probabilities of the output tokens or not. If true, + returns the log probabilities of each output token returned in the `content` of + `message`. + + max_tokens: The maximum number of [tokens](/tokenizer) that can be generated in the chat + completion. + + The total length of input tokens and generated tokens is limited by the model's + context length. + [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + for counting tokens. + + n: How many chat completion choices to generate for each input message. Note that + you will be charged based on the number of generated tokens across all of the + choices. Keep `n` as `1` to minimize costs. + + presence_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on + whether they appear in the text so far, increasing the model's likelihood to + talk about new topics. 
+ + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + response_format: An object specifying the format that the model must output. Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models newer than `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + + seed: This feature is in Beta. If specified, our system will make a best effort to + sample deterministically, such that repeated requests with the same `seed` and + parameters should return the same result. Determinism is not guaranteed, and you + should refer to the `system_fingerprint` response parameter to monitor changes + in the backend. + + stop: Up to 4 sequences where the API will stop generating further tokens. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + We generally recommend altering this or `top_p` but not both. + + tool_choice: Controls which (if any) tool is called by the model. `none` means the model will + not call any tool and instead generates a message. `auto` means the model can + pick between generating a message or calling one or more tools. 
`required` means + the model must call one or more tools. Specifying a particular tool via + `{"type": "function", "function": {"name": "my_function"}}` forces the model to + call that tool. + + `none` is the default when no tools are present. `auto` is the default if tools + are present. + + tools: A list of tools the model may call. Currently, only functions are supported as a + tool. Use this to provide a list of functions the model may generate JSON inputs + for. A max of 128 functions are supported. + + top_logprobs: An integer between 0 and 20 specifying the number of most likely tokens to + return at each token position, each with an associated log probability. + `logprobs` must be set to `true` if this parameter is used. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or `temperature` but not both. + + user: A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... 
+ + @required_args(["messages", "model"], ["messages", "model", "stream"]) + def create( + self, + *, + messages: Iterable[ChatCompletionMessageParam], + model: Union[str, ChatModel], + frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, + function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN, + functions: Iterable[completion_create_params.Function] | NotGiven = NOT_GIVEN, + logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, + logprobs: Optional[bool] | NotGiven = NOT_GIVEN, + max_tokens: Optional[int] | NotGiven = NOT_GIVEN, + n: Optional[int] | NotGiven = NOT_GIVEN, + presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, + response_format: completion_create_params.ResponseFormat | NotGiven = NOT_GIVEN, + seed: Optional[int] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], List[str]] | NotGiven = NOT_GIVEN, + stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN, + tools: Iterable[ChatCompletionToolParam] | NotGiven = NOT_GIVEN, + top_logprobs: Optional[int] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ChatCompletion | Stream[ChatCompletionChunk]: + return self._post( + "/chat/completions", + body=maybe_transform( + { + "messages": messages, + "model": model, + "frequency_penalty": frequency_penalty, + "function_call": function_call, + "functions": functions, + "logit_bias": logit_bias, + "logprobs": logprobs, + "max_tokens": max_tokens, + "n": n, + "presence_penalty": presence_penalty, + "response_format": response_format, + "seed": seed, + "stop": stop, + "stream": stream, + "temperature": temperature, + "tool_choice": tool_choice, + "tools": tools, + "top_logprobs": top_logprobs, + "top_p": top_p, + "user": user, + }, + completion_create_params.CompletionCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ChatCompletion, + stream=stream or False, + stream_cls=Stream[ChatCompletionChunk], + ) + + +class AsyncCompletions(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncCompletionsWithRawResponse: + return AsyncCompletionsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncCompletionsWithStreamingResponse: + return AsyncCompletionsWithStreamingResponse(self) + + @overload + async def create( + self, + *, + messages: Iterable[ChatCompletionMessageParam], + model: Union[str, ChatModel], + frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, + function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN, + functions: Iterable[completion_create_params.Function] | NotGiven = NOT_GIVEN, + logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, + logprobs: Optional[bool] | NotGiven = NOT_GIVEN, + max_tokens: Optional[int] | NotGiven = NOT_GIVEN, + n: Optional[int] | NotGiven = NOT_GIVEN, + 
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, + response_format: completion_create_params.ResponseFormat | NotGiven = NOT_GIVEN, + seed: Optional[int] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], List[str]] | NotGiven = NOT_GIVEN, + stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN, + tools: Iterable[ChatCompletionToolParam] | NotGiven = NOT_GIVEN, + top_logprobs: Optional[int] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ChatCompletion: + """ + Creates a model response for the given chat conversation. + + Args: + messages: A list of messages comprising the conversation so far. + [Example Python code](https://cookbook.openai.com/examples/how_to_format_inputs_to_chatgpt_models). + + model: ID of the model to use. See the + [model endpoint compatibility](https://platform.openai.com/docs/models/model-endpoint-compatibility) + table for details on which models work with the Chat API. + + frequency_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on their + existing frequency in the text so far, decreasing the model's likelihood to + repeat the same line verbatim. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + function_call: Deprecated in favor of `tool_choice`. + + Controls which (if any) function is called by the model. 
`none` means the model + will not call a function and instead generates a message. `auto` means the model + can pick between generating a message or calling a function. Specifying a + particular function via `{"name": "my_function"}` forces the model to call that + function. + + `none` is the default when no functions are present. `auto` is the default if + functions are present. + + functions: Deprecated in favor of `tools`. + + A list of functions the model may generate JSON inputs for. + + logit_bias: Modify the likelihood of specified tokens appearing in the completion. + + Accepts a JSON object that maps tokens (specified by their token ID in the + tokenizer) to an associated bias value from -100 to 100. Mathematically, the + bias is added to the logits generated by the model prior to sampling. The exact + effect will vary per model, but values between -1 and 1 should decrease or + increase likelihood of selection; values like -100 or 100 should result in a ban + or exclusive selection of the relevant token. + + logprobs: Whether to return log probabilities of the output tokens or not. If true, + returns the log probabilities of each output token returned in the `content` of + `message`. + + max_tokens: The maximum number of [tokens](/tokenizer) that can be generated in the chat + completion. + + The total length of input tokens and generated tokens is limited by the model's + context length. + [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + for counting tokens. + + n: How many chat completion choices to generate for each input message. Note that + you will be charged based on the number of generated tokens across all of the + choices. Keep `n` as `1` to minimize costs. + + presence_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on + whether they appear in the text so far, increasing the model's likelihood to + talk about new topics. 
+ + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + response_format: An object specifying the format that the model must output. Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models newer than `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + + seed: This feature is in Beta. If specified, our system will make a best effort to + sample deterministically, such that repeated requests with the same `seed` and + parameters should return the same result. Determinism is not guaranteed, and you + should refer to the `system_fingerprint` response parameter to monitor changes + in the backend. + + stop: Up to 4 sequences where the API will stop generating further tokens. + + stream: If set, partial message deltas will be sent, like in ChatGPT. Tokens will be + sent as data-only + [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + as they become available, with the stream terminated by a `data: [DONE]` + message. + [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + + temperature: What sampling temperature to use, between 0 and 2. 
Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + We generally recommend altering this or `top_p` but not both. + + tool_choice: Controls which (if any) tool is called by the model. `none` means the model will + not call any tool and instead generates a message. `auto` means the model can + pick between generating a message or calling one or more tools. `required` means + the model must call one or more tools. Specifying a particular tool via + `{"type": "function", "function": {"name": "my_function"}}` forces the model to + call that tool. + + `none` is the default when no tools are present. `auto` is the default if tools + are present. + + tools: A list of tools the model may call. Currently, only functions are supported as a + tool. Use this to provide a list of functions the model may generate JSON inputs + for. A max of 128 functions are supported. + + top_logprobs: An integer between 0 and 20 specifying the number of most likely tokens to + return at each token position, each with an associated log probability. + `logprobs` must be set to `true` if this parameter is used. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or `temperature` but not both. + + user: A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... 
+ + @overload + async def create( + self, + *, + messages: Iterable[ChatCompletionMessageParam], + model: Union[str, ChatModel], + stream: Literal[True], + frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, + function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN, + functions: Iterable[completion_create_params.Function] | NotGiven = NOT_GIVEN, + logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, + logprobs: Optional[bool] | NotGiven = NOT_GIVEN, + max_tokens: Optional[int] | NotGiven = NOT_GIVEN, + n: Optional[int] | NotGiven = NOT_GIVEN, + presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, + response_format: completion_create_params.ResponseFormat | NotGiven = NOT_GIVEN, + seed: Optional[int] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], List[str]] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN, + tools: Iterable[ChatCompletionToolParam] | NotGiven = NOT_GIVEN, + top_logprobs: Optional[int] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncStream[ChatCompletionChunk]: + """ + Creates a model response for the given chat conversation. + + Args: + messages: A list of messages comprising the conversation so far. + [Example Python code](https://cookbook.openai.com/examples/how_to_format_inputs_to_chatgpt_models). + + model: ID of the model to use. 
See the + [model endpoint compatibility](https://platform.openai.com/docs/models/model-endpoint-compatibility) + table for details on which models work with the Chat API. + + stream: If set, partial message deltas will be sent, like in ChatGPT. Tokens will be + sent as data-only + [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + as they become available, with the stream terminated by a `data: [DONE]` + message. + [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + + frequency_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on their + existing frequency in the text so far, decreasing the model's likelihood to + repeat the same line verbatim. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + function_call: Deprecated in favor of `tool_choice`. + + Controls which (if any) function is called by the model. `none` means the model + will not call a function and instead generates a message. `auto` means the model + can pick between generating a message or calling a function. Specifying a + particular function via `{"name": "my_function"}` forces the model to call that + function. + + `none` is the default when no functions are present. `auto` is the default if + functions are present. + + functions: Deprecated in favor of `tools`. + + A list of functions the model may generate JSON inputs for. + + logit_bias: Modify the likelihood of specified tokens appearing in the completion. + + Accepts a JSON object that maps tokens (specified by their token ID in the + tokenizer) to an associated bias value from -100 to 100. Mathematically, the + bias is added to the logits generated by the model prior to sampling. 
The exact + effect will vary per model, but values between -1 and 1 should decrease or + increase likelihood of selection; values like -100 or 100 should result in a ban + or exclusive selection of the relevant token. + + logprobs: Whether to return log probabilities of the output tokens or not. If true, + returns the log probabilities of each output token returned in the `content` of + `message`. + + max_tokens: The maximum number of [tokens](/tokenizer) that can be generated in the chat + completion. + + The total length of input tokens and generated tokens is limited by the model's + context length. + [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + for counting tokens. + + n: How many chat completion choices to generate for each input message. Note that + you will be charged based on the number of generated tokens across all of the + choices. Keep `n` as `1` to minimize costs. + + presence_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on + whether they appear in the text so far, increasing the model's likelihood to + talk about new topics. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + response_format: An object specifying the format that the model must output. Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models newer than `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. 
Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + + seed: This feature is in Beta. If specified, our system will make a best effort to + sample deterministically, such that repeated requests with the same `seed` and + parameters should return the same result. Determinism is not guaranteed, and you + should refer to the `system_fingerprint` response parameter to monitor changes + in the backend. + + stop: Up to 4 sequences where the API will stop generating further tokens. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + We generally recommend altering this or `top_p` but not both. + + tool_choice: Controls which (if any) tool is called by the model. `none` means the model will + not call any tool and instead generates a message. `auto` means the model can + pick between generating a message or calling one or more tools. `required` means + the model must call one or more tools. Specifying a particular tool via + `{"type": "function", "function": {"name": "my_function"}}` forces the model to + call that tool. + + `none` is the default when no tools are present. `auto` is the default if tools + are present. + + tools: A list of tools the model may call. Currently, only functions are supported as a + tool. Use this to provide a list of functions the model may generate JSON inputs + for. A max of 128 functions are supported. + + top_logprobs: An integer between 0 and 20 specifying the number of most likely tokens to + return at each token position, each with an associated log probability. + `logprobs` must be set to `true` if this parameter is used. 
+ + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or `temperature` but not both. + + user: A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... + + @overload + async def create( + self, + *, + messages: Iterable[ChatCompletionMessageParam], + model: Union[str, ChatModel], + stream: bool, + frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, + function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN, + functions: Iterable[completion_create_params.Function] | NotGiven = NOT_GIVEN, + logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, + logprobs: Optional[bool] | NotGiven = NOT_GIVEN, + max_tokens: Optional[int] | NotGiven = NOT_GIVEN, + n: Optional[int] | NotGiven = NOT_GIVEN, + presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, + response_format: completion_create_params.ResponseFormat | NotGiven = NOT_GIVEN, + seed: Optional[int] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], List[str]] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN, + tools: Iterable[ChatCompletionToolParam] | NotGiven = NOT_GIVEN, + top_logprobs: Optional[int] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to 
pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ChatCompletion | AsyncStream[ChatCompletionChunk]: + """ + Creates a model response for the given chat conversation. + + Args: + messages: A list of messages comprising the conversation so far. + [Example Python code](https://cookbook.openai.com/examples/how_to_format_inputs_to_chatgpt_models). + + model: ID of the model to use. See the + [model endpoint compatibility](https://platform.openai.com/docs/models/model-endpoint-compatibility) + table for details on which models work with the Chat API. + + stream: If set, partial message deltas will be sent, like in ChatGPT. Tokens will be + sent as data-only + [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + as they become available, with the stream terminated by a `data: [DONE]` + message. + [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + + frequency_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on their + existing frequency in the text so far, decreasing the model's likelihood to + repeat the same line verbatim. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + function_call: Deprecated in favor of `tool_choice`. + + Controls which (if any) function is called by the model. `none` means the model + will not call a function and instead generates a message. `auto` means the model + can pick between generating a message or calling a function. 
Specifying a + particular function via `{"name": "my_function"}` forces the model to call that + function. + + `none` is the default when no functions are present. `auto` is the default if + functions are present. + + functions: Deprecated in favor of `tools`. + + A list of functions the model may generate JSON inputs for. + + logit_bias: Modify the likelihood of specified tokens appearing in the completion. + + Accepts a JSON object that maps tokens (specified by their token ID in the + tokenizer) to an associated bias value from -100 to 100. Mathematically, the + bias is added to the logits generated by the model prior to sampling. The exact + effect will vary per model, but values between -1 and 1 should decrease or + increase likelihood of selection; values like -100 or 100 should result in a ban + or exclusive selection of the relevant token. + + logprobs: Whether to return log probabilities of the output tokens or not. If true, + returns the log probabilities of each output token returned in the `content` of + `message`. + + max_tokens: The maximum number of [tokens](/tokenizer) that can be generated in the chat + completion. + + The total length of input tokens and generated tokens is limited by the model's + context length. + [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + for counting tokens. + + n: How many chat completion choices to generate for each input message. Note that + you will be charged based on the number of generated tokens across all of the + choices. Keep `n` as `1` to minimize costs. + + presence_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on + whether they appear in the text so far, increasing the model's likelihood to + talk about new topics. 
+ + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + response_format: An object specifying the format that the model must output. Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models newer than `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + + seed: This feature is in Beta. If specified, our system will make a best effort to + sample deterministically, such that repeated requests with the same `seed` and + parameters should return the same result. Determinism is not guaranteed, and you + should refer to the `system_fingerprint` response parameter to monitor changes + in the backend. + + stop: Up to 4 sequences where the API will stop generating further tokens. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + We generally recommend altering this or `top_p` but not both. + + tool_choice: Controls which (if any) tool is called by the model. `none` means the model will + not call any tool and instead generates a message. `auto` means the model can + pick between generating a message or calling one or more tools. 
`required` means + the model must call one or more tools. Specifying a particular tool via + `{"type": "function", "function": {"name": "my_function"}}` forces the model to + call that tool. + + `none` is the default when no tools are present. `auto` is the default if tools + are present. + + tools: A list of tools the model may call. Currently, only functions are supported as a + tool. Use this to provide a list of functions the model may generate JSON inputs + for. A max of 128 functions are supported. + + top_logprobs: An integer between 0 and 20 specifying the number of most likely tokens to + return at each token position, each with an associated log probability. + `logprobs` must be set to `true` if this parameter is used. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or `temperature` but not both. + + user: A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... 
+ + @required_args(["messages", "model"], ["messages", "model", "stream"]) + async def create( + self, + *, + messages: Iterable[ChatCompletionMessageParam], + model: Union[str, ChatModel], + frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, + function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN, + functions: Iterable[completion_create_params.Function] | NotGiven = NOT_GIVEN, + logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, + logprobs: Optional[bool] | NotGiven = NOT_GIVEN, + max_tokens: Optional[int] | NotGiven = NOT_GIVEN, + n: Optional[int] | NotGiven = NOT_GIVEN, + presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, + response_format: completion_create_params.ResponseFormat | NotGiven = NOT_GIVEN, + seed: Optional[int] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], List[str]] | NotGiven = NOT_GIVEN, + stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN, + tools: Iterable[ChatCompletionToolParam] | NotGiven = NOT_GIVEN, + top_logprobs: Optional[int] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ChatCompletion | AsyncStream[ChatCompletionChunk]: + return await self._post( + "/chat/completions", + body=await async_maybe_transform( + { + "messages": messages, + "model": model, + "frequency_penalty": frequency_penalty, + "function_call": function_call, + "functions": functions, + "logit_bias": logit_bias, + "logprobs": logprobs, + "max_tokens": max_tokens, + "n": n, + "presence_penalty": presence_penalty, + "response_format": response_format, + "seed": seed, + "stop": stop, + "stream": stream, + "temperature": temperature, + "tool_choice": tool_choice, + "tools": tools, + "top_logprobs": top_logprobs, + "top_p": top_p, + "user": user, + }, + completion_create_params.CompletionCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ChatCompletion, + stream=stream or False, + stream_cls=AsyncStream[ChatCompletionChunk], + ) + + +class CompletionsWithRawResponse: + def __init__(self, completions: Completions) -> None: + self._completions = completions + + self.create = _legacy_response.to_raw_response_wrapper( + completions.create, + ) + + +class AsyncCompletionsWithRawResponse: + def __init__(self, completions: AsyncCompletions) -> None: + self._completions = completions + + self.create = _legacy_response.async_to_raw_response_wrapper( + completions.create, + ) + + +class CompletionsWithStreamingResponse: + def __init__(self, completions: Completions) -> None: + self._completions = completions + + self.create = to_streamed_response_wrapper( + completions.create, + ) + + +class AsyncCompletionsWithStreamingResponse: + def __init__(self, completions: AsyncCompletions) -> None: + self._completions = completions + + self.create = async_to_streamed_response_wrapper( + 
completions.create, + ) diff --git a/.venv/Lib/site-packages/openai/resources/completions.py b/.venv/Lib/site-packages/openai/resources/completions.py new file mode 100644 index 00000000..eb6ca310 --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/completions.py @@ -0,0 +1,1103 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict, List, Union, Iterable, Optional, overload +from typing_extensions import Literal + +import httpx + +from .. import _legacy_response +from ..types import completion_create_params +from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from .._utils import ( + required_args, + maybe_transform, + async_maybe_transform, +) +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper +from .._streaming import Stream, AsyncStream +from .._base_client import ( + make_request_options, +) +from ..types.completion import Completion + +__all__ = ["Completions", "AsyncCompletions"] + + +class Completions(SyncAPIResource): + @cached_property + def with_raw_response(self) -> CompletionsWithRawResponse: + return CompletionsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> CompletionsWithStreamingResponse: + return CompletionsWithStreamingResponse(self) + + @overload + def create( + self, + *, + model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]], + prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None], + best_of: Optional[int] | NotGiven = NOT_GIVEN, + echo: Optional[bool] | NotGiven = NOT_GIVEN, + frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, + logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, + logprobs: Optional[int] | NotGiven = NOT_GIVEN, + max_tokens: Optional[int] | NotGiven = NOT_GIVEN, + n: 
Optional[int] | NotGiven = NOT_GIVEN, + presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, + seed: Optional[int] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN, + suffix: Optional[str] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Completion: + """ + Creates a completion for the provided prompt and parameters. + + Args: + model: ID of the model to use. You can use the + [List models](https://platform.openai.com/docs/api-reference/models/list) API to + see all of your available models, or see our + [Model overview](https://platform.openai.com/docs/models/overview) for + descriptions of them. + + prompt: The prompt(s) to generate completions for, encoded as a string, array of + strings, array of tokens, or array of token arrays. + + Note that <|endoftext|> is the document separator that the model sees during + training, so if a prompt is not specified the model will generate as if from the + beginning of a new document. + + best_of: Generates `best_of` completions server-side and returns the "best" (the one with + the highest log probability per token). Results cannot be streamed. + + When used with `n`, `best_of` controls the number of candidate completions and + `n` specifies how many to return – `best_of` must be greater than `n`. + + **Note:** Because this parameter generates many completions, it can quickly + consume your token quota. 
Use carefully and ensure that you have reasonable + settings for `max_tokens` and `stop`. + + echo: Echo back the prompt in addition to the completion + + frequency_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on their + existing frequency in the text so far, decreasing the model's likelihood to + repeat the same line verbatim. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + logit_bias: Modify the likelihood of specified tokens appearing in the completion. + + Accepts a JSON object that maps tokens (specified by their token ID in the GPT + tokenizer) to an associated bias value from -100 to 100. You can use this + [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs. + Mathematically, the bias is added to the logits generated by the model prior to + sampling. The exact effect will vary per model, but values between -1 and 1 + should decrease or increase likelihood of selection; values like -100 or 100 + should result in a ban or exclusive selection of the relevant token. + + As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token + from being generated. + + logprobs: Include the log probabilities on the `logprobs` most likely output tokens, as + well the chosen tokens. For example, if `logprobs` is 5, the API will return a + list of the 5 most likely tokens. The API will always return the `logprob` of + the sampled token, so there may be up to `logprobs+1` elements in the response. + + The maximum value for `logprobs` is 5. + + max_tokens: The maximum number of [tokens](/tokenizer) that can be generated in the + completion. + + The token count of your prompt plus `max_tokens` cannot exceed the model's + context length. + [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + for counting tokens. + + n: How many completions to generate for each prompt. 
+ + **Note:** Because this parameter generates many completions, it can quickly + consume your token quota. Use carefully and ensure that you have reasonable + settings for `max_tokens` and `stop`. + + presence_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on + whether they appear in the text so far, increasing the model's likelihood to + talk about new topics. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + seed: If specified, our system will make a best effort to sample deterministically, + such that repeated requests with the same `seed` and parameters should return + the same result. + + Determinism is not guaranteed, and you should refer to the `system_fingerprint` + response parameter to monitor changes in the backend. + + stop: Up to 4 sequences where the API will stop generating further tokens. The + returned text will not contain the stop sequence. + + stream: Whether to stream back partial progress. If set, tokens will be sent as + data-only + [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + as they become available, with the stream terminated by a `data: [DONE]` + message. + [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + + suffix: The suffix that comes after a completion of inserted text. + + This parameter is only supported for `gpt-3.5-turbo-instruct`. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + We generally recommend altering this or `top_p` but not both. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. 
So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or `temperature` but not both. + + user: A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... + + @overload + def create( + self, + *, + model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]], + prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None], + stream: Literal[True], + best_of: Optional[int] | NotGiven = NOT_GIVEN, + echo: Optional[bool] | NotGiven = NOT_GIVEN, + frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, + logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, + logprobs: Optional[int] | NotGiven = NOT_GIVEN, + max_tokens: Optional[int] | NotGiven = NOT_GIVEN, + n: Optional[int] | NotGiven = NOT_GIVEN, + presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, + seed: Optional[int] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + suffix: Optional[str] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Stream[Completion]: + """ + Creates a completion for the provided prompt and parameters. + + Args: + model: ID of the model to use. You can use the + [List models](https://platform.openai.com/docs/api-reference/models/list) API to + see all of your available models, or see our + [Model overview](https://platform.openai.com/docs/models/overview) for + descriptions of them. + + prompt: The prompt(s) to generate completions for, encoded as a string, array of + strings, array of tokens, or array of token arrays. + + Note that <|endoftext|> is the document separator that the model sees during + training, so if a prompt is not specified the model will generate as if from the + beginning of a new document. + + stream: Whether to stream back partial progress. If set, tokens will be sent as + data-only + [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + as they become available, with the stream terminated by a `data: [DONE]` + message. + [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + + best_of: Generates `best_of` completions server-side and returns the "best" (the one with + the highest log probability per token). Results cannot be streamed. + + When used with `n`, `best_of` controls the number of candidate completions and + `n` specifies how many to return – `best_of` must be greater than `n`. + + **Note:** Because this parameter generates many completions, it can quickly + consume your token quota. Use carefully and ensure that you have reasonable + settings for `max_tokens` and `stop`. + + echo: Echo back the prompt in addition to the completion + + frequency_penalty: Number between -2.0 and 2.0. 
Positive values penalize new tokens based on their + existing frequency in the text so far, decreasing the model's likelihood to + repeat the same line verbatim. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + logit_bias: Modify the likelihood of specified tokens appearing in the completion. + + Accepts a JSON object that maps tokens (specified by their token ID in the GPT + tokenizer) to an associated bias value from -100 to 100. You can use this + [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs. + Mathematically, the bias is added to the logits generated by the model prior to + sampling. The exact effect will vary per model, but values between -1 and 1 + should decrease or increase likelihood of selection; values like -100 or 100 + should result in a ban or exclusive selection of the relevant token. + + As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token + from being generated. + + logprobs: Include the log probabilities on the `logprobs` most likely output tokens, as + well the chosen tokens. For example, if `logprobs` is 5, the API will return a + list of the 5 most likely tokens. The API will always return the `logprob` of + the sampled token, so there may be up to `logprobs+1` elements in the response. + + The maximum value for `logprobs` is 5. + + max_tokens: The maximum number of [tokens](/tokenizer) that can be generated in the + completion. + + The token count of your prompt plus `max_tokens` cannot exceed the model's + context length. + [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + for counting tokens. + + n: How many completions to generate for each prompt. + + **Note:** Because this parameter generates many completions, it can quickly + consume your token quota. Use carefully and ensure that you have reasonable + settings for `max_tokens` and `stop`. 
+ + presence_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on + whether they appear in the text so far, increasing the model's likelihood to + talk about new topics. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + seed: If specified, our system will make a best effort to sample deterministically, + such that repeated requests with the same `seed` and parameters should return + the same result. + + Determinism is not guaranteed, and you should refer to the `system_fingerprint` + response parameter to monitor changes in the backend. + + stop: Up to 4 sequences where the API will stop generating further tokens. The + returned text will not contain the stop sequence. + + suffix: The suffix that comes after a completion of inserted text. + + This parameter is only supported for `gpt-3.5-turbo-instruct`. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + We generally recommend altering this or `top_p` but not both. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or `temperature` but not both. + + user: A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... 
+ + @overload + def create( + self, + *, + model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]], + prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None], + stream: bool, + best_of: Optional[int] | NotGiven = NOT_GIVEN, + echo: Optional[bool] | NotGiven = NOT_GIVEN, + frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, + logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, + logprobs: Optional[int] | NotGiven = NOT_GIVEN, + max_tokens: Optional[int] | NotGiven = NOT_GIVEN, + n: Optional[int] | NotGiven = NOT_GIVEN, + presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, + seed: Optional[int] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + suffix: Optional[str] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Completion | Stream[Completion]: + """ + Creates a completion for the provided prompt and parameters. + + Args: + model: ID of the model to use. You can use the + [List models](https://platform.openai.com/docs/api-reference/models/list) API to + see all of your available models, or see our + [Model overview](https://platform.openai.com/docs/models/overview) for + descriptions of them. + + prompt: The prompt(s) to generate completions for, encoded as a string, array of + strings, array of tokens, or array of token arrays. 
+ + Note that <|endoftext|> is the document separator that the model sees during + training, so if a prompt is not specified the model will generate as if from the + beginning of a new document. + + stream: Whether to stream back partial progress. If set, tokens will be sent as + data-only + [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + as they become available, with the stream terminated by a `data: [DONE]` + message. + [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + + best_of: Generates `best_of` completions server-side and returns the "best" (the one with + the highest log probability per token). Results cannot be streamed. + + When used with `n`, `best_of` controls the number of candidate completions and + `n` specifies how many to return – `best_of` must be greater than `n`. + + **Note:** Because this parameter generates many completions, it can quickly + consume your token quota. Use carefully and ensure that you have reasonable + settings for `max_tokens` and `stop`. + + echo: Echo back the prompt in addition to the completion + + frequency_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on their + existing frequency in the text so far, decreasing the model's likelihood to + repeat the same line verbatim. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + logit_bias: Modify the likelihood of specified tokens appearing in the completion. + + Accepts a JSON object that maps tokens (specified by their token ID in the GPT + tokenizer) to an associated bias value from -100 to 100. You can use this + [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs. + Mathematically, the bias is added to the logits generated by the model prior to + sampling. 
The exact effect will vary per model, but values between -1 and 1 + should decrease or increase likelihood of selection; values like -100 or 100 + should result in a ban or exclusive selection of the relevant token. + + As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token + from being generated. + + logprobs: Include the log probabilities on the `logprobs` most likely output tokens, as + well the chosen tokens. For example, if `logprobs` is 5, the API will return a + list of the 5 most likely tokens. The API will always return the `logprob` of + the sampled token, so there may be up to `logprobs+1` elements in the response. + + The maximum value for `logprobs` is 5. + + max_tokens: The maximum number of [tokens](/tokenizer) that can be generated in the + completion. + + The token count of your prompt plus `max_tokens` cannot exceed the model's + context length. + [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + for counting tokens. + + n: How many completions to generate for each prompt. + + **Note:** Because this parameter generates many completions, it can quickly + consume your token quota. Use carefully and ensure that you have reasonable + settings for `max_tokens` and `stop`. + + presence_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on + whether they appear in the text so far, increasing the model's likelihood to + talk about new topics. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + seed: If specified, our system will make a best effort to sample deterministically, + such that repeated requests with the same `seed` and parameters should return + the same result. + + Determinism is not guaranteed, and you should refer to the `system_fingerprint` + response parameter to monitor changes in the backend. 
+ + stop: Up to 4 sequences where the API will stop generating further tokens. The + returned text will not contain the stop sequence. + + suffix: The suffix that comes after a completion of inserted text. + + This parameter is only supported for `gpt-3.5-turbo-instruct`. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + We generally recommend altering this or `top_p` but not both. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or `temperature` but not both. + + user: A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... 
+ + @required_args(["model", "prompt"], ["model", "prompt", "stream"]) + def create( + self, + *, + model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]], + prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None], + best_of: Optional[int] | NotGiven = NOT_GIVEN, + echo: Optional[bool] | NotGiven = NOT_GIVEN, + frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, + logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, + logprobs: Optional[int] | NotGiven = NOT_GIVEN, + max_tokens: Optional[int] | NotGiven = NOT_GIVEN, + n: Optional[int] | NotGiven = NOT_GIVEN, + presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, + seed: Optional[int] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, + suffix: Optional[str] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Completion | Stream[Completion]: + return self._post( + "/completions", + body=maybe_transform( + { + "model": model, + "prompt": prompt, + "best_of": best_of, + "echo": echo, + "frequency_penalty": frequency_penalty, + "logit_bias": logit_bias, + "logprobs": logprobs, + "max_tokens": max_tokens, + "n": n, + "presence_penalty": presence_penalty, + "seed": seed, + "stop": stop, + "stream": stream, + "suffix": suffix, + "temperature": temperature, + "top_p": top_p, + "user": user, + }, + completion_create_params.CompletionCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Completion, + stream=stream or False, + stream_cls=Stream[Completion], + ) + + +class AsyncCompletions(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncCompletionsWithRawResponse: + return AsyncCompletionsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncCompletionsWithStreamingResponse: + return AsyncCompletionsWithStreamingResponse(self) + + @overload + async def create( + self, + *, + model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]], + prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None], + best_of: Optional[int] | NotGiven = NOT_GIVEN, + echo: Optional[bool] | NotGiven = NOT_GIVEN, + frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, + logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, + logprobs: Optional[int] | NotGiven = NOT_GIVEN, + max_tokens: Optional[int] | NotGiven = NOT_GIVEN, + n: Optional[int] | NotGiven = NOT_GIVEN, + presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, + seed: Optional[int] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], List[str], 
None] | NotGiven = NOT_GIVEN, + stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN, + suffix: Optional[str] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Completion: + """ + Creates a completion for the provided prompt and parameters. + + Args: + model: ID of the model to use. You can use the + [List models](https://platform.openai.com/docs/api-reference/models/list) API to + see all of your available models, or see our + [Model overview](https://platform.openai.com/docs/models/overview) for + descriptions of them. + + prompt: The prompt(s) to generate completions for, encoded as a string, array of + strings, array of tokens, or array of token arrays. + + Note that <|endoftext|> is the document separator that the model sees during + training, so if a prompt is not specified the model will generate as if from the + beginning of a new document. + + best_of: Generates `best_of` completions server-side and returns the "best" (the one with + the highest log probability per token). Results cannot be streamed. + + When used with `n`, `best_of` controls the number of candidate completions and + `n` specifies how many to return – `best_of` must be greater than `n`. + + **Note:** Because this parameter generates many completions, it can quickly + consume your token quota. Use carefully and ensure that you have reasonable + settings for `max_tokens` and `stop`. 
+ + echo: Echo back the prompt in addition to the completion + + frequency_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on their + existing frequency in the text so far, decreasing the model's likelihood to + repeat the same line verbatim. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + logit_bias: Modify the likelihood of specified tokens appearing in the completion. + + Accepts a JSON object that maps tokens (specified by their token ID in the GPT + tokenizer) to an associated bias value from -100 to 100. You can use this + [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs. + Mathematically, the bias is added to the logits generated by the model prior to + sampling. The exact effect will vary per model, but values between -1 and 1 + should decrease or increase likelihood of selection; values like -100 or 100 + should result in a ban or exclusive selection of the relevant token. + + As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token + from being generated. + + logprobs: Include the log probabilities on the `logprobs` most likely output tokens, as + well the chosen tokens. For example, if `logprobs` is 5, the API will return a + list of the 5 most likely tokens. The API will always return the `logprob` of + the sampled token, so there may be up to `logprobs+1` elements in the response. + + The maximum value for `logprobs` is 5. + + max_tokens: The maximum number of [tokens](/tokenizer) that can be generated in the + completion. + + The token count of your prompt plus `max_tokens` cannot exceed the model's + context length. + [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + for counting tokens. + + n: How many completions to generate for each prompt. 
+ + **Note:** Because this parameter generates many completions, it can quickly + consume your token quota. Use carefully and ensure that you have reasonable + settings for `max_tokens` and `stop`. + + presence_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on + whether they appear in the text so far, increasing the model's likelihood to + talk about new topics. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + seed: If specified, our system will make a best effort to sample deterministically, + such that repeated requests with the same `seed` and parameters should return + the same result. + + Determinism is not guaranteed, and you should refer to the `system_fingerprint` + response parameter to monitor changes in the backend. + + stop: Up to 4 sequences where the API will stop generating further tokens. The + returned text will not contain the stop sequence. + + stream: Whether to stream back partial progress. If set, tokens will be sent as + data-only + [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + as they become available, with the stream terminated by a `data: [DONE]` + message. + [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + + suffix: The suffix that comes after a completion of inserted text. + + This parameter is only supported for `gpt-3.5-turbo-instruct`. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + We generally recommend altering this or `top_p` but not both. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. 
So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or `temperature` but not both. + + user: A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... + + @overload + async def create( + self, + *, + model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]], + prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None], + stream: Literal[True], + best_of: Optional[int] | NotGiven = NOT_GIVEN, + echo: Optional[bool] | NotGiven = NOT_GIVEN, + frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, + logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, + logprobs: Optional[int] | NotGiven = NOT_GIVEN, + max_tokens: Optional[int] | NotGiven = NOT_GIVEN, + n: Optional[int] | NotGiven = NOT_GIVEN, + presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, + seed: Optional[int] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + suffix: Optional[str] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncStream[Completion]: + """ + Creates a completion for the provided prompt and parameters. + + Args: + model: ID of the model to use. You can use the + [List models](https://platform.openai.com/docs/api-reference/models/list) API to + see all of your available models, or see our + [Model overview](https://platform.openai.com/docs/models/overview) for + descriptions of them. + + prompt: The prompt(s) to generate completions for, encoded as a string, array of + strings, array of tokens, or array of token arrays. + + Note that <|endoftext|> is the document separator that the model sees during + training, so if a prompt is not specified the model will generate as if from the + beginning of a new document. + + stream: Whether to stream back partial progress. If set, tokens will be sent as + data-only + [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + as they become available, with the stream terminated by a `data: [DONE]` + message. + [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + + best_of: Generates `best_of` completions server-side and returns the "best" (the one with + the highest log probability per token). Results cannot be streamed. + + When used with `n`, `best_of` controls the number of candidate completions and + `n` specifies how many to return – `best_of` must be greater than `n`. + + **Note:** Because this parameter generates many completions, it can quickly + consume your token quota. Use carefully and ensure that you have reasonable + settings for `max_tokens` and `stop`. + + echo: Echo back the prompt in addition to the completion + + frequency_penalty: Number between -2.0 and 2.0. 
Positive values penalize new tokens based on their + existing frequency in the text so far, decreasing the model's likelihood to + repeat the same line verbatim. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + logit_bias: Modify the likelihood of specified tokens appearing in the completion. + + Accepts a JSON object that maps tokens (specified by their token ID in the GPT + tokenizer) to an associated bias value from -100 to 100. You can use this + [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs. + Mathematically, the bias is added to the logits generated by the model prior to + sampling. The exact effect will vary per model, but values between -1 and 1 + should decrease or increase likelihood of selection; values like -100 or 100 + should result in a ban or exclusive selection of the relevant token. + + As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token + from being generated. + + logprobs: Include the log probabilities on the `logprobs` most likely output tokens, as + well the chosen tokens. For example, if `logprobs` is 5, the API will return a + list of the 5 most likely tokens. The API will always return the `logprob` of + the sampled token, so there may be up to `logprobs+1` elements in the response. + + The maximum value for `logprobs` is 5. + + max_tokens: The maximum number of [tokens](/tokenizer) that can be generated in the + completion. + + The token count of your prompt plus `max_tokens` cannot exceed the model's + context length. + [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + for counting tokens. + + n: How many completions to generate for each prompt. + + **Note:** Because this parameter generates many completions, it can quickly + consume your token quota. Use carefully and ensure that you have reasonable + settings for `max_tokens` and `stop`. 
+ + presence_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on + whether they appear in the text so far, increasing the model's likelihood to + talk about new topics. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + seed: If specified, our system will make a best effort to sample deterministically, + such that repeated requests with the same `seed` and parameters should return + the same result. + + Determinism is not guaranteed, and you should refer to the `system_fingerprint` + response parameter to monitor changes in the backend. + + stop: Up to 4 sequences where the API will stop generating further tokens. The + returned text will not contain the stop sequence. + + suffix: The suffix that comes after a completion of inserted text. + + This parameter is only supported for `gpt-3.5-turbo-instruct`. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + We generally recommend altering this or `top_p` but not both. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or `temperature` but not both. + + user: A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... 
+ + @overload + async def create( + self, + *, + model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]], + prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None], + stream: bool, + best_of: Optional[int] | NotGiven = NOT_GIVEN, + echo: Optional[bool] | NotGiven = NOT_GIVEN, + frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, + logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, + logprobs: Optional[int] | NotGiven = NOT_GIVEN, + max_tokens: Optional[int] | NotGiven = NOT_GIVEN, + n: Optional[int] | NotGiven = NOT_GIVEN, + presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, + seed: Optional[int] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + suffix: Optional[str] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Completion | AsyncStream[Completion]: + """ + Creates a completion for the provided prompt and parameters. + + Args: + model: ID of the model to use. You can use the + [List models](https://platform.openai.com/docs/api-reference/models/list) API to + see all of your available models, or see our + [Model overview](https://platform.openai.com/docs/models/overview) for + descriptions of them. + + prompt: The prompt(s) to generate completions for, encoded as a string, array of + strings, array of tokens, or array of token arrays. 
+ + Note that <|endoftext|> is the document separator that the model sees during + training, so if a prompt is not specified the model will generate as if from the + beginning of a new document. + + stream: Whether to stream back partial progress. If set, tokens will be sent as + data-only + [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + as they become available, with the stream terminated by a `data: [DONE]` + message. + [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + + best_of: Generates `best_of` completions server-side and returns the "best" (the one with + the highest log probability per token). Results cannot be streamed. + + When used with `n`, `best_of` controls the number of candidate completions and + `n` specifies how many to return – `best_of` must be greater than `n`. + + **Note:** Because this parameter generates many completions, it can quickly + consume your token quota. Use carefully and ensure that you have reasonable + settings for `max_tokens` and `stop`. + + echo: Echo back the prompt in addition to the completion + + frequency_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on their + existing frequency in the text so far, decreasing the model's likelihood to + repeat the same line verbatim. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + logit_bias: Modify the likelihood of specified tokens appearing in the completion. + + Accepts a JSON object that maps tokens (specified by their token ID in the GPT + tokenizer) to an associated bias value from -100 to 100. You can use this + [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs. + Mathematically, the bias is added to the logits generated by the model prior to + sampling. 
The exact effect will vary per model, but values between -1 and 1 + should decrease or increase likelihood of selection; values like -100 or 100 + should result in a ban or exclusive selection of the relevant token. + + As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token + from being generated. + + logprobs: Include the log probabilities on the `logprobs` most likely output tokens, as + well the chosen tokens. For example, if `logprobs` is 5, the API will return a + list of the 5 most likely tokens. The API will always return the `logprob` of + the sampled token, so there may be up to `logprobs+1` elements in the response. + + The maximum value for `logprobs` is 5. + + max_tokens: The maximum number of [tokens](/tokenizer) that can be generated in the + completion. + + The token count of your prompt plus `max_tokens` cannot exceed the model's + context length. + [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + for counting tokens. + + n: How many completions to generate for each prompt. + + **Note:** Because this parameter generates many completions, it can quickly + consume your token quota. Use carefully and ensure that you have reasonable + settings for `max_tokens` and `stop`. + + presence_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on + whether they appear in the text so far, increasing the model's likelihood to + talk about new topics. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + + seed: If specified, our system will make a best effort to sample deterministically, + such that repeated requests with the same `seed` and parameters should return + the same result. + + Determinism is not guaranteed, and you should refer to the `system_fingerprint` + response parameter to monitor changes in the backend. 
+ + stop: Up to 4 sequences where the API will stop generating further tokens. The + returned text will not contain the stop sequence. + + suffix: The suffix that comes after a completion of inserted text. + + This parameter is only supported for `gpt-3.5-turbo-instruct`. + + temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values like 0.2 will make it more + focused and deterministic. + + We generally recommend altering this or `top_p` but not both. + + top_p: An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or `temperature` but not both. + + user: A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... 
+ + @required_args(["model", "prompt"], ["model", "prompt", "stream"]) + async def create( + self, + *, + model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]], + prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None], + best_of: Optional[int] | NotGiven = NOT_GIVEN, + echo: Optional[bool] | NotGiven = NOT_GIVEN, + frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, + logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, + logprobs: Optional[int] | NotGiven = NOT_GIVEN, + max_tokens: Optional[int] | NotGiven = NOT_GIVEN, + n: Optional[int] | NotGiven = NOT_GIVEN, + presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, + seed: Optional[int] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, + suffix: Optional[str] | NotGiven = NOT_GIVEN, + temperature: Optional[float] | NotGiven = NOT_GIVEN, + top_p: Optional[float] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Completion | AsyncStream[Completion]: + return await self._post( + "/completions", + body=await async_maybe_transform( + { + "model": model, + "prompt": prompt, + "best_of": best_of, + "echo": echo, + "frequency_penalty": frequency_penalty, + "logit_bias": logit_bias, + "logprobs": logprobs, + "max_tokens": max_tokens, + "n": n, + "presence_penalty": presence_penalty, + "seed": seed, + "stop": stop, + "stream": stream, + "suffix": suffix, + "temperature": temperature, + "top_p": top_p, + "user": user, + }, + completion_create_params.CompletionCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Completion, + stream=stream or False, + stream_cls=AsyncStream[Completion], + ) + + +class CompletionsWithRawResponse: + def __init__(self, completions: Completions) -> None: + self._completions = completions + + self.create = _legacy_response.to_raw_response_wrapper( + completions.create, + ) + + +class AsyncCompletionsWithRawResponse: + def __init__(self, completions: AsyncCompletions) -> None: + self._completions = completions + + self.create = _legacy_response.async_to_raw_response_wrapper( + completions.create, + ) + + +class CompletionsWithStreamingResponse: + def __init__(self, completions: Completions) -> None: + self._completions = completions + + self.create = to_streamed_response_wrapper( + completions.create, + ) + + +class AsyncCompletionsWithStreamingResponse: + def __init__(self, completions: AsyncCompletions) -> None: + self._completions = completions + + self.create = async_to_streamed_response_wrapper( + completions.create, + ) diff --git a/.venv/Lib/site-packages/openai/resources/embeddings.py b/.venv/Lib/site-packages/openai/resources/embeddings.py new file mode 100644 
index 00000000..773b6f09 --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/embeddings.py @@ -0,0 +1,262 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import base64 +from typing import List, Union, Iterable, cast +from typing_extensions import Literal + +import httpx + +from .. import _legacy_response +from ..types import embedding_create_params +from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from .._utils import is_given, maybe_transform +from .._compat import cached_property +from .._extras import numpy as np, has_numpy +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper +from .._base_client import ( + make_request_options, +) +from ..types.create_embedding_response import CreateEmbeddingResponse + +__all__ = ["Embeddings", "AsyncEmbeddings"] + + +class Embeddings(SyncAPIResource): + @cached_property + def with_raw_response(self) -> EmbeddingsWithRawResponse: + return EmbeddingsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> EmbeddingsWithStreamingResponse: + return EmbeddingsWithStreamingResponse(self) + + def create( + self, + *, + input: Union[str, List[str], Iterable[int], Iterable[Iterable[int]]], + model: Union[str, Literal["text-embedding-ada-002", "text-embedding-3-small", "text-embedding-3-large"]], + dimensions: int | NotGiven = NOT_GIVEN, + encoding_format: Literal["float", "base64"] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> CreateEmbeddingResponse: + """ + Creates an embedding vector representing the input text. + + Args: + input: Input text to embed, encoded as a string or array of tokens. To embed multiple + inputs in a single request, pass an array of strings or array of token arrays. + The input must not exceed the max input tokens for the model (8192 tokens for + `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 + dimensions or less. + [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + for counting tokens. + + model: ID of the model to use. You can use the + [List models](https://platform.openai.com/docs/api-reference/models/list) API to + see all of your available models, or see our + [Model overview](https://platform.openai.com/docs/models/overview) for + descriptions of them. + + dimensions: The number of dimensions the resulting output embeddings should have. Only + supported in `text-embedding-3` and later models. + + encoding_format: The format to return the embeddings in. Can be either `float` or + [`base64`](https://pypi.org/project/pybase64/). + + user: A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + params = { + "input": input, + "model": model, + "user": user, + "dimensions": dimensions, + "encoding_format": encoding_format, + } + if not is_given(encoding_format) and has_numpy(): + params["encoding_format"] = "base64" + + def parser(obj: CreateEmbeddingResponse) -> CreateEmbeddingResponse: + if is_given(encoding_format): + # don't modify the response object if a user explicitly asked for a format + return obj + + for embedding in obj.data: + data = cast(object, embedding.embedding) + if not isinstance(data, str): + # numpy is not installed / base64 optimisation isn't enabled for this model yet + continue + + embedding.embedding = np.frombuffer( # type: ignore[no-untyped-call] + base64.b64decode(data), dtype="float32" + ).tolist() + + return obj + + return self._post( + "/embeddings", + body=maybe_transform(params, embedding_create_params.EmbeddingCreateParams), + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + post_parser=parser, + ), + cast_to=CreateEmbeddingResponse, + ) + + +class AsyncEmbeddings(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncEmbeddingsWithRawResponse: + return AsyncEmbeddingsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncEmbeddingsWithStreamingResponse: + return AsyncEmbeddingsWithStreamingResponse(self) + + async def create( + self, + *, + input: Union[str, List[str], Iterable[int], Iterable[Iterable[int]]], + model: Union[str, Literal["text-embedding-ada-002", "text-embedding-3-small", "text-embedding-3-large"]], + dimensions: int | NotGiven = NOT_GIVEN, + encoding_format: Literal["float", "base64"] | NotGiven = NOT_GIVEN, + user: 
str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> CreateEmbeddingResponse: + """ + Creates an embedding vector representing the input text. + + Args: + input: Input text to embed, encoded as a string or array of tokens. To embed multiple + inputs in a single request, pass an array of strings or array of token arrays. + The input must not exceed the max input tokens for the model (8192 tokens for + `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 + dimensions or less. + [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + for counting tokens. + + model: ID of the model to use. You can use the + [List models](https://platform.openai.com/docs/api-reference/models/list) API to + see all of your available models, or see our + [Model overview](https://platform.openai.com/docs/models/overview) for + descriptions of them. + + dimensions: The number of dimensions the resulting output embeddings should have. Only + supported in `text-embedding-3` and later models. + + encoding_format: The format to return the embeddings in. Can be either `float` or + [`base64`](https://pypi.org/project/pybase64/). + + user: A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + params = { + "input": input, + "model": model, + "user": user, + "dimensions": dimensions, + "encoding_format": encoding_format, + } + if not is_given(encoding_format) and has_numpy(): + params["encoding_format"] = "base64" + + def parser(obj: CreateEmbeddingResponse) -> CreateEmbeddingResponse: + if is_given(encoding_format): + # don't modify the response object if a user explicitly asked for a format + return obj + + for embedding in obj.data: + data = cast(object, embedding.embedding) + if not isinstance(data, str): + # numpy is not installed / base64 optimisation isn't enabled for this model yet + continue + + embedding.embedding = np.frombuffer( # type: ignore[no-untyped-call] + base64.b64decode(data), dtype="float32" + ).tolist() + + return obj + + return await self._post( + "/embeddings", + body=maybe_transform(params, embedding_create_params.EmbeddingCreateParams), + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + post_parser=parser, + ), + cast_to=CreateEmbeddingResponse, + ) + + +class EmbeddingsWithRawResponse: + def __init__(self, embeddings: Embeddings) -> None: + self._embeddings = embeddings + + self.create = _legacy_response.to_raw_response_wrapper( + embeddings.create, + ) + + +class AsyncEmbeddingsWithRawResponse: + def __init__(self, embeddings: AsyncEmbeddings) -> None: + self._embeddings = embeddings + + self.create = _legacy_response.async_to_raw_response_wrapper( + embeddings.create, + ) + + +class EmbeddingsWithStreamingResponse: + def __init__(self, embeddings: Embeddings) -> None: + self._embeddings = embeddings + + self.create = to_streamed_response_wrapper( + embeddings.create, + ) + + +class 
AsyncEmbeddingsWithStreamingResponse: + def __init__(self, embeddings: AsyncEmbeddings) -> None: + self._embeddings = embeddings + + self.create = async_to_streamed_response_wrapper( + embeddings.create, + ) diff --git a/.venv/Lib/site-packages/openai/resources/files.py b/.venv/Lib/site-packages/openai/resources/files.py new file mode 100644 index 00000000..fa03a9c0 --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/files.py @@ -0,0 +1,691 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import time +import typing_extensions +from typing import Mapping, cast +from typing_extensions import Literal + +import httpx + +from .. import _legacy_response +from ..types import file_list_params, file_create_params +from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven, FileTypes +from .._utils import ( + extract_files, + maybe_transform, + deepcopy_minimal, + async_maybe_transform, +) +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import ( + StreamedBinaryAPIResponse, + AsyncStreamedBinaryAPIResponse, + to_streamed_response_wrapper, + async_to_streamed_response_wrapper, + to_custom_streamed_response_wrapper, + async_to_custom_streamed_response_wrapper, +) +from ..pagination import SyncPage, AsyncPage +from .._base_client import ( + AsyncPaginator, + make_request_options, +) +from ..types.file_object import FileObject +from ..types.file_deleted import FileDeleted + +__all__ = ["Files", "AsyncFiles"] + + +class Files(SyncAPIResource): + @cached_property + def with_raw_response(self) -> FilesWithRawResponse: + return FilesWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> FilesWithStreamingResponse: + return FilesWithStreamingResponse(self) + + def create( + self, + *, + file: FileTypes, + purpose: Literal["fine-tune", "assistants"], + # Use the following arguments if you need to 
pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> FileObject: + """Upload a file that can be used across various endpoints. + + The size of all the + files uploaded by one organization can be up to 100 GB. + + The size of individual files can be a maximum of 512 MB or 2 million tokens for + Assistants. See the + [Assistants Tools guide](https://platform.openai.com/docs/assistants/tools) to + learn more about the types of files supported. The Fine-tuning API only supports + `.jsonl` files. + + Please [contact us](https://help.openai.com/) if you need to increase these + storage limits. + + Args: + file: The File object (not file name) to be uploaded. + + purpose: The intended purpose of the uploaded file. + + Use "fine-tune" for + [Fine-tuning](https://platform.openai.com/docs/api-reference/fine-tuning) and + "assistants" for + [Assistants](https://platform.openai.com/docs/api-reference/assistants) and + [Messages](https://platform.openai.com/docs/api-reference/messages). This allows + us to validate the format of the uploaded file is correct for fine-tuning. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + body = deepcopy_minimal( + { + "file": file, + "purpose": purpose, + } + ) + files = extract_files(cast(Mapping[str, object], body), paths=[["file"]]) + if files: + # It should be noted that the actual Content-Type header that will be + # sent to the server will contain a `boundary` parameter, e.g. 
+ # multipart/form-data; boundary=---abc-- + extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})} + return self._post( + "/files", + body=maybe_transform(body, file_create_params.FileCreateParams), + files=files, + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=FileObject, + ) + + def retrieve( + self, + file_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> FileObject: + """ + Returns information about a specific file. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not file_id: + raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}") + return self._get( + f"/files/{file_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=FileObject, + ) + + def list( + self, + *, + purpose: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> SyncPage[FileObject]: + """ + Returns a list of files that belong to the user's organization. + + Args: + purpose: Only return files with the given purpose. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get_api_list( + "/files", + page=SyncPage[FileObject], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform({"purpose": purpose}, file_list_params.FileListParams), + ), + model=FileObject, + ) + + def delete( + self, + file_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> FileDeleted: + """ + Delete a file. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not file_id: + raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}") + return self._delete( + f"/files/{file_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=FileDeleted, + ) + + def content( + self, + file_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> _legacy_response.HttpxBinaryResponseContent: + """ + Returns the contents of the specified file. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not file_id: + raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}") + extra_headers = {"Accept": "application/binary", **(extra_headers or {})} + return self._get( + f"/files/{file_id}/content", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=_legacy_response.HttpxBinaryResponseContent, + ) + + @typing_extensions.deprecated("The `.content()` method should be used instead") + def retrieve_content( + self, + file_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> str: + """ + Returns the contents of the specified file. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not file_id: + raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}") + return self._get( + f"/files/{file_id}/content", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=str, + ) + + def wait_for_processing( + self, + id: str, + *, + poll_interval: float = 5.0, + max_wait_seconds: float = 30 * 60, + ) -> FileObject: + """Waits for the given file to be processed, default timeout is 30 mins.""" + TERMINAL_STATES = {"processed", "error", "deleted"} + + start = time.time() + file = self.retrieve(id) + while file.status not in TERMINAL_STATES: + self._sleep(poll_interval) + + file = self.retrieve(id) + if time.time() - start > max_wait_seconds: + raise RuntimeError( + f"Giving up on waiting for file {id} to finish processing after {max_wait_seconds} seconds." + ) + + return file + + +class AsyncFiles(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncFilesWithRawResponse: + return AsyncFilesWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncFilesWithStreamingResponse: + return AsyncFilesWithStreamingResponse(self) + + async def create( + self, + *, + file: FileTypes, + purpose: Literal["fine-tune", "assistants"], + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> FileObject: + """Upload a file that can be used across various endpoints. + + The size of all the + files uploaded by one organization can be up to 100 GB. + + The size of individual files can be a maximum of 512 MB or 2 million tokens for + Assistants. See the + [Assistants Tools guide](https://platform.openai.com/docs/assistants/tools) to + learn more about the types of files supported. The Fine-tuning API only supports + `.jsonl` files. + + Please [contact us](https://help.openai.com/) if you need to increase these + storage limits. + + Args: + file: The File object (not file name) to be uploaded. + + purpose: The intended purpose of the uploaded file. + + Use "fine-tune" for + [Fine-tuning](https://platform.openai.com/docs/api-reference/fine-tuning) and + "assistants" for + [Assistants](https://platform.openai.com/docs/api-reference/assistants) and + [Messages](https://platform.openai.com/docs/api-reference/messages). This allows + us to validate the format of the uploaded file is correct for fine-tuning. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + body = deepcopy_minimal( + { + "file": file, + "purpose": purpose, + } + ) + files = extract_files(cast(Mapping[str, object], body), paths=[["file"]]) + if files: + # It should be noted that the actual Content-Type header that will be + # sent to the server will contain a `boundary` parameter, e.g. 
+ # multipart/form-data; boundary=---abc-- + extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})} + return await self._post( + "/files", + body=await async_maybe_transform(body, file_create_params.FileCreateParams), + files=files, + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=FileObject, + ) + + async def retrieve( + self, + file_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> FileObject: + """ + Returns information about a specific file. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not file_id: + raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}") + return await self._get( + f"/files/{file_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=FileObject, + ) + + def list( + self, + *, + purpose: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncPaginator[FileObject, AsyncPage[FileObject]]: + """ + Returns a list of files that belong to the user's organization. + + Args: + purpose: Only return files with the given purpose. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get_api_list( + "/files", + page=AsyncPage[FileObject], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform({"purpose": purpose}, file_list_params.FileListParams), + ), + model=FileObject, + ) + + async def delete( + self, + file_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> FileDeleted: + """ + Delete a file. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not file_id: + raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}") + return await self._delete( + f"/files/{file_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=FileDeleted, + ) + + async def content( + self, + file_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> _legacy_response.HttpxBinaryResponseContent: + """ + Returns the contents of the specified file. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not file_id: + raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}") + extra_headers = {"Accept": "application/binary", **(extra_headers or {})} + return await self._get( + f"/files/{file_id}/content", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=_legacy_response.HttpxBinaryResponseContent, + ) + + @typing_extensions.deprecated("The `.content()` method should be used instead") + async def retrieve_content( + self, + file_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> str: + """ + Returns the contents of the specified file. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not file_id: + raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}") + return await self._get( + f"/files/{file_id}/content", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=str, + ) + + async def wait_for_processing( + self, + id: str, + *, + poll_interval: float = 5.0, + max_wait_seconds: float = 30 * 60, + ) -> FileObject: + """Waits for the given file to be processed, default timeout is 30 mins.""" + TERMINAL_STATES = {"processed", "error", "deleted"} + + start = time.time() + file = await self.retrieve(id) + while file.status not in TERMINAL_STATES: + await self._sleep(poll_interval) + + file = await self.retrieve(id) + if time.time() - start > max_wait_seconds: + raise RuntimeError( + f"Giving up on waiting for file {id} to finish processing after {max_wait_seconds} seconds." 
+ ) + + return file + + +class FilesWithRawResponse: + def __init__(self, files: Files) -> None: + self._files = files + + self.create = _legacy_response.to_raw_response_wrapper( + files.create, + ) + self.retrieve = _legacy_response.to_raw_response_wrapper( + files.retrieve, + ) + self.list = _legacy_response.to_raw_response_wrapper( + files.list, + ) + self.delete = _legacy_response.to_raw_response_wrapper( + files.delete, + ) + self.content = _legacy_response.to_raw_response_wrapper( + files.content, + ) + self.retrieve_content = ( # pyright: ignore[reportDeprecated] + _legacy_response.to_raw_response_wrapper( + files.retrieve_content # pyright: ignore[reportDeprecated], + ) + ) + + +class AsyncFilesWithRawResponse: + def __init__(self, files: AsyncFiles) -> None: + self._files = files + + self.create = _legacy_response.async_to_raw_response_wrapper( + files.create, + ) + self.retrieve = _legacy_response.async_to_raw_response_wrapper( + files.retrieve, + ) + self.list = _legacy_response.async_to_raw_response_wrapper( + files.list, + ) + self.delete = _legacy_response.async_to_raw_response_wrapper( + files.delete, + ) + self.content = _legacy_response.async_to_raw_response_wrapper( + files.content, + ) + self.retrieve_content = ( # pyright: ignore[reportDeprecated] + _legacy_response.async_to_raw_response_wrapper( + files.retrieve_content # pyright: ignore[reportDeprecated], + ) + ) + + +class FilesWithStreamingResponse: + def __init__(self, files: Files) -> None: + self._files = files + + self.create = to_streamed_response_wrapper( + files.create, + ) + self.retrieve = to_streamed_response_wrapper( + files.retrieve, + ) + self.list = to_streamed_response_wrapper( + files.list, + ) + self.delete = to_streamed_response_wrapper( + files.delete, + ) + self.content = to_custom_streamed_response_wrapper( + files.content, + StreamedBinaryAPIResponse, + ) + self.retrieve_content = ( # pyright: ignore[reportDeprecated] + to_streamed_response_wrapper( + 
files.retrieve_content # pyright: ignore[reportDeprecated], + ) + ) + + +class AsyncFilesWithStreamingResponse: + def __init__(self, files: AsyncFiles) -> None: + self._files = files + + self.create = async_to_streamed_response_wrapper( + files.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + files.retrieve, + ) + self.list = async_to_streamed_response_wrapper( + files.list, + ) + self.delete = async_to_streamed_response_wrapper( + files.delete, + ) + self.content = async_to_custom_streamed_response_wrapper( + files.content, + AsyncStreamedBinaryAPIResponse, + ) + self.retrieve_content = ( # pyright: ignore[reportDeprecated] + async_to_streamed_response_wrapper( + files.retrieve_content # pyright: ignore[reportDeprecated], + ) + ) diff --git a/.venv/Lib/site-packages/openai/resources/fine_tuning/__init__.py b/.venv/Lib/site-packages/openai/resources/fine_tuning/__init__.py new file mode 100644 index 00000000..7765231f --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/fine_tuning/__init__.py @@ -0,0 +1,33 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from .jobs import ( + Jobs, + AsyncJobs, + JobsWithRawResponse, + AsyncJobsWithRawResponse, + JobsWithStreamingResponse, + AsyncJobsWithStreamingResponse, +) +from .fine_tuning import ( + FineTuning, + AsyncFineTuning, + FineTuningWithRawResponse, + AsyncFineTuningWithRawResponse, + FineTuningWithStreamingResponse, + AsyncFineTuningWithStreamingResponse, +) + +__all__ = [ + "Jobs", + "AsyncJobs", + "JobsWithRawResponse", + "AsyncJobsWithRawResponse", + "JobsWithStreamingResponse", + "AsyncJobsWithStreamingResponse", + "FineTuning", + "AsyncFineTuning", + "FineTuningWithRawResponse", + "AsyncFineTuningWithRawResponse", + "FineTuningWithStreamingResponse", + "AsyncFineTuningWithStreamingResponse", +] diff --git a/.venv/Lib/site-packages/openai/resources/fine_tuning/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/fine_tuning/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..c1c93aee Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/fine_tuning/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/fine_tuning/__pycache__/fine_tuning.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/fine_tuning/__pycache__/fine_tuning.cpython-311.pyc new file mode 100644 index 00000000..307c7d09 Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/fine_tuning/__pycache__/fine_tuning.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/fine_tuning/fine_tuning.py b/.venv/Lib/site-packages/openai/resources/fine_tuning/fine_tuning.py new file mode 100644 index 00000000..0404fed6 --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/fine_tuning/fine_tuning.py @@ -0,0 +1,81 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from .jobs import ( + Jobs, + AsyncJobs, + JobsWithRawResponse, + AsyncJobsWithRawResponse, + JobsWithStreamingResponse, + AsyncJobsWithStreamingResponse, +) +from ..._compat import cached_property +from .jobs.jobs import Jobs, AsyncJobs +from ..._resource import SyncAPIResource, AsyncAPIResource + +__all__ = ["FineTuning", "AsyncFineTuning"] + + +class FineTuning(SyncAPIResource): + @cached_property + def jobs(self) -> Jobs: + return Jobs(self._client) + + @cached_property + def with_raw_response(self) -> FineTuningWithRawResponse: + return FineTuningWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> FineTuningWithStreamingResponse: + return FineTuningWithStreamingResponse(self) + + +class AsyncFineTuning(AsyncAPIResource): + @cached_property + def jobs(self) -> AsyncJobs: + return AsyncJobs(self._client) + + @cached_property + def with_raw_response(self) -> AsyncFineTuningWithRawResponse: + return AsyncFineTuningWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncFineTuningWithStreamingResponse: + return AsyncFineTuningWithStreamingResponse(self) + + +class FineTuningWithRawResponse: + def __init__(self, fine_tuning: FineTuning) -> None: + self._fine_tuning = fine_tuning + + @cached_property + def jobs(self) -> JobsWithRawResponse: + return JobsWithRawResponse(self._fine_tuning.jobs) + + +class AsyncFineTuningWithRawResponse: + def __init__(self, fine_tuning: AsyncFineTuning) -> None: + self._fine_tuning = fine_tuning + + @cached_property + def jobs(self) -> AsyncJobsWithRawResponse: + return AsyncJobsWithRawResponse(self._fine_tuning.jobs) + + +class FineTuningWithStreamingResponse: + def __init__(self, fine_tuning: FineTuning) -> None: + self._fine_tuning = fine_tuning + + @cached_property + def jobs(self) -> JobsWithStreamingResponse: + return JobsWithStreamingResponse(self._fine_tuning.jobs) + + +class AsyncFineTuningWithStreamingResponse: + def 
__init__(self, fine_tuning: AsyncFineTuning) -> None: + self._fine_tuning = fine_tuning + + @cached_property + def jobs(self) -> AsyncJobsWithStreamingResponse: + return AsyncJobsWithStreamingResponse(self._fine_tuning.jobs) diff --git a/.venv/Lib/site-packages/openai/resources/fine_tuning/jobs/__init__.py b/.venv/Lib/site-packages/openai/resources/fine_tuning/jobs/__init__.py new file mode 100644 index 00000000..94cd1fb7 --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/fine_tuning/jobs/__init__.py @@ -0,0 +1,33 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from .jobs import ( + Jobs, + AsyncJobs, + JobsWithRawResponse, + AsyncJobsWithRawResponse, + JobsWithStreamingResponse, + AsyncJobsWithStreamingResponse, +) +from .checkpoints import ( + Checkpoints, + AsyncCheckpoints, + CheckpointsWithRawResponse, + AsyncCheckpointsWithRawResponse, + CheckpointsWithStreamingResponse, + AsyncCheckpointsWithStreamingResponse, +) + +__all__ = [ + "Checkpoints", + "AsyncCheckpoints", + "CheckpointsWithRawResponse", + "AsyncCheckpointsWithRawResponse", + "CheckpointsWithStreamingResponse", + "AsyncCheckpointsWithStreamingResponse", + "Jobs", + "AsyncJobs", + "JobsWithRawResponse", + "AsyncJobsWithRawResponse", + "JobsWithStreamingResponse", + "AsyncJobsWithStreamingResponse", +] diff --git a/.venv/Lib/site-packages/openai/resources/fine_tuning/jobs/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/fine_tuning/jobs/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..68c21a95 Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/fine_tuning/jobs/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/fine_tuning/jobs/__pycache__/checkpoints.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/fine_tuning/jobs/__pycache__/checkpoints.cpython-311.pyc new file mode 100644 index 00000000..2ba4e6bf Binary files 
/dev/null and b/.venv/Lib/site-packages/openai/resources/fine_tuning/jobs/__pycache__/checkpoints.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/fine_tuning/jobs/__pycache__/jobs.cpython-311.pyc b/.venv/Lib/site-packages/openai/resources/fine_tuning/jobs/__pycache__/jobs.cpython-311.pyc new file mode 100644 index 00000000..35a241b5 Binary files /dev/null and b/.venv/Lib/site-packages/openai/resources/fine_tuning/jobs/__pycache__/jobs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/resources/fine_tuning/jobs/checkpoints.py b/.venv/Lib/site-packages/openai/resources/fine_tuning/jobs/checkpoints.py new file mode 100644 index 00000000..67f5739a --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/fine_tuning/jobs/checkpoints.py @@ -0,0 +1,177 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import httpx + +from .... import _legacy_response +from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ...._utils import maybe_transform +from ...._compat import cached_property +from ...._resource import SyncAPIResource, AsyncAPIResource +from ...._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper +from ....pagination import SyncCursorPage, AsyncCursorPage +from ...._base_client import ( + AsyncPaginator, + make_request_options, +) +from ....types.fine_tuning.jobs import checkpoint_list_params +from ....types.fine_tuning.jobs.fine_tuning_job_checkpoint import FineTuningJobCheckpoint + +__all__ = ["Checkpoints", "AsyncCheckpoints"] + + +class Checkpoints(SyncAPIResource): + @cached_property + def with_raw_response(self) -> CheckpointsWithRawResponse: + return CheckpointsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> CheckpointsWithStreamingResponse: + return CheckpointsWithStreamingResponse(self) + + def list( + self, + fine_tuning_job_id: str, + *, + after: str 
| NotGiven = NOT_GIVEN, + limit: int | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> SyncCursorPage[FineTuningJobCheckpoint]: + """ + List checkpoints for a fine-tuning job. + + Args: + after: Identifier for the last checkpoint ID from the previous pagination request. + + limit: Number of checkpoints to retrieve. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not fine_tuning_job_id: + raise ValueError(f"Expected a non-empty value for `fine_tuning_job_id` but received {fine_tuning_job_id!r}") + return self._get_api_list( + f"/fine_tuning/jobs/{fine_tuning_job_id}/checkpoints", + page=SyncCursorPage[FineTuningJobCheckpoint], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "after": after, + "limit": limit, + }, + checkpoint_list_params.CheckpointListParams, + ), + ), + model=FineTuningJobCheckpoint, + ) + + +class AsyncCheckpoints(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncCheckpointsWithRawResponse: + return AsyncCheckpointsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncCheckpointsWithStreamingResponse: + return AsyncCheckpointsWithStreamingResponse(self) + + def list( + self, + fine_tuning_job_id: str, + *, + after: str | NotGiven = NOT_GIVEN, + limit: int | NotGiven = NOT_GIVEN, + # Use the 
following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncPaginator[FineTuningJobCheckpoint, AsyncCursorPage[FineTuningJobCheckpoint]]: + """ + List checkpoints for a fine-tuning job. + + Args: + after: Identifier for the last checkpoint ID from the previous pagination request. + + limit: Number of checkpoints to retrieve. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not fine_tuning_job_id: + raise ValueError(f"Expected a non-empty value for `fine_tuning_job_id` but received {fine_tuning_job_id!r}") + return self._get_api_list( + f"/fine_tuning/jobs/{fine_tuning_job_id}/checkpoints", + page=AsyncCursorPage[FineTuningJobCheckpoint], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "after": after, + "limit": limit, + }, + checkpoint_list_params.CheckpointListParams, + ), + ), + model=FineTuningJobCheckpoint, + ) + + +class CheckpointsWithRawResponse: + def __init__(self, checkpoints: Checkpoints) -> None: + self._checkpoints = checkpoints + + self.list = _legacy_response.to_raw_response_wrapper( + checkpoints.list, + ) + + +class AsyncCheckpointsWithRawResponse: + def __init__(self, checkpoints: AsyncCheckpoints) -> None: + self._checkpoints = checkpoints + + self.list = _legacy_response.async_to_raw_response_wrapper( + checkpoints.list, + ) + + +class CheckpointsWithStreamingResponse: + def __init__(self, 
checkpoints: Checkpoints) -> None: + self._checkpoints = checkpoints + + self.list = to_streamed_response_wrapper( + checkpoints.list, + ) + + +class AsyncCheckpointsWithStreamingResponse: + def __init__(self, checkpoints: AsyncCheckpoints) -> None: + self._checkpoints = checkpoints + + self.list = async_to_streamed_response_wrapper( + checkpoints.list, + ) diff --git a/.venv/Lib/site-packages/openai/resources/fine_tuning/jobs/jobs.py b/.venv/Lib/site-packages/openai/resources/fine_tuning/jobs/jobs.py new file mode 100644 index 00000000..f38956e6 --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/fine_tuning/jobs/jobs.py @@ -0,0 +1,686 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union, Iterable, Optional +from typing_extensions import Literal + +import httpx + +from .... import _legacy_response +from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ...._utils import ( + maybe_transform, + async_maybe_transform, +) +from ...._compat import cached_property +from .checkpoints import ( + Checkpoints, + AsyncCheckpoints, + CheckpointsWithRawResponse, + AsyncCheckpointsWithRawResponse, + CheckpointsWithStreamingResponse, + AsyncCheckpointsWithStreamingResponse, +) +from ...._resource import SyncAPIResource, AsyncAPIResource +from ...._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper +from ....pagination import SyncCursorPage, AsyncCursorPage +from ...._base_client import ( + AsyncPaginator, + make_request_options, +) +from ....types.fine_tuning import job_list_params, job_create_params, job_list_events_params +from ....types.fine_tuning.fine_tuning_job import FineTuningJob +from ....types.fine_tuning.fine_tuning_job_event import FineTuningJobEvent + +__all__ = ["Jobs", "AsyncJobs"] + + +class Jobs(SyncAPIResource): + @cached_property + def checkpoints(self) -> Checkpoints: + return Checkpoints(self._client) + 
+ @cached_property + def with_raw_response(self) -> JobsWithRawResponse: + return JobsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> JobsWithStreamingResponse: + return JobsWithStreamingResponse(self) + + def create( + self, + *, + model: Union[str, Literal["babbage-002", "davinci-002", "gpt-3.5-turbo"]], + training_file: str, + hyperparameters: job_create_params.Hyperparameters | NotGiven = NOT_GIVEN, + integrations: Optional[Iterable[job_create_params.Integration]] | NotGiven = NOT_GIVEN, + seed: Optional[int] | NotGiven = NOT_GIVEN, + suffix: Optional[str] | NotGiven = NOT_GIVEN, + validation_file: Optional[str] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> FineTuningJob: + """ + Creates a fine-tuning job which begins the process of creating a new model from + a given dataset. + + Response includes details of the enqueued job including job status and the name + of the fine-tuned models once complete. + + [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) + + Args: + model: The name of the model to fine-tune. You can select one of the + [supported models](https://platform.openai.com/docs/guides/fine-tuning/what-models-can-be-fine-tuned). + + training_file: The ID of an uploaded file that contains training data. + + See [upload file](https://platform.openai.com/docs/api-reference/files/create) + for how to upload a file. + + Your dataset must be formatted as a JSONL file. Additionally, you must upload + your file with the purpose `fine-tune`. 
+ + See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + for more details. + + hyperparameters: The hyperparameters used for the fine-tuning job. + + integrations: A list of integrations to enable for your fine-tuning job. + + seed: The seed controls the reproducibility of the job. Passing in the same seed and + job parameters should produce the same results, but may differ in rare cases. If + a seed is not specified, one will be generated for you. + + suffix: A string of up to 18 characters that will be added to your fine-tuned model + name. + + For example, a `suffix` of "custom-model-name" would produce a model name like + `ft:gpt-3.5-turbo:openai:custom-model-name:7p4lURel`. + + validation_file: The ID of an uploaded file that contains validation data. + + If you provide this file, the data is used to generate validation metrics + periodically during fine-tuning. These metrics can be viewed in the fine-tuning + results file. The same data should not be present in both train and validation + files. + + Your dataset must be formatted as a JSONL file. You must upload your file with + the purpose `fine-tune`. + + See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + for more details. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/fine_tuning/jobs", + body=maybe_transform( + { + "model": model, + "training_file": training_file, + "hyperparameters": hyperparameters, + "integrations": integrations, + "seed": seed, + "suffix": suffix, + "validation_file": validation_file, + }, + job_create_params.JobCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=FineTuningJob, + ) + + def retrieve( + self, + fine_tuning_job_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> FineTuningJob: + """ + Get info about a fine-tuning job. 
+ + [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not fine_tuning_job_id: + raise ValueError(f"Expected a non-empty value for `fine_tuning_job_id` but received {fine_tuning_job_id!r}") + return self._get( + f"/fine_tuning/jobs/{fine_tuning_job_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=FineTuningJob, + ) + + def list( + self, + *, + after: str | NotGiven = NOT_GIVEN, + limit: int | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> SyncCursorPage[FineTuningJob]: + """ + List your organization's fine-tuning jobs + + Args: + after: Identifier for the last job from the previous pagination request. + + limit: Number of fine-tuning jobs to retrieve. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get_api_list( + "/fine_tuning/jobs", + page=SyncCursorPage[FineTuningJob], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "after": after, + "limit": limit, + }, + job_list_params.JobListParams, + ), + ), + model=FineTuningJob, + ) + + def cancel( + self, + fine_tuning_job_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> FineTuningJob: + """ + Immediately cancel a fine-tune job. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not fine_tuning_job_id: + raise ValueError(f"Expected a non-empty value for `fine_tuning_job_id` but received {fine_tuning_job_id!r}") + return self._post( + f"/fine_tuning/jobs/{fine_tuning_job_id}/cancel", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=FineTuningJob, + ) + + def list_events( + self, + fine_tuning_job_id: str, + *, + after: str | NotGiven = NOT_GIVEN, + limit: int | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> SyncCursorPage[FineTuningJobEvent]: + """ + Get status updates for a fine-tuning job. + + Args: + after: Identifier for the last event from the previous pagination request. + + limit: Number of events to retrieve. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not fine_tuning_job_id: + raise ValueError(f"Expected a non-empty value for `fine_tuning_job_id` but received {fine_tuning_job_id!r}") + return self._get_api_list( + f"/fine_tuning/jobs/{fine_tuning_job_id}/events", + page=SyncCursorPage[FineTuningJobEvent], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "after": after, + "limit": limit, + }, + job_list_events_params.JobListEventsParams, + ), + ), + model=FineTuningJobEvent, + ) + + +class AsyncJobs(AsyncAPIResource): + @cached_property + def checkpoints(self) -> AsyncCheckpoints: + return AsyncCheckpoints(self._client) + + @cached_property + def with_raw_response(self) -> AsyncJobsWithRawResponse: + return AsyncJobsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncJobsWithStreamingResponse: + return AsyncJobsWithStreamingResponse(self) + + async def create( + self, + *, + model: Union[str, Literal["babbage-002", "davinci-002", "gpt-3.5-turbo"]], + training_file: str, + hyperparameters: job_create_params.Hyperparameters | NotGiven = NOT_GIVEN, + integrations: Optional[Iterable[job_create_params.Integration]] | NotGiven = NOT_GIVEN, + seed: Optional[int] | NotGiven = NOT_GIVEN, + suffix: Optional[str] | NotGiven = NOT_GIVEN, + validation_file: Optional[str] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> FineTuningJob: + """ + Creates a fine-tuning job which begins the process of creating a new model from + a given dataset. + + Response includes details of the enqueued job including job status and the name + of the fine-tuned models once complete. + + [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) + + Args: + model: The name of the model to fine-tune. You can select one of the + [supported models](https://platform.openai.com/docs/guides/fine-tuning/what-models-can-be-fine-tuned). + + training_file: The ID of an uploaded file that contains training data. + + See [upload file](https://platform.openai.com/docs/api-reference/files/create) + for how to upload a file. + + Your dataset must be formatted as a JSONL file. Additionally, you must upload + your file with the purpose `fine-tune`. + + See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + for more details. + + hyperparameters: The hyperparameters used for the fine-tuning job. + + integrations: A list of integrations to enable for your fine-tuning job. + + seed: The seed controls the reproducibility of the job. Passing in the same seed and + job parameters should produce the same results, but may differ in rare cases. If + a seed is not specified, one will be generated for you. + + suffix: A string of up to 18 characters that will be added to your fine-tuned model + name. + + For example, a `suffix` of "custom-model-name" would produce a model name like + `ft:gpt-3.5-turbo:openai:custom-model-name:7p4lURel`. + + validation_file: The ID of an uploaded file that contains validation data. + + If you provide this file, the data is used to generate validation metrics + periodically during fine-tuning. These metrics can be viewed in the fine-tuning + results file. 
The same data should not be present in both train and validation + files. + + Your dataset must be formatted as a JSONL file. You must upload your file with + the purpose `fine-tune`. + + See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + for more details. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/fine_tuning/jobs", + body=await async_maybe_transform( + { + "model": model, + "training_file": training_file, + "hyperparameters": hyperparameters, + "integrations": integrations, + "seed": seed, + "suffix": suffix, + "validation_file": validation_file, + }, + job_create_params.JobCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=FineTuningJob, + ) + + async def retrieve( + self, + fine_tuning_job_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> FineTuningJob: + """ + Get info about a fine-tuning job. 
+ + [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not fine_tuning_job_id: + raise ValueError(f"Expected a non-empty value for `fine_tuning_job_id` but received {fine_tuning_job_id!r}") + return await self._get( + f"/fine_tuning/jobs/{fine_tuning_job_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=FineTuningJob, + ) + + def list( + self, + *, + after: str | NotGiven = NOT_GIVEN, + limit: int | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncPaginator[FineTuningJob, AsyncCursorPage[FineTuningJob]]: + """ + List your organization's fine-tuning jobs + + Args: + after: Identifier for the last job from the previous pagination request. + + limit: Number of fine-tuning jobs to retrieve. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get_api_list( + "/fine_tuning/jobs", + page=AsyncCursorPage[FineTuningJob], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "after": after, + "limit": limit, + }, + job_list_params.JobListParams, + ), + ), + model=FineTuningJob, + ) + + async def cancel( + self, + fine_tuning_job_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> FineTuningJob: + """ + Immediately cancel a fine-tune job. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not fine_tuning_job_id: + raise ValueError(f"Expected a non-empty value for `fine_tuning_job_id` but received {fine_tuning_job_id!r}") + return await self._post( + f"/fine_tuning/jobs/{fine_tuning_job_id}/cancel", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=FineTuningJob, + ) + + def list_events( + self, + fine_tuning_job_id: str, + *, + after: str | NotGiven = NOT_GIVEN, + limit: int | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncPaginator[FineTuningJobEvent, AsyncCursorPage[FineTuningJobEvent]]: + """ + Get status updates for a fine-tuning job. + + Args: + after: Identifier for the last event from the previous pagination request. + + limit: Number of events to retrieve. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not fine_tuning_job_id: + raise ValueError(f"Expected a non-empty value for `fine_tuning_job_id` but received {fine_tuning_job_id!r}") + return self._get_api_list( + f"/fine_tuning/jobs/{fine_tuning_job_id}/events", + page=AsyncCursorPage[FineTuningJobEvent], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "after": after, + "limit": limit, + }, + job_list_events_params.JobListEventsParams, + ), + ), + model=FineTuningJobEvent, + ) + + +class JobsWithRawResponse: + def __init__(self, jobs: Jobs) -> None: + self._jobs = jobs + + self.create = _legacy_response.to_raw_response_wrapper( + jobs.create, + ) + self.retrieve = _legacy_response.to_raw_response_wrapper( + jobs.retrieve, + ) + self.list = _legacy_response.to_raw_response_wrapper( + jobs.list, + ) + self.cancel = _legacy_response.to_raw_response_wrapper( + jobs.cancel, + ) + self.list_events = _legacy_response.to_raw_response_wrapper( + jobs.list_events, + ) + + @cached_property + def checkpoints(self) -> CheckpointsWithRawResponse: + return CheckpointsWithRawResponse(self._jobs.checkpoints) + + +class AsyncJobsWithRawResponse: + def __init__(self, jobs: AsyncJobs) -> None: + self._jobs = jobs + + self.create = _legacy_response.async_to_raw_response_wrapper( + jobs.create, + ) + self.retrieve = _legacy_response.async_to_raw_response_wrapper( + jobs.retrieve, + ) + self.list = _legacy_response.async_to_raw_response_wrapper( + jobs.list, + ) + self.cancel = _legacy_response.async_to_raw_response_wrapper( + jobs.cancel, + ) + self.list_events = _legacy_response.async_to_raw_response_wrapper( + jobs.list_events, + ) + + @cached_property + def 
checkpoints(self) -> AsyncCheckpointsWithRawResponse: + return AsyncCheckpointsWithRawResponse(self._jobs.checkpoints) + + +class JobsWithStreamingResponse: + def __init__(self, jobs: Jobs) -> None: + self._jobs = jobs + + self.create = to_streamed_response_wrapper( + jobs.create, + ) + self.retrieve = to_streamed_response_wrapper( + jobs.retrieve, + ) + self.list = to_streamed_response_wrapper( + jobs.list, + ) + self.cancel = to_streamed_response_wrapper( + jobs.cancel, + ) + self.list_events = to_streamed_response_wrapper( + jobs.list_events, + ) + + @cached_property + def checkpoints(self) -> CheckpointsWithStreamingResponse: + return CheckpointsWithStreamingResponse(self._jobs.checkpoints) + + +class AsyncJobsWithStreamingResponse: + def __init__(self, jobs: AsyncJobs) -> None: + self._jobs = jobs + + self.create = async_to_streamed_response_wrapper( + jobs.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + jobs.retrieve, + ) + self.list = async_to_streamed_response_wrapper( + jobs.list, + ) + self.cancel = async_to_streamed_response_wrapper( + jobs.cancel, + ) + self.list_events = async_to_streamed_response_wrapper( + jobs.list_events, + ) + + @cached_property + def checkpoints(self) -> AsyncCheckpointsWithStreamingResponse: + return AsyncCheckpointsWithStreamingResponse(self._jobs.checkpoints) diff --git a/.venv/Lib/site-packages/openai/resources/images.py b/.venv/Lib/site-packages/openai/resources/images.py new file mode 100644 index 00000000..74b2a46a --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/images.py @@ -0,0 +1,583 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union, Mapping, Optional, cast +from typing_extensions import Literal + +import httpx + +from .. 
import _legacy_response +from ..types import image_edit_params, image_generate_params, image_create_variation_params +from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven, FileTypes +from .._utils import ( + extract_files, + maybe_transform, + deepcopy_minimal, + async_maybe_transform, +) +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper +from .._base_client import ( + make_request_options, +) +from ..types.images_response import ImagesResponse + +__all__ = ["Images", "AsyncImages"] + + +class Images(SyncAPIResource): + @cached_property + def with_raw_response(self) -> ImagesWithRawResponse: + return ImagesWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> ImagesWithStreamingResponse: + return ImagesWithStreamingResponse(self) + + def create_variation( + self, + *, + image: FileTypes, + model: Union[str, Literal["dall-e-2"], None] | NotGiven = NOT_GIVEN, + n: Optional[int] | NotGiven = NOT_GIVEN, + response_format: Optional[Literal["url", "b64_json"]] | NotGiven = NOT_GIVEN, + size: Optional[Literal["256x256", "512x512", "1024x1024"]] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ImagesResponse: + """ + Creates a variation of a given image. + + Args: + image: The image to use as the basis for the variation(s). Must be a valid PNG file, + less than 4MB, and square. + + model: The model to use for image generation. Only `dall-e-2` is supported at this + time. 
+ + n: The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only + `n=1` is supported. + + response_format: The format in which the generated images are returned. Must be one of `url` or + `b64_json`. URLs are only valid for 60 minutes after the image has been + generated. + + size: The size of the generated images. Must be one of `256x256`, `512x512`, or + `1024x1024`. + + user: A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + body = deepcopy_minimal( + { + "image": image, + "model": model, + "n": n, + "response_format": response_format, + "size": size, + "user": user, + } + ) + files = extract_files(cast(Mapping[str, object], body), paths=[["image"]]) + if files: + # It should be noted that the actual Content-Type header that will be + # sent to the server will contain a `boundary` parameter, e.g. 
+ # multipart/form-data; boundary=---abc-- + extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})} + return self._post( + "/images/variations", + body=maybe_transform(body, image_create_variation_params.ImageCreateVariationParams), + files=files, + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ImagesResponse, + ) + + def edit( + self, + *, + image: FileTypes, + prompt: str, + mask: FileTypes | NotGiven = NOT_GIVEN, + model: Union[str, Literal["dall-e-2"], None] | NotGiven = NOT_GIVEN, + n: Optional[int] | NotGiven = NOT_GIVEN, + response_format: Optional[Literal["url", "b64_json"]] | NotGiven = NOT_GIVEN, + size: Optional[Literal["256x256", "512x512", "1024x1024"]] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ImagesResponse: + """ + Creates an edited or extended image given an original image and a prompt. + + Args: + image: The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask + is not provided, image must have transparency, which will be used as the mask. + + prompt: A text description of the desired image(s). The maximum length is 1000 + characters. + + mask: An additional image whose fully transparent areas (e.g. where alpha is zero) + indicate where `image` should be edited. Must be a valid PNG file, less than + 4MB, and have the same dimensions as `image`. + + model: The model to use for image generation. Only `dall-e-2` is supported at this + time. + + n: The number of images to generate. 
Must be between 1 and 10. + + response_format: The format in which the generated images are returned. Must be one of `url` or + `b64_json`. URLs are only valid for 60 minutes after the image has been + generated. + + size: The size of the generated images. Must be one of `256x256`, `512x512`, or + `1024x1024`. + + user: A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + body = deepcopy_minimal( + { + "image": image, + "prompt": prompt, + "mask": mask, + "model": model, + "n": n, + "response_format": response_format, + "size": size, + "user": user, + } + ) + files = extract_files(cast(Mapping[str, object], body), paths=[["image"], ["mask"]]) + if files: + # It should be noted that the actual Content-Type header that will be + # sent to the server will contain a `boundary` parameter, e.g. 
+ # multipart/form-data; boundary=---abc-- + extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})} + return self._post( + "/images/edits", + body=maybe_transform(body, image_edit_params.ImageEditParams), + files=files, + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ImagesResponse, + ) + + def generate( + self, + *, + prompt: str, + model: Union[str, Literal["dall-e-2", "dall-e-3"], None] | NotGiven = NOT_GIVEN, + n: Optional[int] | NotGiven = NOT_GIVEN, + quality: Literal["standard", "hd"] | NotGiven = NOT_GIVEN, + response_format: Optional[Literal["url", "b64_json"]] | NotGiven = NOT_GIVEN, + size: Optional[Literal["256x256", "512x512", "1024x1024", "1792x1024", "1024x1792"]] | NotGiven = NOT_GIVEN, + style: Optional[Literal["vivid", "natural"]] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ImagesResponse: + """ + Creates an image given a prompt. + + Args: + prompt: A text description of the desired image(s). The maximum length is 1000 + characters for `dall-e-2` and 4000 characters for `dall-e-3`. + + model: The model to use for image generation. + + n: The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only + `n=1` is supported. + + quality: The quality of the image that will be generated. `hd` creates images with finer + details and greater consistency across the image. This param is only supported + for `dall-e-3`. + + response_format: The format in which the generated images are returned. 
Must be one of `url` or + `b64_json`. URLs are only valid for 60 minutes after the image has been + generated. + + size: The size of the generated images. Must be one of `256x256`, `512x512`, or + `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or + `1024x1792` for `dall-e-3` models. + + style: The style of the generated images. Must be one of `vivid` or `natural`. Vivid + causes the model to lean towards generating hyper-real and dramatic images. + Natural causes the model to produce more natural, less hyper-real looking + images. This param is only supported for `dall-e-3`. + + user: A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/images/generations", + body=maybe_transform( + { + "prompt": prompt, + "model": model, + "n": n, + "quality": quality, + "response_format": response_format, + "size": size, + "style": style, + "user": user, + }, + image_generate_params.ImageGenerateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ImagesResponse, + ) + + +class AsyncImages(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncImagesWithRawResponse: + return AsyncImagesWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncImagesWithStreamingResponse: + return AsyncImagesWithStreamingResponse(self) + + async def create_variation( + self, + *, + image: FileTypes, + model: Union[str, Literal["dall-e-2"], None] | NotGiven = NOT_GIVEN, + n: Optional[int] | NotGiven = NOT_GIVEN, + 
response_format: Optional[Literal["url", "b64_json"]] | NotGiven = NOT_GIVEN, + size: Optional[Literal["256x256", "512x512", "1024x1024"]] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ImagesResponse: + """ + Creates a variation of a given image. + + Args: + image: The image to use as the basis for the variation(s). Must be a valid PNG file, + less than 4MB, and square. + + model: The model to use for image generation. Only `dall-e-2` is supported at this + time. + + n: The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only + `n=1` is supported. + + response_format: The format in which the generated images are returned. Must be one of `url` or + `b64_json`. URLs are only valid for 60 minutes after the image has been + generated. + + size: The size of the generated images. Must be one of `256x256`, `512x512`, or + `1024x1024`. + + user: A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + body = deepcopy_minimal( + { + "image": image, + "model": model, + "n": n, + "response_format": response_format, + "size": size, + "user": user, + } + ) + files = extract_files(cast(Mapping[str, object], body), paths=[["image"]]) + if files: + # It should be noted that the actual Content-Type header that will be + # sent to the server will contain a `boundary` parameter, e.g. + # multipart/form-data; boundary=---abc-- + extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})} + return await self._post( + "/images/variations", + body=await async_maybe_transform(body, image_create_variation_params.ImageCreateVariationParams), + files=files, + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ImagesResponse, + ) + + async def edit( + self, + *, + image: FileTypes, + prompt: str, + mask: FileTypes | NotGiven = NOT_GIVEN, + model: Union[str, Literal["dall-e-2"], None] | NotGiven = NOT_GIVEN, + n: Optional[int] | NotGiven = NOT_GIVEN, + response_format: Optional[Literal["url", "b64_json"]] | NotGiven = NOT_GIVEN, + size: Optional[Literal["256x256", "512x512", "1024x1024"]] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ImagesResponse: + """ + Creates an edited or extended image given an original image and a prompt. + + Args: + image: The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask + is not provided, image must have transparency, which will be used as the mask. + + prompt: A text description of the desired image(s). The maximum length is 1000 + characters. + + mask: An additional image whose fully transparent areas (e.g. where alpha is zero) + indicate where `image` should be edited. Must be a valid PNG file, less than + 4MB, and have the same dimensions as `image`. + + model: The model to use for image generation. Only `dall-e-2` is supported at this + time. + + n: The number of images to generate. Must be between 1 and 10. + + response_format: The format in which the generated images are returned. Must be one of `url` or + `b64_json`. URLs are only valid for 60 minutes after the image has been + generated. + + size: The size of the generated images. Must be one of `256x256`, `512x512`, or + `1024x1024`. + + user: A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + body = deepcopy_minimal( + { + "image": image, + "prompt": prompt, + "mask": mask, + "model": model, + "n": n, + "response_format": response_format, + "size": size, + "user": user, + } + ) + files = extract_files(cast(Mapping[str, object], body), paths=[["image"], ["mask"]]) + if files: + # It should be noted that the actual Content-Type header that will be + # sent to the server will contain a `boundary` parameter, e.g. + # multipart/form-data; boundary=---abc-- + extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})} + return await self._post( + "/images/edits", + body=await async_maybe_transform(body, image_edit_params.ImageEditParams), + files=files, + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ImagesResponse, + ) + + async def generate( + self, + *, + prompt: str, + model: Union[str, Literal["dall-e-2", "dall-e-3"], None] | NotGiven = NOT_GIVEN, + n: Optional[int] | NotGiven = NOT_GIVEN, + quality: Literal["standard", "hd"] | NotGiven = NOT_GIVEN, + response_format: Optional[Literal["url", "b64_json"]] | NotGiven = NOT_GIVEN, + size: Optional[Literal["256x256", "512x512", "1024x1024", "1792x1024", "1024x1792"]] | NotGiven = NOT_GIVEN, + style: Optional[Literal["vivid", "natural"]] | NotGiven = NOT_GIVEN, + user: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ImagesResponse: + """ + Creates an image given a prompt. + + Args: + prompt: A text description of the desired image(s). The maximum length is 1000 + characters for `dall-e-2` and 4000 characters for `dall-e-3`. + + model: The model to use for image generation. + + n: The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only + `n=1` is supported. + + quality: The quality of the image that will be generated. `hd` creates images with finer + details and greater consistency across the image. This param is only supported + for `dall-e-3`. + + response_format: The format in which the generated images are returned. Must be one of `url` or + `b64_json`. URLs are only valid for 60 minutes after the image has been + generated. + + size: The size of the generated images. Must be one of `256x256`, `512x512`, or + `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or + `1024x1792` for `dall-e-3` models. + + style: The style of the generated images. Must be one of `vivid` or `natural`. Vivid + causes the model to lean towards generating hyper-real and dramatic images. + Natural causes the model to produce more natural, less hyper-real looking + images. This param is only supported for `dall-e-3`. + + user: A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/images/generations", + body=await async_maybe_transform( + { + "prompt": prompt, + "model": model, + "n": n, + "quality": quality, + "response_format": response_format, + "size": size, + "style": style, + "user": user, + }, + image_generate_params.ImageGenerateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ImagesResponse, + ) + + +class ImagesWithRawResponse: + def __init__(self, images: Images) -> None: + self._images = images + + self.create_variation = _legacy_response.to_raw_response_wrapper( + images.create_variation, + ) + self.edit = _legacy_response.to_raw_response_wrapper( + images.edit, + ) + self.generate = _legacy_response.to_raw_response_wrapper( + images.generate, + ) + + +class AsyncImagesWithRawResponse: + def __init__(self, images: AsyncImages) -> None: + self._images = images + + self.create_variation = _legacy_response.async_to_raw_response_wrapper( + images.create_variation, + ) + self.edit = _legacy_response.async_to_raw_response_wrapper( + images.edit, + ) + self.generate = _legacy_response.async_to_raw_response_wrapper( + images.generate, + ) + + +class ImagesWithStreamingResponse: + def __init__(self, images: Images) -> None: + self._images = images + + self.create_variation = to_streamed_response_wrapper( + images.create_variation, + ) + self.edit = to_streamed_response_wrapper( + images.edit, + ) + self.generate = to_streamed_response_wrapper( + images.generate, + ) + + +class AsyncImagesWithStreamingResponse: + def __init__(self, images: AsyncImages) -> None: + self._images = images + + self.create_variation = async_to_streamed_response_wrapper( + 
images.create_variation, + ) + self.edit = async_to_streamed_response_wrapper( + images.edit, + ) + self.generate = async_to_streamed_response_wrapper( + images.generate, + ) diff --git a/.venv/Lib/site-packages/openai/resources/models.py b/.venv/Lib/site-packages/openai/resources/models.py new file mode 100644 index 00000000..e76c496f --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/models.py @@ -0,0 +1,284 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import httpx + +from .. import _legacy_response +from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper +from ..pagination import SyncPage, AsyncPage +from ..types.model import Model +from .._base_client import ( + AsyncPaginator, + make_request_options, +) +from ..types.model_deleted import ModelDeleted + +__all__ = ["Models", "AsyncModels"] + + +class Models(SyncAPIResource): + @cached_property + def with_raw_response(self) -> ModelsWithRawResponse: + return ModelsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> ModelsWithStreamingResponse: + return ModelsWithStreamingResponse(self) + + def retrieve( + self, + model: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Model: + """ + Retrieves a model instance, providing basic information about the model such as + the owner and permissioning. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not model: + raise ValueError(f"Expected a non-empty value for `model` but received {model!r}") + return self._get( + f"/models/{model}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Model, + ) + + def list( + self, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> SyncPage[Model]: + """ + Lists the currently available models, and provides basic information about each + one such as the owner and availability. + """ + return self._get_api_list( + "/models", + page=SyncPage[Model], + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + model=Model, + ) + + def delete( + self, + model: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ModelDeleted: + """Delete a fine-tuned model. + + You must have the Owner role in your organization to + delete a model. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not model: + raise ValueError(f"Expected a non-empty value for `model` but received {model!r}") + return self._delete( + f"/models/{model}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ModelDeleted, + ) + + +class AsyncModels(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncModelsWithRawResponse: + return AsyncModelsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncModelsWithStreamingResponse: + return AsyncModelsWithStreamingResponse(self) + + async def retrieve( + self, + model: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Model: + """ + Retrieves a model instance, providing basic information about the model such as + the owner and permissioning. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not model: + raise ValueError(f"Expected a non-empty value for `model` but received {model!r}") + return await self._get( + f"/models/{model}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Model, + ) + + def list( + self, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncPaginator[Model, AsyncPage[Model]]: + """ + Lists the currently available models, and provides basic information about each + one such as the owner and availability. + """ + return self._get_api_list( + "/models", + page=AsyncPage[Model], + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + model=Model, + ) + + async def delete( + self, + model: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ModelDeleted: + """Delete a fine-tuned model. + + You must have the Owner role in your organization to + delete a model. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not model: + raise ValueError(f"Expected a non-empty value for `model` but received {model!r}") + return await self._delete( + f"/models/{model}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ModelDeleted, + ) + + +class ModelsWithRawResponse: + def __init__(self, models: Models) -> None: + self._models = models + + self.retrieve = _legacy_response.to_raw_response_wrapper( + models.retrieve, + ) + self.list = _legacy_response.to_raw_response_wrapper( + models.list, + ) + self.delete = _legacy_response.to_raw_response_wrapper( + models.delete, + ) + + +class AsyncModelsWithRawResponse: + def __init__(self, models: AsyncModels) -> None: + self._models = models + + self.retrieve = _legacy_response.async_to_raw_response_wrapper( + models.retrieve, + ) + self.list = _legacy_response.async_to_raw_response_wrapper( + models.list, + ) + self.delete = _legacy_response.async_to_raw_response_wrapper( + models.delete, + ) + + +class ModelsWithStreamingResponse: + def __init__(self, models: Models) -> None: + self._models = models + + self.retrieve = to_streamed_response_wrapper( + models.retrieve, + ) + self.list = to_streamed_response_wrapper( + models.list, + ) + self.delete = to_streamed_response_wrapper( + models.delete, + ) + + +class AsyncModelsWithStreamingResponse: + def __init__(self, models: AsyncModels) -> None: + self._models = models + + self.retrieve = async_to_streamed_response_wrapper( + models.retrieve, + ) + self.list = async_to_streamed_response_wrapper( + models.list, + ) + self.delete = async_to_streamed_response_wrapper( + models.delete, + ) diff --git 
a/.venv/Lib/site-packages/openai/resources/moderations.py b/.venv/Lib/site-packages/openai/resources/moderations.py new file mode 100644 index 00000000..9386e50d --- /dev/null +++ b/.venv/Lib/site-packages/openai/resources/moderations.py @@ -0,0 +1,181 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List, Union +from typing_extensions import Literal + +import httpx + +from .. import _legacy_response +from ..types import moderation_create_params +from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from .._utils import ( + maybe_transform, + async_maybe_transform, +) +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper +from .._base_client import ( + make_request_options, +) +from ..types.moderation_create_response import ModerationCreateResponse + +__all__ = ["Moderations", "AsyncModerations"] + + +class Moderations(SyncAPIResource): + @cached_property + def with_raw_response(self) -> ModerationsWithRawResponse: + return ModerationsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> ModerationsWithStreamingResponse: + return ModerationsWithStreamingResponse(self) + + def create( + self, + *, + input: Union[str, List[str]], + model: Union[str, Literal["text-moderation-latest", "text-moderation-stable"]] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ModerationCreateResponse: + """ + Classifies if text is potentially harmful. + + Args: + input: The input text to classify + + model: Two content moderations models are available: `text-moderation-stable` and + `text-moderation-latest`. + + The default is `text-moderation-latest` which will be automatically upgraded + over time. This ensures you are always using our most accurate model. If you use + `text-moderation-stable`, we will provide advanced notice before updating the + model. Accuracy of `text-moderation-stable` may be slightly lower than for + `text-moderation-latest`. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/moderations", + body=maybe_transform( + { + "input": input, + "model": model, + }, + moderation_create_params.ModerationCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ModerationCreateResponse, + ) + + +class AsyncModerations(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncModerationsWithRawResponse: + return AsyncModerationsWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncModerationsWithStreamingResponse: + return AsyncModerationsWithStreamingResponse(self) + + async def create( + self, + *, + input: Union[str, List[str]], + model: Union[str, Literal["text-moderation-latest", "text-moderation-stable"]] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ModerationCreateResponse: + """ + Classifies if text is potentially harmful. + + Args: + input: The input text to classify + + model: Two content moderations models are available: `text-moderation-stable` and + `text-moderation-latest`. + + The default is `text-moderation-latest` which will be automatically upgraded + over time. This ensures you are always using our most accurate model. If you use + `text-moderation-stable`, we will provide advanced notice before updating the + model. Accuracy of `text-moderation-stable` may be slightly lower than for + `text-moderation-latest`. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/moderations", + body=await async_maybe_transform( + { + "input": input, + "model": model, + }, + moderation_create_params.ModerationCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ModerationCreateResponse, + ) + + +class ModerationsWithRawResponse: + def __init__(self, moderations: Moderations) -> None: + self._moderations = moderations + + self.create = _legacy_response.to_raw_response_wrapper( + moderations.create, + ) + + +class AsyncModerationsWithRawResponse: + def __init__(self, moderations: AsyncModerations) -> None: + self._moderations = moderations + + self.create = _legacy_response.async_to_raw_response_wrapper( + moderations.create, + ) + + +class ModerationsWithStreamingResponse: + def __init__(self, 
moderations: Moderations) -> None: + self._moderations = moderations + + self.create = to_streamed_response_wrapper( + moderations.create, + ) + + +class AsyncModerationsWithStreamingResponse: + def __init__(self, moderations: AsyncModerations) -> None: + self._moderations = moderations + + self.create = async_to_streamed_response_wrapper( + moderations.create, + ) diff --git a/.venv/Lib/site-packages/openai/types/__init__.py b/.venv/Lib/site-packages/openai/types/__init__.py new file mode 100644 index 00000000..7873efb3 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/__init__.py @@ -0,0 +1,37 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from .batch import Batch as Batch +from .image import Image as Image +from .model import Model as Model +from .shared import ( + ErrorObject as ErrorObject, + FunctionDefinition as FunctionDefinition, + FunctionParameters as FunctionParameters, +) +from .embedding import Embedding as Embedding +from .chat_model import ChatModel as ChatModel +from .completion import Completion as Completion +from .moderation import Moderation as Moderation +from .batch_error import BatchError as BatchError +from .file_object import FileObject as FileObject +from .file_content import FileContent as FileContent +from .file_deleted import FileDeleted as FileDeleted +from .model_deleted import ModelDeleted as ModelDeleted +from .images_response import ImagesResponse as ImagesResponse +from .completion_usage import CompletionUsage as CompletionUsage +from .file_list_params import FileListParams as FileListParams +from .batch_list_params import BatchListParams as BatchListParams +from .completion_choice import CompletionChoice as CompletionChoice +from .image_edit_params import ImageEditParams as ImageEditParams +from .file_create_params import FileCreateParams as FileCreateParams +from .batch_create_params import BatchCreateParams as BatchCreateParams +from 
.batch_request_counts import BatchRequestCounts as BatchRequestCounts +from .image_generate_params import ImageGenerateParams as ImageGenerateParams +from .embedding_create_params import EmbeddingCreateParams as EmbeddingCreateParams +from .completion_create_params import CompletionCreateParams as CompletionCreateParams +from .moderation_create_params import ModerationCreateParams as ModerationCreateParams +from .create_embedding_response import CreateEmbeddingResponse as CreateEmbeddingResponse +from .moderation_create_response import ModerationCreateResponse as ModerationCreateResponse +from .image_create_variation_params import ImageCreateVariationParams as ImageCreateVariationParams diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..6db69ec6 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/batch.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/batch.cpython-311.pyc new file mode 100644 index 00000000..467e6b38 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/batch.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/batch_create_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/batch_create_params.cpython-311.pyc new file mode 100644 index 00000000..6c5033de Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/batch_create_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/batch_error.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/batch_error.cpython-311.pyc new file mode 100644 index 00000000..20d64f99 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/batch_error.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/batch_list_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/batch_list_params.cpython-311.pyc new file mode 100644 index 00000000..517230d7 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/batch_list_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/batch_request_counts.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/batch_request_counts.cpython-311.pyc new file mode 100644 index 00000000..120af815 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/batch_request_counts.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/chat_model.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/chat_model.cpython-311.pyc new file mode 100644 index 00000000..79cbe7fa Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/chat_model.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/completion.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/completion.cpython-311.pyc new file mode 100644 index 00000000..04555f98 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/completion.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/completion_choice.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/completion_choice.cpython-311.pyc new file mode 100644 index 00000000..0041ca6d Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/completion_choice.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/completion_create_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/completion_create_params.cpython-311.pyc new file mode 100644 index 00000000..ceb82658 Binary files /dev/null and 
b/.venv/Lib/site-packages/openai/types/__pycache__/completion_create_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/completion_usage.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/completion_usage.cpython-311.pyc new file mode 100644 index 00000000..c11959da Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/completion_usage.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/create_embedding_response.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/create_embedding_response.cpython-311.pyc new file mode 100644 index 00000000..bda65892 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/create_embedding_response.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/embedding.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/embedding.cpython-311.pyc new file mode 100644 index 00000000..8a1e8b2d Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/embedding.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/embedding_create_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/embedding_create_params.cpython-311.pyc new file mode 100644 index 00000000..9aa651bc Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/embedding_create_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/file_content.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/file_content.cpython-311.pyc new file mode 100644 index 00000000..9903a43f Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/file_content.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/file_create_params.cpython-311.pyc 
b/.venv/Lib/site-packages/openai/types/__pycache__/file_create_params.cpython-311.pyc new file mode 100644 index 00000000..9c212ca7 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/file_create_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/file_deleted.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/file_deleted.cpython-311.pyc new file mode 100644 index 00000000..7b8e166d Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/file_deleted.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/file_list_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/file_list_params.cpython-311.pyc new file mode 100644 index 00000000..82c4ac5f Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/file_list_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/file_object.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/file_object.cpython-311.pyc new file mode 100644 index 00000000..497abbdd Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/file_object.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/image.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/image.cpython-311.pyc new file mode 100644 index 00000000..9a4e4acd Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/image.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/image_create_variation_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/image_create_variation_params.cpython-311.pyc new file mode 100644 index 00000000..69a05153 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/image_create_variation_params.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/openai/types/__pycache__/image_edit_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/image_edit_params.cpython-311.pyc new file mode 100644 index 00000000..3b5ac623 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/image_edit_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/image_generate_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/image_generate_params.cpython-311.pyc new file mode 100644 index 00000000..9e0374cf Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/image_generate_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/images_response.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/images_response.cpython-311.pyc new file mode 100644 index 00000000..7bcc171e Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/images_response.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/model.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/model.cpython-311.pyc new file mode 100644 index 00000000..2439d794 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/model.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/model_deleted.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/model_deleted.cpython-311.pyc new file mode 100644 index 00000000..893738a3 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/model_deleted.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/moderation.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/moderation.cpython-311.pyc new file mode 100644 index 00000000..eb24bab8 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/moderation.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/moderation_create_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/moderation_create_params.cpython-311.pyc new file mode 100644 index 00000000..0ac567e6 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/moderation_create_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/__pycache__/moderation_create_response.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/__pycache__/moderation_create_response.cpython-311.pyc new file mode 100644 index 00000000..32afab16 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/__pycache__/moderation_create_response.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/audio/__init__.py b/.venv/Lib/site-packages/openai/types/audio/__init__.py new file mode 100644 index 00000000..8d2c44c8 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/audio/__init__.py @@ -0,0 +1,9 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from .translation import Translation as Translation +from .transcription import Transcription as Transcription +from .speech_create_params import SpeechCreateParams as SpeechCreateParams +from .translation_create_params import TranslationCreateParams as TranslationCreateParams +from .transcription_create_params import TranscriptionCreateParams as TranscriptionCreateParams diff --git a/.venv/Lib/site-packages/openai/types/audio/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/audio/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..15a80a31 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/audio/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/audio/__pycache__/speech_create_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/audio/__pycache__/speech_create_params.cpython-311.pyc new file mode 100644 index 00000000..06059a5f Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/audio/__pycache__/speech_create_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/audio/__pycache__/transcription.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/audio/__pycache__/transcription.cpython-311.pyc new file mode 100644 index 00000000..a807b9d0 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/audio/__pycache__/transcription.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/audio/__pycache__/transcription_create_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/audio/__pycache__/transcription_create_params.cpython-311.pyc new file mode 100644 index 00000000..300d6750 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/audio/__pycache__/transcription_create_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/audio/__pycache__/translation.cpython-311.pyc 
b/.venv/Lib/site-packages/openai/types/audio/__pycache__/translation.cpython-311.pyc new file mode 100644 index 00000000..e1a51a6b Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/audio/__pycache__/translation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/audio/__pycache__/translation_create_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/audio/__pycache__/translation_create_params.cpython-311.pyc new file mode 100644 index 00000000..a2ec5b35 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/audio/__pycache__/translation_create_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/audio/speech_create_params.py b/.venv/Lib/site-packages/openai/types/audio/speech_create_params.py new file mode 100644 index 00000000..8d75ec4c --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/audio/speech_create_params.py @@ -0,0 +1,39 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["SpeechCreateParams"] + + +class SpeechCreateParams(TypedDict, total=False): + input: Required[str] + """The text to generate audio for. The maximum length is 4096 characters.""" + + model: Required[Union[str, Literal["tts-1", "tts-1-hd"]]] + """ + One of the available [TTS models](https://platform.openai.com/docs/models/tts): + `tts-1` or `tts-1-hd` + """ + + voice: Required[Literal["alloy", "echo", "fable", "onyx", "nova", "shimmer"]] + """The voice to use when generating the audio. + + Supported voices are `alloy`, `echo`, `fable`, `onyx`, `nova`, and `shimmer`. + Previews of the voices are available in the + [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech/voice-options). + """ + + response_format: Literal["mp3", "opus", "aac", "flac", "wav", "pcm"] + """The format to audio in. 
+ + Supported formats are `mp3`, `opus`, `aac`, `flac`, `wav`, and `pcm`. + """ + + speed: float + """The speed of the generated audio. + + Select a value from `0.25` to `4.0`. `1.0` is the default. + """ diff --git a/.venv/Lib/site-packages/openai/types/audio/transcription.py b/.venv/Lib/site-packages/openai/types/audio/transcription.py new file mode 100644 index 00000000..0b6ab39e --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/audio/transcription.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + + + +from ..._models import BaseModel + +__all__ = ["Transcription"] + + +class Transcription(BaseModel): + text: str + """The transcribed text.""" diff --git a/.venv/Lib/site-packages/openai/types/audio/transcription_create_params.py b/.venv/Lib/site-packages/openai/types/audio/transcription_create_params.py new file mode 100644 index 00000000..6b2d5bae --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/audio/transcription_create_params.py @@ -0,0 +1,65 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List, Union +from typing_extensions import Literal, Required, TypedDict + +from ..._types import FileTypes + +__all__ = ["TranscriptionCreateParams"] + + +class TranscriptionCreateParams(TypedDict, total=False): + file: Required[FileTypes] + """ + The audio file object (not file name) to transcribe, in one of these formats: + flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. + """ + + model: Required[Union[str, Literal["whisper-1"]]] + """ID of the model to use. + + Only `whisper-1` (which is powered by our open source Whisper V2 model) is + currently available. + """ + + language: str + """The language of the input audio. + + Supplying the input language in + [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) format will + improve accuracy and latency. 
+ """ + + prompt: str + """An optional text to guide the model's style or continue a previous audio + segment. + + The [prompt](https://platform.openai.com/docs/guides/speech-to-text/prompting) + should match the audio language. + """ + + response_format: Literal["json", "text", "srt", "verbose_json", "vtt"] + """ + The format of the transcript output, in one of these options: `json`, `text`, + `srt`, `verbose_json`, or `vtt`. + """ + + temperature: float + """The sampling temperature, between 0 and 1. + + Higher values like 0.8 will make the output more random, while lower values like + 0.2 will make it more focused and deterministic. If set to 0, the model will use + [log probability](https://en.wikipedia.org/wiki/Log_probability) to + automatically increase the temperature until certain thresholds are hit. + """ + + timestamp_granularities: List[Literal["word", "segment"]] + """The timestamp granularities to populate for this transcription. + + `response_format` must be set `verbose_json` to use timestamp granularities. + Either or both of these options are supported: `word`, or `segment`. Note: There + is no additional latency for segment timestamps, but generating word timestamps + incurs additional latency. + """ diff --git a/.venv/Lib/site-packages/openai/types/audio/translation.py b/.venv/Lib/site-packages/openai/types/audio/translation.py new file mode 100644 index 00000000..3d9ede29 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/audio/translation.py @@ -0,0 +1,11 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ + + +from ..._models import BaseModel + +__all__ = ["Translation"] + + +class Translation(BaseModel): + text: str diff --git a/.venv/Lib/site-packages/openai/types/audio/translation_create_params.py b/.venv/Lib/site-packages/openai/types/audio/translation_create_params.py new file mode 100644 index 00000000..f23a41ed --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/audio/translation_create_params.py @@ -0,0 +1,48 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union +from typing_extensions import Literal, Required, TypedDict + +from ..._types import FileTypes + +__all__ = ["TranslationCreateParams"] + + +class TranslationCreateParams(TypedDict, total=False): + file: Required[FileTypes] + """ + The audio file object (not file name) translate, in one of these formats: flac, + mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. + """ + + model: Required[Union[str, Literal["whisper-1"]]] + """ID of the model to use. + + Only `whisper-1` (which is powered by our open source Whisper V2 model) is + currently available. + """ + + prompt: str + """An optional text to guide the model's style or continue a previous audio + segment. + + The [prompt](https://platform.openai.com/docs/guides/speech-to-text/prompting) + should be in English. + """ + + response_format: str + """ + The format of the transcript output, in one of these options: `json`, `text`, + `srt`, `verbose_json`, or `vtt`. + """ + + temperature: float + """The sampling temperature, between 0 and 1. + + Higher values like 0.8 will make the output more random, while lower values like + 0.2 will make it more focused and deterministic. If set to 0, the model will use + [log probability](https://en.wikipedia.org/wiki/Log_probability) to + automatically increase the temperature until certain thresholds are hit. 
+ """ diff --git a/.venv/Lib/site-packages/openai/types/batch.py b/.venv/Lib/site-packages/openai/types/batch.py new file mode 100644 index 00000000..90f6d795 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/batch.py @@ -0,0 +1,85 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import builtins +from typing import List, Optional +from typing_extensions import Literal + +from .._models import BaseModel +from .batch_error import BatchError +from .batch_request_counts import BatchRequestCounts + +__all__ = ["Batch", "Errors"] + + +class Errors(BaseModel): + data: Optional[List[BatchError]] = None + + object: Optional[str] = None + """The object type, which is always `list`.""" + + +class Batch(BaseModel): + id: str + + completion_window: str + """The time frame within which the batch should be processed.""" + + created_at: int + """The Unix timestamp (in seconds) for when the batch was created.""" + + endpoint: str + """The OpenAI API endpoint used by the batch.""" + + input_file_id: str + """The ID of the input file for the batch.""" + + object: Literal["batch"] + """The object type, which is always `batch`.""" + + status: Literal[ + "validating", "failed", "in_progress", "finalizing", "completed", "expired", "cancelling", "cancelled" + ] + """The current status of the batch.""" + + cancelled_at: Optional[int] = None + """The Unix timestamp (in seconds) for when the batch was cancelled.""" + + cancelling_at: Optional[int] = None + """The Unix timestamp (in seconds) for when the batch started cancelling.""" + + completed_at: Optional[int] = None + """The Unix timestamp (in seconds) for when the batch was completed.""" + + error_file_id: Optional[str] = None + """The ID of the file containing the outputs of requests with errors.""" + + errors: Optional[Errors] = None + + expired_at: Optional[int] = None + """The Unix timestamp (in seconds) for when the batch expired.""" + + expires_at: Optional[int] = None + """The 
Unix timestamp (in seconds) for when the batch will expire.""" + + failed_at: Optional[int] = None + """The Unix timestamp (in seconds) for when the batch failed.""" + + finalizing_at: Optional[int] = None + """The Unix timestamp (in seconds) for when the batch started finalizing.""" + + in_progress_at: Optional[int] = None + """The Unix timestamp (in seconds) for when the batch started processing.""" + + metadata: Optional[builtins.object] = None + """Set of 16 key-value pairs that can be attached to an object. + + This can be useful for storing additional information about the object in a + structured format. Keys can be a maximum of 64 characters long and values can be + a maxium of 512 characters long. + """ + + output_file_id: Optional[str] = None + """The ID of the file containing the outputs of successfully executed requests.""" + + request_counts: Optional[BatchRequestCounts] = None + """The request counts for different statuses within the batch.""" diff --git a/.venv/Lib/site-packages/openai/types/batch_create_params.py b/.venv/Lib/site-packages/openai/types/batch_create_params.py new file mode 100644 index 00000000..63b4fae9 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/batch_create_params.py @@ -0,0 +1,36 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict, Optional +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["BatchCreateParams"] + + +class BatchCreateParams(TypedDict, total=False): + completion_window: Required[Literal["24h"]] + """The time frame within which the batch should be processed. + + Currently only `24h` is supported. + """ + + endpoint: Required[Literal["/v1/chat/completions", "/v1/embeddings"]] + """The endpoint to be used for all requests in the batch. + + Currently `/v1/chat/completions` and `/v1/embeddings` are supported. 
+ """ + + input_file_id: Required[str] + """The ID of an uploaded file that contains requests for the new batch. + + See [upload file](https://platform.openai.com/docs/api-reference/files/create) + for how to upload a file. + + Your input file must be formatted as a + [JSONL file](https://platform.openai.com/docs/api-reference/batch/requestInput), + and must be uploaded with the purpose `batch`. + """ + + metadata: Optional[Dict[str, str]] + """Optional custom metadata for the batch.""" diff --git a/.venv/Lib/site-packages/openai/types/batch_error.py b/.venv/Lib/site-packages/openai/types/batch_error.py new file mode 100644 index 00000000..1cdd808d --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/batch_error.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional + +from .._models import BaseModel + +__all__ = ["BatchError"] + + +class BatchError(BaseModel): + code: Optional[str] = None + """An error code identifying the error type.""" + + line: Optional[int] = None + """The line number of the input file where the error occurred, if applicable.""" + + message: Optional[str] = None + """A human-readable message providing more details about the error.""" + + param: Optional[str] = None + """The name of the parameter that caused the error, if applicable.""" diff --git a/.venv/Lib/site-packages/openai/types/batch_list_params.py b/.venv/Lib/site-packages/openai/types/batch_list_params.py new file mode 100644 index 00000000..ef5e966b --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/batch_list_params.py @@ -0,0 +1,24 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["BatchListParams"] + + +class BatchListParams(TypedDict, total=False): + after: str + """A cursor for use in pagination. 
+ + `after` is an object ID that defines your place in the list. For instance, if + you make a list request and receive 100 objects, ending with obj_foo, your + subsequent call can include after=obj_foo in order to fetch the next page of the + list. + """ + + limit: int + """A limit on the number of objects to be returned. + + Limit can range between 1 and 100, and the default is 20. + """ diff --git a/.venv/Lib/site-packages/openai/types/batch_request_counts.py b/.venv/Lib/site-packages/openai/types/batch_request_counts.py new file mode 100644 index 00000000..ef6c84a0 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/batch_request_counts.py @@ -0,0 +1,18 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + + + +from .._models import BaseModel + +__all__ = ["BatchRequestCounts"] + + +class BatchRequestCounts(BaseModel): + completed: int + """Number of requests that have been completed successfully.""" + + failed: int + """Number of requests that have failed.""" + + total: int + """Total number of requests in the batch.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/__init__.py b/.venv/Lib/site-packages/openai/types/beta/__init__.py new file mode 100644 index 00000000..d851a361 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/__init__.py @@ -0,0 +1,40 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from .thread import Thread as Thread +from .assistant import Assistant as Assistant +from .vector_store import VectorStore as VectorStore +from .function_tool import FunctionTool as FunctionTool +from .assistant_tool import AssistantTool as AssistantTool +from .thread_deleted import ThreadDeleted as ThreadDeleted +from .file_search_tool import FileSearchTool as FileSearchTool +from .assistant_deleted import AssistantDeleted as AssistantDeleted +from .function_tool_param import FunctionToolParam as FunctionToolParam +from .assistant_tool_param import AssistantToolParam as AssistantToolParam +from .thread_create_params import ThreadCreateParams as ThreadCreateParams +from .thread_update_params import ThreadUpdateParams as ThreadUpdateParams +from .vector_store_deleted import VectorStoreDeleted as VectorStoreDeleted +from .assistant_list_params import AssistantListParams as AssistantListParams +from .assistant_tool_choice import AssistantToolChoice as AssistantToolChoice +from .code_interpreter_tool import CodeInterpreterTool as CodeInterpreterTool +from .assistant_stream_event import AssistantStreamEvent as AssistantStreamEvent +from .file_search_tool_param import FileSearchToolParam as FileSearchToolParam +from .assistant_create_params import AssistantCreateParams as AssistantCreateParams +from .assistant_update_params import AssistantUpdateParams as AssistantUpdateParams +from .vector_store_list_params import VectorStoreListParams as VectorStoreListParams +from .assistant_response_format import AssistantResponseFormat as AssistantResponseFormat +from .vector_store_create_params import VectorStoreCreateParams as VectorStoreCreateParams +from .vector_store_update_params import VectorStoreUpdateParams as VectorStoreUpdateParams +from .assistant_tool_choice_param import AssistantToolChoiceParam as AssistantToolChoiceParam +from .code_interpreter_tool_param import CodeInterpreterToolParam as CodeInterpreterToolParam +from 
.assistant_tool_choice_option import AssistantToolChoiceOption as AssistantToolChoiceOption +from .thread_create_and_run_params import ThreadCreateAndRunParams as ThreadCreateAndRunParams +from .assistant_tool_choice_function import AssistantToolChoiceFunction as AssistantToolChoiceFunction +from .assistant_response_format_param import AssistantResponseFormatParam as AssistantResponseFormatParam +from .assistant_response_format_option import AssistantResponseFormatOption as AssistantResponseFormatOption +from .assistant_tool_choice_option_param import AssistantToolChoiceOptionParam as AssistantToolChoiceOptionParam +from .assistant_tool_choice_function_param import AssistantToolChoiceFunctionParam as AssistantToolChoiceFunctionParam +from .assistant_response_format_option_param import ( + AssistantResponseFormatOptionParam as AssistantResponseFormatOptionParam, +) diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..3f90ab1b Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant.cpython-311.pyc new file mode 100644 index 00000000..7d75c870 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_create_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_create_params.cpython-311.pyc new file mode 100644 index 00000000..d0ddaaba Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_create_params.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_deleted.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_deleted.cpython-311.pyc new file mode 100644 index 00000000..dfb70fd4 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_deleted.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_list_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_list_params.cpython-311.pyc new file mode 100644 index 00000000..302f7291 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_list_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_response_format.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_response_format.cpython-311.pyc new file mode 100644 index 00000000..d2987554 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_response_format.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_response_format_option.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_response_format_option.cpython-311.pyc new file mode 100644 index 00000000..429f4a07 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_response_format_option.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_response_format_option_param.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_response_format_option_param.cpython-311.pyc new file mode 100644 index 00000000..d9bb4157 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_response_format_option_param.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_response_format_param.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_response_format_param.cpython-311.pyc new file mode 100644 index 00000000..9d0d6d14 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_response_format_param.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_stream_event.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_stream_event.cpython-311.pyc new file mode 100644 index 00000000..e16b5bde Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_stream_event.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool.cpython-311.pyc new file mode 100644 index 00000000..0cc129a5 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool_choice.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool_choice.cpython-311.pyc new file mode 100644 index 00000000..acc95d6c Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool_choice.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool_choice_function.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool_choice_function.cpython-311.pyc new file mode 100644 index 00000000..125fce47 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool_choice_function.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool_choice_function_param.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool_choice_function_param.cpython-311.pyc new file mode 100644 index 00000000..fa8799b4 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool_choice_function_param.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool_choice_option.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool_choice_option.cpython-311.pyc new file mode 100644 index 00000000..37ed9703 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool_choice_option.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool_choice_option_param.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool_choice_option_param.cpython-311.pyc new file mode 100644 index 00000000..c216c790 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool_choice_option_param.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool_choice_param.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool_choice_param.cpython-311.pyc new file mode 100644 index 00000000..cc00d002 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool_choice_param.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool_param.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool_param.cpython-311.pyc new file mode 100644 index 00000000..97a85415 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_tool_param.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_update_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_update_params.cpython-311.pyc new file mode 100644 index 00000000..4c78123b Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/assistant_update_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/code_interpreter_tool.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/code_interpreter_tool.cpython-311.pyc new file mode 100644 index 00000000..50233041 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/code_interpreter_tool.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/code_interpreter_tool_param.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/code_interpreter_tool_param.cpython-311.pyc new file mode 100644 index 00000000..605d958a Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/code_interpreter_tool_param.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/file_search_tool.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/file_search_tool.cpython-311.pyc new file mode 100644 index 00000000..7028aebe Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/file_search_tool.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/file_search_tool_param.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/file_search_tool_param.cpython-311.pyc new file mode 100644 index 00000000..83261462 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/file_search_tool_param.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/function_tool.cpython-311.pyc 
b/.venv/Lib/site-packages/openai/types/beta/__pycache__/function_tool.cpython-311.pyc new file mode 100644 index 00000000..24524f2b Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/function_tool.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/function_tool_param.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/function_tool_param.cpython-311.pyc new file mode 100644 index 00000000..ab06e968 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/function_tool_param.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/thread.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/thread.cpython-311.pyc new file mode 100644 index 00000000..4119b073 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/thread.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/thread_create_and_run_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/thread_create_and_run_params.cpython-311.pyc new file mode 100644 index 00000000..bbba5c99 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/thread_create_and_run_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/thread_create_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/thread_create_params.cpython-311.pyc new file mode 100644 index 00000000..12d78f60 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/thread_create_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/thread_deleted.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/thread_deleted.cpython-311.pyc new file mode 100644 index 00000000..7c8399a3 Binary files /dev/null and 
b/.venv/Lib/site-packages/openai/types/beta/__pycache__/thread_deleted.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/thread_update_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/thread_update_params.cpython-311.pyc new file mode 100644 index 00000000..c314e832 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/thread_update_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/vector_store.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/vector_store.cpython-311.pyc new file mode 100644 index 00000000..319a48a7 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/vector_store.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/vector_store_create_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/vector_store_create_params.cpython-311.pyc new file mode 100644 index 00000000..cde70818 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/vector_store_create_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/vector_store_deleted.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/vector_store_deleted.cpython-311.pyc new file mode 100644 index 00000000..3bfe24ac Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/vector_store_deleted.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/__pycache__/vector_store_list_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/vector_store_list_params.cpython-311.pyc new file mode 100644 index 00000000..eff50f4f Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/vector_store_list_params.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/openai/types/beta/__pycache__/vector_store_update_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/__pycache__/vector_store_update_params.cpython-311.pyc new file mode 100644 index 00000000..fe5e02be Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/__pycache__/vector_store_update_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/assistant.py b/.venv/Lib/site-packages/openai/types/beta/assistant.py new file mode 100644 index 00000000..0b997e0b --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/assistant.py @@ -0,0 +1,126 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional +from typing_extensions import Literal + +from ..._models import BaseModel +from .assistant_tool import AssistantTool +from .assistant_response_format_option import AssistantResponseFormatOption + +__all__ = ["Assistant", "ToolResources", "ToolResourcesCodeInterpreter", "ToolResourcesFileSearch"] + + +class ToolResourcesCodeInterpreter(BaseModel): + file_ids: Optional[List[str]] = None + """ + A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + available to the `code_interpreter`` tool. There can be a maximum of 20 files + associated with the tool. + """ + + +class ToolResourcesFileSearch(BaseModel): + vector_store_ids: Optional[List[str]] = None + """ + The ID of the + [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + attached to this assistant. There can be a maximum of 1 vector store attached to + the assistant. 
+ """ + + +class ToolResources(BaseModel): + code_interpreter: Optional[ToolResourcesCodeInterpreter] = None + + file_search: Optional[ToolResourcesFileSearch] = None + + +class Assistant(BaseModel): + id: str + """The identifier, which can be referenced in API endpoints.""" + + created_at: int + """The Unix timestamp (in seconds) for when the assistant was created.""" + + description: Optional[str] = None + """The description of the assistant. The maximum length is 512 characters.""" + + instructions: Optional[str] = None + """The system instructions that the assistant uses. + + The maximum length is 256,000 characters. + """ + + metadata: Optional[object] = None + """Set of 16 key-value pairs that can be attached to an object. + + This can be useful for storing additional information about the object in a + structured format. Keys can be a maximum of 64 characters long and values can be + a maxium of 512 characters long. + """ + + model: str + """ID of the model to use. + + You can use the + [List models](https://platform.openai.com/docs/api-reference/models/list) API to + see all of your available models, or see our + [Model overview](https://platform.openai.com/docs/models/overview) for + descriptions of them. + """ + + name: Optional[str] = None + """The name of the assistant. The maximum length is 256 characters.""" + + object: Literal["assistant"] + """The object type, which is always `assistant`.""" + + tools: List[AssistantTool] + """A list of tool enabled on the assistant. + + There can be a maximum of 128 tools per assistant. Tools can be of types + `code_interpreter`, `file_search`, or `function`. + """ + + response_format: Optional[AssistantResponseFormatOption] = None + """Specifies the format that the model must output. + + Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. 
+ + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + """ + + temperature: Optional[float] = None + """What sampling temperature to use, between 0 and 2. + + Higher values like 0.8 will make the output more random, while lower values like + 0.2 will make it more focused and deterministic. + """ + + tool_resources: Optional[ToolResources] = None + """A set of resources that are used by the assistant's tools. + + The resources are specific to the type of tool. For example, the + `code_interpreter` tool requires a list of file IDs, while the `file_search` + tool requires a list of vector store IDs. + """ + + top_p: Optional[float] = None + """ + An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or temperature but not both. + """ diff --git a/.venv/Lib/site-packages/openai/types/beta/assistant_create_params.py b/.venv/Lib/site-packages/openai/types/beta/assistant_create_params.py new file mode 100644 index 00000000..e9ff66df --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/assistant_create_params.py @@ -0,0 +1,173 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import List, Union, Iterable, Optional +from typing_extensions import Literal, Required, TypedDict + +from .assistant_tool_param import AssistantToolParam +from .assistant_response_format_option_param import AssistantResponseFormatOptionParam + +__all__ = [ + "AssistantCreateParams", + "ToolResources", + "ToolResourcesCodeInterpreter", + "ToolResourcesFileSearch", + "ToolResourcesFileSearchVectorStore", +] + + +class AssistantCreateParams(TypedDict, total=False): + model: Required[ + Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + ] + ] + """ID of the model to use. + + You can use the + [List models](https://platform.openai.com/docs/api-reference/models/list) API to + see all of your available models, or see our + [Model overview](https://platform.openai.com/docs/models/overview) for + descriptions of them. + """ + + description: Optional[str] + """The description of the assistant. The maximum length is 512 characters.""" + + instructions: Optional[str] + """The system instructions that the assistant uses. + + The maximum length is 256,000 characters. + """ + + metadata: Optional[object] + """Set of 16 key-value pairs that can be attached to an object. + + This can be useful for storing additional information about the object in a + structured format. Keys can be a maximum of 64 characters long and values can be + a maxium of 512 characters long. + """ + + name: Optional[str] + """The name of the assistant. 
The maximum length is 256 characters.""" + + response_format: Optional[AssistantResponseFormatOptionParam] + """Specifies the format that the model must output. + + Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + """ + + temperature: Optional[float] + """What sampling temperature to use, between 0 and 2. + + Higher values like 0.8 will make the output more random, while lower values like + 0.2 will make it more focused and deterministic. + """ + + tool_resources: Optional[ToolResources] + """A set of resources that are used by the assistant's tools. + + The resources are specific to the type of tool. For example, the + `code_interpreter` tool requires a list of file IDs, while the `file_search` + tool requires a list of vector store IDs. + """ + + tools: Iterable[AssistantToolParam] + """A list of tool enabled on the assistant. + + There can be a maximum of 128 tools per assistant. Tools can be of types + `code_interpreter`, `file_search`, or `function`. + """ + + top_p: Optional[float] + """ + An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. 
So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or temperature but not both. + """ + + +class ToolResourcesCodeInterpreter(TypedDict, total=False): + file_ids: List[str] + """ + A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + available to the `code_interpreter` tool. There can be a maximum of 20 files + associated with the tool. + """ + + +class ToolResourcesFileSearchVectorStore(TypedDict, total=False): + file_ids: List[str] + """ + A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to + add to the vector store. There can be a maximum of 10000 files in a vector + store. + """ + + metadata: object + """Set of 16 key-value pairs that can be attached to a vector store. + + This can be useful for storing additional information about the vector store in + a structured format. Keys can be a maximum of 64 characters long and values can + be a maxium of 512 characters long. + """ + + +class ToolResourcesFileSearch(TypedDict, total=False): + vector_store_ids: List[str] + """ + The + [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + attached to this assistant. There can be a maximum of 1 vector store attached to + the assistant. + """ + + vector_stores: Iterable[ToolResourcesFileSearchVectorStore] + """ + A helper to create a + [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + with file_ids and attach it to this assistant. There can be a maximum of 1 + vector store attached to the assistant. 
+ """ + + +class ToolResources(TypedDict, total=False): + code_interpreter: ToolResourcesCodeInterpreter + + file_search: ToolResourcesFileSearch diff --git a/.venv/Lib/site-packages/openai/types/beta/assistant_deleted.py b/.venv/Lib/site-packages/openai/types/beta/assistant_deleted.py new file mode 100644 index 00000000..3be40cd6 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/assistant_deleted.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["AssistantDeleted"] + + +class AssistantDeleted(BaseModel): + id: str + + deleted: bool + + object: Literal["assistant.deleted"] diff --git a/.venv/Lib/site-packages/openai/types/beta/assistant_list_params.py b/.venv/Lib/site-packages/openai/types/beta/assistant_list_params.py new file mode 100644 index 00000000..f54f6312 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/assistant_list_params.py @@ -0,0 +1,39 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, TypedDict + +__all__ = ["AssistantListParams"] + + +class AssistantListParams(TypedDict, total=False): + after: str + """A cursor for use in pagination. + + `after` is an object ID that defines your place in the list. For instance, if + you make a list request and receive 100 objects, ending with obj_foo, your + subsequent call can include after=obj_foo in order to fetch the next page of the + list. + """ + + before: str + """A cursor for use in pagination. + + `before` is an object ID that defines your place in the list. For instance, if + you make a list request and receive 100 objects, ending with obj_foo, your + subsequent call can include before=obj_foo in order to fetch the previous page + of the list. + """ + + limit: int + """A limit on the number of objects to be returned. 
+ + Limit can range between 1 and 100, and the default is 20. + """ + + order: Literal["asc", "desc"] + """Sort order by the `created_at` timestamp of the objects. + + `asc` for ascending order and `desc` for descending order. + """ diff --git a/.venv/Lib/site-packages/openai/types/beta/assistant_response_format.py b/.venv/Lib/site-packages/openai/types/beta/assistant_response_format.py new file mode 100644 index 00000000..f53bdaf6 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/assistant_response_format.py @@ -0,0 +1,13 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["AssistantResponseFormat"] + + +class AssistantResponseFormat(BaseModel): + type: Optional[Literal["text", "json_object"]] = None + """Must be one of `text` or `json_object`.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/assistant_response_format_option.py b/.venv/Lib/site-packages/openai/types/beta/assistant_response_format_option.py new file mode 100644 index 00000000..d4e05e0e --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/assistant_response_format_option.py @@ -0,0 +1,10 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Union +from typing_extensions import Literal + +from .assistant_response_format import AssistantResponseFormat + +__all__ = ["AssistantResponseFormatOption"] + +AssistantResponseFormatOption = Union[Literal["none", "auto"], AssistantResponseFormat] diff --git a/.venv/Lib/site-packages/openai/types/beta/assistant_response_format_option_param.py b/.venv/Lib/site-packages/openai/types/beta/assistant_response_format_option_param.py new file mode 100644 index 00000000..46e04125 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/assistant_response_format_option_param.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union +from typing_extensions import Literal + +from .assistant_response_format_param import AssistantResponseFormatParam + +__all__ = ["AssistantResponseFormatOptionParam"] + +AssistantResponseFormatOptionParam = Union[Literal["none", "auto"], AssistantResponseFormatParam] diff --git a/.venv/Lib/site-packages/openai/types/beta/assistant_response_format_param.py b/.venv/Lib/site-packages/openai/types/beta/assistant_response_format_param.py new file mode 100644 index 00000000..96e1d021 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/assistant_response_format_param.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing_extensions import Literal, TypedDict + +__all__ = ["AssistantResponseFormatParam"] + + +class AssistantResponseFormatParam(TypedDict, total=False): + type: Literal["text", "json_object"] + """Must be one of `text` or `json_object`.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/assistant_stream_event.py b/.venv/Lib/site-packages/openai/types/beta/assistant_stream_event.py new file mode 100644 index 00000000..91925e93 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/assistant_stream_event.py @@ -0,0 +1,279 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Union +from typing_extensions import Literal, Annotated + +from .thread import Thread +from ..._utils import PropertyInfo +from ..._models import BaseModel +from .threads.run import Run +from .threads.message import Message +from ..shared.error_object import ErrorObject +from .threads.runs.run_step import RunStep +from .threads.message_delta_event import MessageDeltaEvent +from .threads.runs.run_step_delta_event import RunStepDeltaEvent + +__all__ = [ + "AssistantStreamEvent", + "ThreadCreated", + "ThreadRunCreated", + "ThreadRunQueued", + "ThreadRunInProgress", + "ThreadRunRequiresAction", + "ThreadRunCompleted", + "ThreadRunFailed", + "ThreadRunCancelling", + "ThreadRunCancelled", + "ThreadRunExpired", + "ThreadRunStepCreated", + "ThreadRunStepInProgress", + "ThreadRunStepDelta", + "ThreadRunStepCompleted", + "ThreadRunStepFailed", + "ThreadRunStepCancelled", + "ThreadRunStepExpired", + "ThreadMessageCreated", + "ThreadMessageInProgress", + "ThreadMessageDelta", + "ThreadMessageCompleted", + "ThreadMessageIncomplete", + "ErrorEvent", +] + + +class ThreadCreated(BaseModel): + data: Thread + """ + Represents a thread that contains + [messages](https://platform.openai.com/docs/api-reference/messages). 
+ """ + + event: Literal["thread.created"] + + +class ThreadRunCreated(BaseModel): + data: Run + """ + Represents an execution run on a + [thread](https://platform.openai.com/docs/api-reference/threads). + """ + + event: Literal["thread.run.created"] + + +class ThreadRunQueued(BaseModel): + data: Run + """ + Represents an execution run on a + [thread](https://platform.openai.com/docs/api-reference/threads). + """ + + event: Literal["thread.run.queued"] + + +class ThreadRunInProgress(BaseModel): + data: Run + """ + Represents an execution run on a + [thread](https://platform.openai.com/docs/api-reference/threads). + """ + + event: Literal["thread.run.in_progress"] + + +class ThreadRunRequiresAction(BaseModel): + data: Run + """ + Represents an execution run on a + [thread](https://platform.openai.com/docs/api-reference/threads). + """ + + event: Literal["thread.run.requires_action"] + + +class ThreadRunCompleted(BaseModel): + data: Run + """ + Represents an execution run on a + [thread](https://platform.openai.com/docs/api-reference/threads). + """ + + event: Literal["thread.run.completed"] + + +class ThreadRunFailed(BaseModel): + data: Run + """ + Represents an execution run on a + [thread](https://platform.openai.com/docs/api-reference/threads). + """ + + event: Literal["thread.run.failed"] + + +class ThreadRunCancelling(BaseModel): + data: Run + """ + Represents an execution run on a + [thread](https://platform.openai.com/docs/api-reference/threads). + """ + + event: Literal["thread.run.cancelling"] + + +class ThreadRunCancelled(BaseModel): + data: Run + """ + Represents an execution run on a + [thread](https://platform.openai.com/docs/api-reference/threads). + """ + + event: Literal["thread.run.cancelled"] + + +class ThreadRunExpired(BaseModel): + data: Run + """ + Represents an execution run on a + [thread](https://platform.openai.com/docs/api-reference/threads). 
+ """ + + event: Literal["thread.run.expired"] + + +class ThreadRunStepCreated(BaseModel): + data: RunStep + """Represents a step in execution of a run.""" + + event: Literal["thread.run.step.created"] + + +class ThreadRunStepInProgress(BaseModel): + data: RunStep + """Represents a step in execution of a run.""" + + event: Literal["thread.run.step.in_progress"] + + +class ThreadRunStepDelta(BaseModel): + data: RunStepDeltaEvent + """Represents a run step delta i.e. + + any changed fields on a run step during streaming. + """ + + event: Literal["thread.run.step.delta"] + + +class ThreadRunStepCompleted(BaseModel): + data: RunStep + """Represents a step in execution of a run.""" + + event: Literal["thread.run.step.completed"] + + +class ThreadRunStepFailed(BaseModel): + data: RunStep + """Represents a step in execution of a run.""" + + event: Literal["thread.run.step.failed"] + + +class ThreadRunStepCancelled(BaseModel): + data: RunStep + """Represents a step in execution of a run.""" + + event: Literal["thread.run.step.cancelled"] + + +class ThreadRunStepExpired(BaseModel): + data: RunStep + """Represents a step in execution of a run.""" + + event: Literal["thread.run.step.expired"] + + +class ThreadMessageCreated(BaseModel): + data: Message + """ + Represents a message within a + [thread](https://platform.openai.com/docs/api-reference/threads). + """ + + event: Literal["thread.message.created"] + + +class ThreadMessageInProgress(BaseModel): + data: Message + """ + Represents a message within a + [thread](https://platform.openai.com/docs/api-reference/threads). + """ + + event: Literal["thread.message.in_progress"] + + +class ThreadMessageDelta(BaseModel): + data: MessageDeltaEvent + """Represents a message delta i.e. + + any changed fields on a message during streaming. 
+ """ + + event: Literal["thread.message.delta"] + + +class ThreadMessageCompleted(BaseModel): + data: Message + """ + Represents a message within a + [thread](https://platform.openai.com/docs/api-reference/threads). + """ + + event: Literal["thread.message.completed"] + + +class ThreadMessageIncomplete(BaseModel): + data: Message + """ + Represents a message within a + [thread](https://platform.openai.com/docs/api-reference/threads). + """ + + event: Literal["thread.message.incomplete"] + + +class ErrorEvent(BaseModel): + data: ErrorObject + + event: Literal["error"] + + +AssistantStreamEvent = Annotated[ + Union[ + ThreadCreated, + ThreadRunCreated, + ThreadRunQueued, + ThreadRunInProgress, + ThreadRunRequiresAction, + ThreadRunCompleted, + ThreadRunFailed, + ThreadRunCancelling, + ThreadRunCancelled, + ThreadRunExpired, + ThreadRunStepCreated, + ThreadRunStepInProgress, + ThreadRunStepDelta, + ThreadRunStepCompleted, + ThreadRunStepFailed, + ThreadRunStepCancelled, + ThreadRunStepExpired, + ThreadMessageCreated, + ThreadMessageInProgress, + ThreadMessageDelta, + ThreadMessageCompleted, + ThreadMessageIncomplete, + ErrorEvent, + ], + PropertyInfo(discriminator="event"), +] diff --git a/.venv/Lib/site-packages/openai/types/beta/assistant_tool.py b/.venv/Lib/site-packages/openai/types/beta/assistant_tool.py new file mode 100644 index 00000000..7832da48 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/assistant_tool.py @@ -0,0 +1,13 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Union +from typing_extensions import Annotated + +from ..._utils import PropertyInfo +from .function_tool import FunctionTool +from .file_search_tool import FileSearchTool +from .code_interpreter_tool import CodeInterpreterTool + +__all__ = ["AssistantTool"] + +AssistantTool = Annotated[Union[CodeInterpreterTool, FileSearchTool, FunctionTool], PropertyInfo(discriminator="type")] diff --git a/.venv/Lib/site-packages/openai/types/beta/assistant_tool_choice.py b/.venv/Lib/site-packages/openai/types/beta/assistant_tool_choice.py new file mode 100644 index 00000000..d73439f0 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/assistant_tool_choice.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from typing_extensions import Literal + +from ..._models import BaseModel +from .assistant_tool_choice_function import AssistantToolChoiceFunction + +__all__ = ["AssistantToolChoice"] + + +class AssistantToolChoice(BaseModel): + type: Literal["function", "code_interpreter", "file_search"] + """The type of the tool. If type is `function`, the function name must be set""" + + function: Optional[AssistantToolChoiceFunction] = None diff --git a/.venv/Lib/site-packages/openai/types/beta/assistant_tool_choice_function.py b/.venv/Lib/site-packages/openai/types/beta/assistant_tool_choice_function.py new file mode 100644 index 00000000..d0d42553 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/assistant_tool_choice_function.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ + + +from ..._models import BaseModel + +__all__ = ["AssistantToolChoiceFunction"] + + +class AssistantToolChoiceFunction(BaseModel): + name: str + """The name of the function to call.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/assistant_tool_choice_function_param.py b/.venv/Lib/site-packages/openai/types/beta/assistant_tool_choice_function_param.py new file mode 100644 index 00000000..428857de --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/assistant_tool_choice_function_param.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["AssistantToolChoiceFunctionParam"] + + +class AssistantToolChoiceFunctionParam(TypedDict, total=False): + name: Required[str] + """The name of the function to call.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/assistant_tool_choice_option.py b/.venv/Lib/site-packages/openai/types/beta/assistant_tool_choice_option.py new file mode 100644 index 00000000..8958bc8f --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/assistant_tool_choice_option.py @@ -0,0 +1,10 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Union +from typing_extensions import Literal + +from .assistant_tool_choice import AssistantToolChoice + +__all__ = ["AssistantToolChoiceOption"] + +AssistantToolChoiceOption = Union[Literal["none", "auto", "required"], AssistantToolChoice] diff --git a/.venv/Lib/site-packages/openai/types/beta/assistant_tool_choice_option_param.py b/.venv/Lib/site-packages/openai/types/beta/assistant_tool_choice_option_param.py new file mode 100644 index 00000000..81b7f151 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/assistant_tool_choice_option_param.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Union +from typing_extensions import Literal + +from .assistant_tool_choice_param import AssistantToolChoiceParam + +__all__ = ["AssistantToolChoiceOptionParam"] + +AssistantToolChoiceOptionParam = Union[Literal["none", "auto", "required"], AssistantToolChoiceParam] diff --git a/.venv/Lib/site-packages/openai/types/beta/assistant_tool_choice_param.py b/.venv/Lib/site-packages/openai/types/beta/assistant_tool_choice_param.py new file mode 100644 index 00000000..904f489e --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/assistant_tool_choice_param.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +from .assistant_tool_choice_function_param import AssistantToolChoiceFunctionParam + +__all__ = ["AssistantToolChoiceParam"] + + +class AssistantToolChoiceParam(TypedDict, total=False): + type: Required[Literal["function", "code_interpreter", "file_search"]] + """The type of the tool. If type is `function`, the function name must be set""" + + function: AssistantToolChoiceFunctionParam diff --git a/.venv/Lib/site-packages/openai/types/beta/assistant_tool_param.py b/.venv/Lib/site-packages/openai/types/beta/assistant_tool_param.py new file mode 100644 index 00000000..5b1d30ba --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/assistant_tool_param.py @@ -0,0 +1,13 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Union + +from .function_tool_param import FunctionToolParam +from .file_search_tool_param import FileSearchToolParam +from .code_interpreter_tool_param import CodeInterpreterToolParam + +__all__ = ["AssistantToolParam"] + +AssistantToolParam = Union[CodeInterpreterToolParam, FileSearchToolParam, FunctionToolParam] diff --git a/.venv/Lib/site-packages/openai/types/beta/assistant_update_params.py b/.venv/Lib/site-packages/openai/types/beta/assistant_update_params.py new file mode 100644 index 00000000..55c846ce --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/assistant_update_params.py @@ -0,0 +1,119 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List, Iterable, Optional +from typing_extensions import TypedDict + +from .assistant_tool_param import AssistantToolParam +from .assistant_response_format_option_param import AssistantResponseFormatOptionParam + +__all__ = ["AssistantUpdateParams", "ToolResources", "ToolResourcesCodeInterpreter", "ToolResourcesFileSearch"] + + +class AssistantUpdateParams(TypedDict, total=False): + description: Optional[str] + """The description of the assistant. The maximum length is 512 characters.""" + + instructions: Optional[str] + """The system instructions that the assistant uses. + + The maximum length is 256,000 characters. + """ + + metadata: Optional[object] + """Set of 16 key-value pairs that can be attached to an object. + + This can be useful for storing additional information about the object in a + structured format. Keys can be a maximum of 64 characters long and values can be + a maxium of 512 characters long. + """ + + model: str + """ID of the model to use. 
+ + You can use the + [List models](https://platform.openai.com/docs/api-reference/models/list) API to + see all of your available models, or see our + [Model overview](https://platform.openai.com/docs/models/overview) for + descriptions of them. + """ + + name: Optional[str] + """The name of the assistant. The maximum length is 256 characters.""" + + response_format: Optional[AssistantResponseFormatOptionParam] + """Specifies the format that the model must output. + + Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + """ + + temperature: Optional[float] + """What sampling temperature to use, between 0 and 2. + + Higher values like 0.8 will make the output more random, while lower values like + 0.2 will make it more focused and deterministic. + """ + + tool_resources: Optional[ToolResources] + """A set of resources that are used by the assistant's tools. + + The resources are specific to the type of tool. For example, the + `code_interpreter` tool requires a list of file IDs, while the `file_search` + tool requires a list of vector store IDs. + """ + + tools: Iterable[AssistantToolParam] + """A list of tool enabled on the assistant. + + There can be a maximum of 128 tools per assistant. 
Tools can be of types + `code_interpreter`, `file_search`, or `function`. + """ + + top_p: Optional[float] + """ + An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or temperature but not both. + """ + + +class ToolResourcesCodeInterpreter(TypedDict, total=False): + file_ids: List[str] + """ + Overrides the list of + [file](https://platform.openai.com/docs/api-reference/files) IDs made available + to the `code_interpreter` tool. There can be a maximum of 20 files associated + with the tool. + """ + + +class ToolResourcesFileSearch(TypedDict, total=False): + vector_store_ids: List[str] + """ + Overrides the + [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + attached to this assistant. There can be a maximum of 1 vector store attached to + the assistant. + """ + + +class ToolResources(TypedDict, total=False): + code_interpreter: ToolResourcesCodeInterpreter + + file_search: ToolResourcesFileSearch diff --git a/.venv/Lib/site-packages/openai/types/beta/chat/__init__.py b/.venv/Lib/site-packages/openai/types/beta/chat/__init__.py new file mode 100644 index 00000000..f8ee8b14 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/chat/__init__.py @@ -0,0 +1,3 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations diff --git a/.venv/Lib/site-packages/openai/types/beta/chat/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/chat/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..eee81314 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/chat/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/code_interpreter_tool.py b/.venv/Lib/site-packages/openai/types/beta/code_interpreter_tool.py new file mode 100644 index 00000000..17ab3de6 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/code_interpreter_tool.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["CodeInterpreterTool"] + + +class CodeInterpreterTool(BaseModel): + type: Literal["code_interpreter"] + """The type of tool being defined: `code_interpreter`""" diff --git a/.venv/Lib/site-packages/openai/types/beta/code_interpreter_tool_param.py b/.venv/Lib/site-packages/openai/types/beta/code_interpreter_tool_param.py new file mode 100644 index 00000000..4f6916d7 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/code_interpreter_tool_param.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["CodeInterpreterToolParam"] + + +class CodeInterpreterToolParam(TypedDict, total=False): + type: Required[Literal["code_interpreter"]] + """The type of tool being defined: `code_interpreter`""" diff --git a/.venv/Lib/site-packages/openai/types/beta/file_search_tool.py b/.venv/Lib/site-packages/openai/types/beta/file_search_tool.py new file mode 100644 index 00000000..eea55ea6 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/file_search_tool.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["FileSearchTool"] + + +class FileSearchTool(BaseModel): + type: Literal["file_search"] + """The type of tool being defined: `file_search`""" diff --git a/.venv/Lib/site-packages/openai/types/beta/file_search_tool_param.py b/.venv/Lib/site-packages/openai/types/beta/file_search_tool_param.py new file mode 100644 index 00000000..d33fd06d --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/file_search_tool_param.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["FileSearchToolParam"] + + +class FileSearchToolParam(TypedDict, total=False): + type: Required[Literal["file_search"]] + """The type of tool being defined: `file_search`""" diff --git a/.venv/Lib/site-packages/openai/types/beta/function_tool.py b/.venv/Lib/site-packages/openai/types/beta/function_tool.py new file mode 100644 index 00000000..f9227678 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/function_tool.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing_extensions import Literal + +from ..._models import BaseModel +from ..shared.function_definition import FunctionDefinition + +__all__ = ["FunctionTool"] + + +class FunctionTool(BaseModel): + function: FunctionDefinition + + type: Literal["function"] + """The type of tool being defined: `function`""" diff --git a/.venv/Lib/site-packages/openai/types/beta/function_tool_param.py b/.venv/Lib/site-packages/openai/types/beta/function_tool_param.py new file mode 100644 index 00000000..b44c0d47 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/function_tool_param.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +from ...types import shared_params + +__all__ = ["FunctionToolParam"] + + +class FunctionToolParam(TypedDict, total=False): + function: Required[shared_params.FunctionDefinition] + + type: Required[Literal["function"]] + """The type of tool being defined: `function`""" diff --git a/.venv/Lib/site-packages/openai/types/beta/thread.py b/.venv/Lib/site-packages/openai/types/beta/thread.py new file mode 100644 index 00000000..6f7a6c7d --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/thread.py @@ -0,0 +1,60 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["Thread", "ToolResources", "ToolResourcesCodeInterpreter", "ToolResourcesFileSearch"] + + +class ToolResourcesCodeInterpreter(BaseModel): + file_ids: Optional[List[str]] = None + """ + A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + available to the `code_interpreter` tool. There can be a maximum of 20 files + associated with the tool. 
+ """ + + +class ToolResourcesFileSearch(BaseModel): + vector_store_ids: Optional[List[str]] = None + """ + The + [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + attached to this thread. There can be a maximum of 1 vector store attached to + the thread. + """ + + +class ToolResources(BaseModel): + code_interpreter: Optional[ToolResourcesCodeInterpreter] = None + + file_search: Optional[ToolResourcesFileSearch] = None + + +class Thread(BaseModel): + id: str + """The identifier, which can be referenced in API endpoints.""" + + created_at: int + """The Unix timestamp (in seconds) for when the thread was created.""" + + metadata: Optional[object] = None + """Set of 16 key-value pairs that can be attached to an object. + + This can be useful for storing additional information about the object in a + structured format. Keys can be a maximum of 64 characters long and values can be + a maxium of 512 characters long. + """ + + object: Literal["thread"] + """The object type, which is always `thread`.""" + + tool_resources: Optional[ToolResources] = None + """ + A set of resources that are made available to the assistant's tools in this + thread. The resources are specific to the type of tool. For example, the + `code_interpreter` tool requires a list of file IDs, while the `file_search` + tool requires a list of vector store IDs. + """ diff --git a/.venv/Lib/site-packages/openai/types/beta/thread_create_and_run_params.py b/.venv/Lib/site-packages/openai/types/beta/thread_create_and_run_params.py new file mode 100644 index 00000000..60510965 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/thread_create_and_run_params.py @@ -0,0 +1,348 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import List, Union, Iterable, Optional +from typing_extensions import Literal, Required, TypedDict + +from .function_tool_param import FunctionToolParam +from .file_search_tool_param import FileSearchToolParam +from .code_interpreter_tool_param import CodeInterpreterToolParam +from .assistant_tool_choice_option_param import AssistantToolChoiceOptionParam +from .assistant_response_format_option_param import AssistantResponseFormatOptionParam + +__all__ = [ + "ThreadCreateAndRunParamsBase", + "Thread", + "ThreadMessage", + "ThreadMessageAttachment", + "ThreadMessageAttachmentTool", + "ThreadToolResources", + "ThreadToolResourcesCodeInterpreter", + "ThreadToolResourcesFileSearch", + "ThreadToolResourcesFileSearchVectorStore", + "ToolResources", + "ToolResourcesCodeInterpreter", + "ToolResourcesFileSearch", + "Tool", + "TruncationStrategy", + "ThreadCreateAndRunParamsNonStreaming", + "ThreadCreateAndRunParamsStreaming", +] + + +class ThreadCreateAndRunParamsBase(TypedDict, total=False): + assistant_id: Required[str] + """ + The ID of the + [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + execute this run. + """ + + instructions: Optional[str] + """Override the default system message of the assistant. + + This is useful for modifying the behavior on a per-run basis. + """ + + max_completion_tokens: Optional[int] + """ + The maximum number of completion tokens that may be used over the course of the + run. The run will make a best effort to use only the number of completion tokens + specified, across multiple turns of the run. If the run exceeds the number of + completion tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + """ + + max_prompt_tokens: Optional[int] + """The maximum number of prompt tokens that may be used over the course of the run. 
+ + The run will make a best effort to use only the number of prompt tokens + specified, across multiple turns of the run. If the run exceeds the number of + prompt tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + """ + + metadata: Optional[object] + """Set of 16 key-value pairs that can be attached to an object. + + This can be useful for storing additional information about the object in a + structured format. Keys can be a maximum of 64 characters long and values can be + a maxium of 512 characters long. + """ + + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + """ + The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + be used to execute this run. If a value is provided here, it will override the + model associated with the assistant. If not, the model associated with the + assistant will be used. + """ + + response_format: Optional[AssistantResponseFormatOptionParam] + """Specifies the format that the model must output. + + Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. 
Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + """ + + temperature: Optional[float] + """What sampling temperature to use, between 0 and 2. + + Higher values like 0.8 will make the output more random, while lower values like + 0.2 will make it more focused and deterministic. + """ + + thread: Thread + """If no thread is provided, an empty thread will be created.""" + + tool_choice: Optional[AssistantToolChoiceOptionParam] + """ + Controls which (if any) tool is called by the model. `none` means the model will + not call any tools and instead generates a message. `auto` is the default value + and means the model can pick between generating a message or calling one or more + tools. `required` means the model must call one or more tools before responding + to the user. Specifying a particular tool like `{"type": "file_search"}` or + `{"type": "function", "function": {"name": "my_function"}}` forces the model to + call that tool. + """ + + tool_resources: Optional[ToolResources] + """A set of resources that are used by the assistant's tools. + + The resources are specific to the type of tool. For example, the + `code_interpreter` tool requires a list of file IDs, while the `file_search` + tool requires a list of vector store IDs. + """ + + tools: Optional[Iterable[Tool]] + """Override the tools the assistant can use for this run. + + This is useful for modifying the behavior on a per-run basis. + """ + + top_p: Optional[float] + """ + An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. 
So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or temperature but not both. + """ + + truncation_strategy: Optional[TruncationStrategy] + """Controls for how a thread will be truncated prior to the run. + + Use this to control the intial context window of the run. + """ + + +ThreadMessageAttachmentTool = Union[CodeInterpreterToolParam, FileSearchToolParam] + + +class ThreadMessageAttachment(TypedDict, total=False): + file_id: str + """The ID of the file to attach to the message.""" + + tools: Iterable[ThreadMessageAttachmentTool] + """The tools to add this file to.""" + + +class ThreadMessage(TypedDict, total=False): + content: Required[str] + """The content of the message.""" + + role: Required[Literal["user", "assistant"]] + """The role of the entity that is creating the message. Allowed values include: + + - `user`: Indicates the message is sent by an actual user and should be used in + most cases to represent user-generated messages. + - `assistant`: Indicates the message is generated by the assistant. Use this + value to insert messages from the assistant into the conversation. + """ + + attachments: Optional[Iterable[ThreadMessageAttachment]] + """A list of files attached to the message, and the tools they should be added to.""" + + metadata: Optional[object] + """Set of 16 key-value pairs that can be attached to an object. + + This can be useful for storing additional information about the object in a + structured format. Keys can be a maximum of 64 characters long and values can be + a maxium of 512 characters long. + """ + + +class ThreadToolResourcesCodeInterpreter(TypedDict, total=False): + file_ids: List[str] + """ + A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + available to the `code_interpreter` tool. There can be a maximum of 20 files + associated with the tool. 
+ """ + + +class ThreadToolResourcesFileSearchVectorStore(TypedDict, total=False): + file_ids: List[str] + """ + A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to + add to the vector store. There can be a maximum of 10000 files in a vector + store. + """ + + metadata: object + """Set of 16 key-value pairs that can be attached to a vector store. + + This can be useful for storing additional information about the vector store in + a structured format. Keys can be a maximum of 64 characters long and values can + be a maxium of 512 characters long. + """ + + +class ThreadToolResourcesFileSearch(TypedDict, total=False): + vector_store_ids: List[str] + """ + The + [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + attached to this thread. There can be a maximum of 1 vector store attached to + the thread. + """ + + vector_stores: Iterable[ThreadToolResourcesFileSearchVectorStore] + """ + A helper to create a + [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + with file_ids and attach it to this thread. There can be a maximum of 1 vector + store attached to the thread. + """ + + +class ThreadToolResources(TypedDict, total=False): + code_interpreter: ThreadToolResourcesCodeInterpreter + + file_search: ThreadToolResourcesFileSearch + + +class Thread(TypedDict, total=False): + messages: Iterable[ThreadMessage] + """ + A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + start the thread with. + """ + + metadata: Optional[object] + """Set of 16 key-value pairs that can be attached to an object. + + This can be useful for storing additional information about the object in a + structured format. Keys can be a maximum of 64 characters long and values can be + a maxium of 512 characters long. + """ + + tool_resources: Optional[ThreadToolResources] + """ + A set of resources that are made available to the assistant's tools in this + thread. 
The resources are specific to the type of tool. For example, the + `code_interpreter` tool requires a list of file IDs, while the `file_search` + tool requires a list of vector store IDs. + """ + + +class ToolResourcesCodeInterpreter(TypedDict, total=False): + file_ids: List[str] + """ + A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + available to the `code_interpreter` tool. There can be a maximum of 20 files + associated with the tool. + """ + + +class ToolResourcesFileSearch(TypedDict, total=False): + vector_store_ids: List[str] + """ + The ID of the + [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + attached to this assistant. There can be a maximum of 1 vector store attached to + the assistant. + """ + + +class ToolResources(TypedDict, total=False): + code_interpreter: ToolResourcesCodeInterpreter + + file_search: ToolResourcesFileSearch + + +Tool = Union[CodeInterpreterToolParam, FileSearchToolParam, FunctionToolParam] + + +class TruncationStrategy(TypedDict, total=False): + type: Required[Literal["auto", "last_messages"]] + """The truncation strategy to use for the thread. + + The default is `auto`. If set to `last_messages`, the thread will be truncated + to the n most recent messages in the thread. When set to `auto`, messages in the + middle of the thread will be dropped to fit the context length of the model, + `max_prompt_tokens`. + """ + + last_messages: Optional[int] + """ + The number of most recent messages from the thread when constructing the context + for the run. + """ + + +class ThreadCreateAndRunParamsNonStreaming(ThreadCreateAndRunParamsBase): + stream: Optional[Literal[False]] + """ + If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. 
+ """ + + +class ThreadCreateAndRunParamsStreaming(ThreadCreateAndRunParamsBase): + stream: Required[Literal[True]] + """ + If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + """ + + +ThreadCreateAndRunParams = Union[ThreadCreateAndRunParamsNonStreaming, ThreadCreateAndRunParamsStreaming] diff --git a/.venv/Lib/site-packages/openai/types/beta/thread_create_params.py b/.venv/Lib/site-packages/openai/types/beta/thread_create_params.py new file mode 100644 index 00000000..ab2df21e --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/thread_create_params.py @@ -0,0 +1,130 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List, Union, Iterable, Optional +from typing_extensions import Literal, Required, TypedDict + +from .file_search_tool_param import FileSearchToolParam +from .code_interpreter_tool_param import CodeInterpreterToolParam + +__all__ = [ + "ThreadCreateParams", + "Message", + "MessageAttachment", + "MessageAttachmentTool", + "ToolResources", + "ToolResourcesCodeInterpreter", + "ToolResourcesFileSearch", + "ToolResourcesFileSearchVectorStore", +] + + +class ThreadCreateParams(TypedDict, total=False): + messages: Iterable[Message] + """ + A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + start the thread with. + """ + + metadata: Optional[object] + """Set of 16 key-value pairs that can be attached to an object. + + This can be useful for storing additional information about the object in a + structured format. Keys can be a maximum of 64 characters long and values can be + a maxium of 512 characters long. + """ + + tool_resources: Optional[ToolResources] + """ + A set of resources that are made available to the assistant's tools in this + thread. The resources are specific to the type of tool. 
For example, the + `code_interpreter` tool requires a list of file IDs, while the `file_search` + tool requires a list of vector store IDs. + """ + + +MessageAttachmentTool = Union[CodeInterpreterToolParam, FileSearchToolParam] + + +class MessageAttachment(TypedDict, total=False): + file_id: str + """The ID of the file to attach to the message.""" + + tools: Iterable[MessageAttachmentTool] + """The tools to add this file to.""" + + +class Message(TypedDict, total=False): + content: Required[str] + """The content of the message.""" + + role: Required[Literal["user", "assistant"]] + """The role of the entity that is creating the message. Allowed values include: + + - `user`: Indicates the message is sent by an actual user and should be used in + most cases to represent user-generated messages. + - `assistant`: Indicates the message is generated by the assistant. Use this + value to insert messages from the assistant into the conversation. + """ + + attachments: Optional[Iterable[MessageAttachment]] + """A list of files attached to the message, and the tools they should be added to.""" + + metadata: Optional[object] + """Set of 16 key-value pairs that can be attached to an object. + + This can be useful for storing additional information about the object in a + structured format. Keys can be a maximum of 64 characters long and values can be + a maxium of 512 characters long. + """ + + +class ToolResourcesCodeInterpreter(TypedDict, total=False): + file_ids: List[str] + """ + A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + available to the `code_interpreter` tool. There can be a maximum of 20 files + associated with the tool. + """ + + +class ToolResourcesFileSearchVectorStore(TypedDict, total=False): + file_ids: List[str] + """ + A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to + add to the vector store. There can be a maximum of 10000 files in a vector + store. 
+ """ + + metadata: object + """Set of 16 key-value pairs that can be attached to a vector store. + + This can be useful for storing additional information about the vector store in + a structured format. Keys can be a maximum of 64 characters long and values can + be a maxium of 512 characters long. + """ + + +class ToolResourcesFileSearch(TypedDict, total=False): + vector_store_ids: List[str] + """ + The + [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + attached to this thread. There can be a maximum of 1 vector store attached to + the thread. + """ + + vector_stores: Iterable[ToolResourcesFileSearchVectorStore] + """ + A helper to create a + [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + with file_ids and attach it to this thread. There can be a maximum of 1 vector + store attached to the thread. + """ + + +class ToolResources(TypedDict, total=False): + code_interpreter: ToolResourcesCodeInterpreter + + file_search: ToolResourcesFileSearch diff --git a/.venv/Lib/site-packages/openai/types/beta/thread_deleted.py b/.venv/Lib/site-packages/openai/types/beta/thread_deleted.py new file mode 100644 index 00000000..d3856263 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/thread_deleted.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["ThreadDeleted"] + + +class ThreadDeleted(BaseModel): + id: str + + deleted: bool + + object: Literal["thread.deleted"] diff --git a/.venv/Lib/site-packages/openai/types/beta/thread_update_params.py b/.venv/Lib/site-packages/openai/types/beta/thread_update_params.py new file mode 100644 index 00000000..7210ab77 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/thread_update_params.py @@ -0,0 +1,51 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import List, Optional +from typing_extensions import TypedDict + +__all__ = ["ThreadUpdateParams", "ToolResources", "ToolResourcesCodeInterpreter", "ToolResourcesFileSearch"] + + +class ThreadUpdateParams(TypedDict, total=False): + metadata: Optional[object] + """Set of 16 key-value pairs that can be attached to an object. + + This can be useful for storing additional information about the object in a + structured format. Keys can be a maximum of 64 characters long and values can be + a maxium of 512 characters long. + """ + + tool_resources: Optional[ToolResources] + """ + A set of resources that are made available to the assistant's tools in this + thread. The resources are specific to the type of tool. For example, the + `code_interpreter` tool requires a list of file IDs, while the `file_search` + tool requires a list of vector store IDs. + """ + + +class ToolResourcesCodeInterpreter(TypedDict, total=False): + file_ids: List[str] + """ + A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + available to the `code_interpreter` tool. There can be a maximum of 20 files + associated with the tool. + """ + + +class ToolResourcesFileSearch(TypedDict, total=False): + vector_store_ids: List[str] + """ + The + [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + attached to this thread. There can be a maximum of 1 vector store attached to + the thread. + """ + + +class ToolResources(TypedDict, total=False): + code_interpreter: ToolResourcesCodeInterpreter + + file_search: ToolResourcesFileSearch diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__init__.py b/.venv/Lib/site-packages/openai/types/beta/threads/__init__.py new file mode 100644 index 00000000..1e38d5ea --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/__init__.py @@ -0,0 +1,34 @@ +# File generated from our OpenAPI spec by Stainless. 
See CONTRIBUTING.md for details. + +from __future__ import annotations + +from .run import Run as Run +from .text import Text as Text +from .message import Message as Message +from .annotation import Annotation as Annotation +from .image_file import ImageFile as ImageFile +from .run_status import RunStatus as RunStatus +from .text_delta import TextDelta as TextDelta +from .message_delta import MessageDelta as MessageDelta +from .message_content import MessageContent as MessageContent +from .message_deleted import MessageDeleted as MessageDeleted +from .run_list_params import RunListParams as RunListParams +from .annotation_delta import AnnotationDelta as AnnotationDelta +from .image_file_delta import ImageFileDelta as ImageFileDelta +from .text_delta_block import TextDeltaBlock as TextDeltaBlock +from .run_create_params import RunCreateParams as RunCreateParams +from .run_update_params import RunUpdateParams as RunUpdateParams +from .text_content_block import TextContentBlock as TextContentBlock +from .message_delta_event import MessageDeltaEvent as MessageDeltaEvent +from .message_list_params import MessageListParams as MessageListParams +from .file_path_annotation import FilePathAnnotation as FilePathAnnotation +from .message_content_delta import MessageContentDelta as MessageContentDelta +from .message_create_params import MessageCreateParams as MessageCreateParams +from .message_update_params import MessageUpdateParams as MessageUpdateParams +from .image_file_delta_block import ImageFileDeltaBlock as ImageFileDeltaBlock +from .file_citation_annotation import FileCitationAnnotation as FileCitationAnnotation +from .image_file_content_block import ImageFileContentBlock as ImageFileContentBlock +from .file_path_delta_annotation import FilePathDeltaAnnotation as FilePathDeltaAnnotation +from .file_citation_delta_annotation import FileCitationDeltaAnnotation as FileCitationDeltaAnnotation +from .run_submit_tool_outputs_params import RunSubmitToolOutputsParams as 
RunSubmitToolOutputsParams +from .required_action_function_tool_call import RequiredActionFunctionToolCall as RequiredActionFunctionToolCall diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..206520e4 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/annotation.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/annotation.cpython-311.pyc new file mode 100644 index 00000000..fe3f7f21 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/annotation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/annotation_delta.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/annotation_delta.cpython-311.pyc new file mode 100644 index 00000000..8ce3e5fb Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/annotation_delta.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/file_citation_annotation.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/file_citation_annotation.cpython-311.pyc new file mode 100644 index 00000000..a0450825 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/file_citation_annotation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/file_citation_delta_annotation.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/file_citation_delta_annotation.cpython-311.pyc new file mode 100644 index 00000000..5e901d5f Binary files /dev/null and 
b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/file_citation_delta_annotation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/file_path_annotation.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/file_path_annotation.cpython-311.pyc new file mode 100644 index 00000000..1ce163d3 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/file_path_annotation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/file_path_delta_annotation.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/file_path_delta_annotation.cpython-311.pyc new file mode 100644 index 00000000..96d8154e Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/file_path_delta_annotation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/image_file.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/image_file.cpython-311.pyc new file mode 100644 index 00000000..ddcc9403 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/image_file.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/image_file_content_block.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/image_file_content_block.cpython-311.pyc new file mode 100644 index 00000000..af0c713a Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/image_file_content_block.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/image_file_delta.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/image_file_delta.cpython-311.pyc new file mode 100644 index 00000000..c79d1e21 Binary files /dev/null and 
b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/image_file_delta.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/image_file_delta_block.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/image_file_delta_block.cpython-311.pyc new file mode 100644 index 00000000..25f6e06a Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/image_file_delta_block.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message.cpython-311.pyc new file mode 100644 index 00000000..24687726 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_content.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_content.cpython-311.pyc new file mode 100644 index 00000000..df4f63f9 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_content.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_content_delta.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_content_delta.cpython-311.pyc new file mode 100644 index 00000000..89496844 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_content_delta.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_create_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_create_params.cpython-311.pyc new file mode 100644 index 00000000..c87da808 Binary files /dev/null and 
b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_create_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_deleted.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_deleted.cpython-311.pyc new file mode 100644 index 00000000..92e8023e Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_deleted.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_delta.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_delta.cpython-311.pyc new file mode 100644 index 00000000..5f11c8a8 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_delta.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_delta_event.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_delta_event.cpython-311.pyc new file mode 100644 index 00000000..812d004b Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_delta_event.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_list_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_list_params.cpython-311.pyc new file mode 100644 index 00000000..20e336ca Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_list_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_update_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_update_params.cpython-311.pyc new file mode 100644 index 00000000..2a9fce17 Binary files /dev/null and 
b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/message_update_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/required_action_function_tool_call.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/required_action_function_tool_call.cpython-311.pyc new file mode 100644 index 00000000..5ea608c9 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/required_action_function_tool_call.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/run.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/run.cpython-311.pyc new file mode 100644 index 00000000..f476dbb3 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/run.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/run_create_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/run_create_params.cpython-311.pyc new file mode 100644 index 00000000..0ce9f026 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/run_create_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/run_list_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/run_list_params.cpython-311.pyc new file mode 100644 index 00000000..0253b112 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/run_list_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/run_status.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/run_status.cpython-311.pyc new file mode 100644 index 00000000..1bf51713 Binary files /dev/null and 
b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/run_status.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/run_submit_tool_outputs_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/run_submit_tool_outputs_params.cpython-311.pyc new file mode 100644 index 00000000..df8c2150 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/run_submit_tool_outputs_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/run_update_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/run_update_params.cpython-311.pyc new file mode 100644 index 00000000..9ccebc90 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/run_update_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/text.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/text.cpython-311.pyc new file mode 100644 index 00000000..b6613eb2 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/text.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/text_content_block.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/text_content_block.cpython-311.pyc new file mode 100644 index 00000000..7ffb7d18 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/text_content_block.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/text_delta.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/text_delta.cpython-311.pyc new file mode 100644 index 00000000..86de0b5d Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/text_delta.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/text_delta_block.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/text_delta_block.cpython-311.pyc new file mode 100644 index 00000000..aff4173c Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/__pycache__/text_delta_block.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/annotation.py b/.venv/Lib/site-packages/openai/types/beta/threads/annotation.py new file mode 100644 index 00000000..31e228c8 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/annotation.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Union +from typing_extensions import Annotated + +from ...._utils import PropertyInfo +from .file_path_annotation import FilePathAnnotation +from .file_citation_annotation import FileCitationAnnotation + +__all__ = ["Annotation"] + +Annotation = Annotated[Union[FileCitationAnnotation, FilePathAnnotation], PropertyInfo(discriminator="type")] diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/annotation_delta.py b/.venv/Lib/site-packages/openai/types/beta/threads/annotation_delta.py new file mode 100644 index 00000000..91242967 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/annotation_delta.py @@ -0,0 +1,14 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Union +from typing_extensions import Annotated + +from ...._utils import PropertyInfo +from .file_path_delta_annotation import FilePathDeltaAnnotation +from .file_citation_delta_annotation import FileCitationDeltaAnnotation + +__all__ = ["AnnotationDelta"] + +AnnotationDelta = Annotated[ + Union[FileCitationDeltaAnnotation, FilePathDeltaAnnotation], PropertyInfo(discriminator="type") +] diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/file_citation_annotation.py b/.venv/Lib/site-packages/openai/types/beta/threads/file_citation_annotation.py new file mode 100644 index 00000000..68571cd4 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/file_citation_annotation.py @@ -0,0 +1,29 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal + +from ...._models import BaseModel + +__all__ = ["FileCitationAnnotation", "FileCitation"] + + +class FileCitation(BaseModel): + file_id: str + """The ID of the specific File the citation is from.""" + + quote: str + """The specific quote in the file.""" + + +class FileCitationAnnotation(BaseModel): + end_index: int + + file_citation: FileCitation + + start_index: int + + text: str + """The text in the message content that needs to be replaced.""" + + type: Literal["file_citation"] + """Always `file_citation`.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/file_citation_delta_annotation.py b/.venv/Lib/site-packages/openai/types/beta/threads/file_citation_delta_annotation.py new file mode 100644 index 00000000..b40c0d12 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/file_citation_delta_annotation.py @@ -0,0 +1,33 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Optional +from typing_extensions import Literal + +from ...._models import BaseModel + +__all__ = ["FileCitationDeltaAnnotation", "FileCitation"] + + +class FileCitation(BaseModel): + file_id: Optional[str] = None + """The ID of the specific File the citation is from.""" + + quote: Optional[str] = None + """The specific quote in the file.""" + + +class FileCitationDeltaAnnotation(BaseModel): + index: int + """The index of the annotation in the text content part.""" + + type: Literal["file_citation"] + """Always `file_citation`.""" + + end_index: Optional[int] = None + + file_citation: Optional[FileCitation] = None + + start_index: Optional[int] = None + + text: Optional[str] = None + """The text in the message content that needs to be replaced.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/file_path_annotation.py b/.venv/Lib/site-packages/openai/types/beta/threads/file_path_annotation.py new file mode 100644 index 00000000..9812737e --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/file_path_annotation.py @@ -0,0 +1,26 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing_extensions import Literal + +from ...._models import BaseModel + +__all__ = ["FilePathAnnotation", "FilePath"] + + +class FilePath(BaseModel): + file_id: str + """The ID of the file that was generated.""" + + +class FilePathAnnotation(BaseModel): + end_index: int + + file_path: FilePath + + start_index: int + + text: str + """The text in the message content that needs to be replaced.""" + + type: Literal["file_path"] + """Always `file_path`.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/file_path_delta_annotation.py b/.venv/Lib/site-packages/openai/types/beta/threads/file_path_delta_annotation.py new file mode 100644 index 00000000..0cbb445e --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/file_path_delta_annotation.py @@ -0,0 +1,30 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from typing_extensions import Literal + +from ...._models import BaseModel + +__all__ = ["FilePathDeltaAnnotation", "FilePath"] + + +class FilePath(BaseModel): + file_id: Optional[str] = None + """The ID of the file that was generated.""" + + +class FilePathDeltaAnnotation(BaseModel): + index: int + """The index of the annotation in the text content part.""" + + type: Literal["file_path"] + """Always `file_path`.""" + + end_index: Optional[int] = None + + file_path: Optional[FilePath] = None + + start_index: Optional[int] = None + + text: Optional[str] = None + """The text in the message content that needs to be replaced.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/image_file.py b/.venv/Lib/site-packages/openai/types/beta/threads/image_file.py new file mode 100644 index 00000000..651a247d --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/image_file.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ + + +from ...._models import BaseModel + +__all__ = ["ImageFile"] + + +class ImageFile(BaseModel): + file_id: str + """ + The [File](https://platform.openai.com/docs/api-reference/files) ID of the image + in the message content. + """ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/image_file_content_block.py b/.venv/Lib/site-packages/openai/types/beta/threads/image_file_content_block.py new file mode 100644 index 00000000..a9099990 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/image_file_content_block.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal + +from ...._models import BaseModel +from .image_file import ImageFile + +__all__ = ["ImageFileContentBlock"] + + +class ImageFileContentBlock(BaseModel): + image_file: ImageFile + + type: Literal["image_file"] + """Always `image_file`.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/image_file_delta.py b/.venv/Lib/site-packages/openai/types/beta/threads/image_file_delta.py new file mode 100644 index 00000000..b0b1d32f --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/image_file_delta.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional + +from ...._models import BaseModel + +__all__ = ["ImageFileDelta"] + + +class ImageFileDelta(BaseModel): + file_id: Optional[str] = None + """ + The [File](https://platform.openai.com/docs/api-reference/files) ID of the image + in the message content. 
+ """ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/image_file_delta_block.py b/.venv/Lib/site-packages/openai/types/beta/threads/image_file_delta_block.py new file mode 100644 index 00000000..0a5a2e8a --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/image_file_delta_block.py @@ -0,0 +1,19 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from typing_extensions import Literal + +from ...._models import BaseModel +from .image_file_delta import ImageFileDelta + +__all__ = ["ImageFileDeltaBlock"] + + +class ImageFileDeltaBlock(BaseModel): + index: int + """The index of the content part in the message.""" + + type: Literal["image_file"] + """Always `image_file`.""" + + image_file: Optional[ImageFileDelta] = None diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/message.py b/.venv/Lib/site-packages/openai/types/beta/threads/message.py new file mode 100644 index 00000000..ebaabdb0 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/message.py @@ -0,0 +1,89 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import List, Union, Optional +from typing_extensions import Literal + +from ...._models import BaseModel +from .message_content import MessageContent +from ..file_search_tool import FileSearchTool +from ..code_interpreter_tool import CodeInterpreterTool + +__all__ = ["Message", "Attachment", "AttachmentTool", "IncompleteDetails"] + +AttachmentTool = Union[CodeInterpreterTool, FileSearchTool] + + +class Attachment(BaseModel): + file_id: Optional[str] = None + """The ID of the file to attach to the message.""" + + tools: Optional[List[AttachmentTool]] = None + """The tools to add this file to.""" + + +class IncompleteDetails(BaseModel): + reason: Literal["content_filter", "max_tokens", "run_cancelled", "run_expired", "run_failed"] + """The reason the message is incomplete.""" + + +class Message(BaseModel): + id: str + """The identifier, which can be referenced in API endpoints.""" + + assistant_id: Optional[str] = None + """ + If applicable, the ID of the + [assistant](https://platform.openai.com/docs/api-reference/assistants) that + authored this message. + """ + + attachments: Optional[List[Attachment]] = None + """A list of files attached to the message, and the tools they were added to.""" + + completed_at: Optional[int] = None + """The Unix timestamp (in seconds) for when the message was completed.""" + + content: List[MessageContent] + """The content of the message in array of text and/or images.""" + + created_at: int + """The Unix timestamp (in seconds) for when the message was created.""" + + incomplete_at: Optional[int] = None + """The Unix timestamp (in seconds) for when the message was marked as incomplete.""" + + incomplete_details: Optional[IncompleteDetails] = None + """On an incomplete message, details about why the message is incomplete.""" + + metadata: Optional[object] = None + """Set of 16 key-value pairs that can be attached to an object. 
+ + This can be useful for storing additional information about the object in a + structured format. Keys can be a maximum of 64 characters long and values can be + a maxium of 512 characters long. + """ + + object: Literal["thread.message"] + """The object type, which is always `thread.message`.""" + + role: Literal["user", "assistant"] + """The entity that produced the message. One of `user` or `assistant`.""" + + run_id: Optional[str] = None + """ + The ID of the [run](https://platform.openai.com/docs/api-reference/runs) + associated with the creation of this message. Value is `null` when messages are + created manually using the create message or create thread endpoints. + """ + + status: Literal["in_progress", "incomplete", "completed"] + """ + The status of the message, which can be either `in_progress`, `incomplete`, or + `completed`. + """ + + thread_id: str + """ + The [thread](https://platform.openai.com/docs/api-reference/threads) ID that + this message belongs to. + """ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/message_content.py b/.venv/Lib/site-packages/openai/types/beta/threads/message_content.py new file mode 100644 index 00000000..bc79b39f --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/message_content.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Union +from typing_extensions import Annotated + +from ...._utils import PropertyInfo +from .text_content_block import TextContentBlock +from .image_file_content_block import ImageFileContentBlock + +__all__ = ["MessageContent"] + +MessageContent = Annotated[Union[ImageFileContentBlock, TextContentBlock], PropertyInfo(discriminator="type")] diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/message_content_delta.py b/.venv/Lib/site-packages/openai/types/beta/threads/message_content_delta.py new file mode 100644 index 00000000..3cbc22c9 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/message_content_delta.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Union +from typing_extensions import Annotated + +from ...._utils import PropertyInfo +from .text_delta_block import TextDeltaBlock +from .image_file_delta_block import ImageFileDeltaBlock + +__all__ = ["MessageContentDelta"] + +MessageContentDelta = Annotated[Union[ImageFileDeltaBlock, TextDeltaBlock], PropertyInfo(discriminator="type")] diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/message_create_params.py b/.venv/Lib/site-packages/openai/types/beta/threads/message_create_params.py new file mode 100644 index 00000000..5cead598 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/message_create_params.py @@ -0,0 +1,47 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Union, Iterable, Optional +from typing_extensions import Literal, Required, TypedDict + +from ..file_search_tool_param import FileSearchToolParam +from ..code_interpreter_tool_param import CodeInterpreterToolParam + +__all__ = ["MessageCreateParams", "Attachment", "AttachmentTool"] + + +class MessageCreateParams(TypedDict, total=False): + content: Required[str] + """The content of the message.""" + + role: Required[Literal["user", "assistant"]] + """The role of the entity that is creating the message. Allowed values include: + + - `user`: Indicates the message is sent by an actual user and should be used in + most cases to represent user-generated messages. + - `assistant`: Indicates the message is generated by the assistant. Use this + value to insert messages from the assistant into the conversation. + """ + + attachments: Optional[Iterable[Attachment]] + """A list of files attached to the message, and the tools they should be added to.""" + + metadata: Optional[object] + """Set of 16 key-value pairs that can be attached to an object. + + This can be useful for storing additional information about the object in a + structured format. Keys can be a maximum of 64 characters long and values can be + a maxium of 512 characters long. + """ + + +AttachmentTool = Union[CodeInterpreterToolParam, FileSearchToolParam] + + +class Attachment(TypedDict, total=False): + file_id: str + """The ID of the file to attach to the message.""" + + tools: Iterable[AttachmentTool] + """The tools to add this file to.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/message_deleted.py b/.venv/Lib/site-packages/openai/types/beta/threads/message_deleted.py new file mode 100644 index 00000000..48210777 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/message_deleted.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing_extensions import Literal + +from ...._models import BaseModel + +__all__ = ["MessageDeleted"] + + +class MessageDeleted(BaseModel): + id: str + + deleted: bool + + object: Literal["thread.message.deleted"] diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/message_delta.py b/.venv/Lib/site-packages/openai/types/beta/threads/message_delta.py new file mode 100644 index 00000000..ecd0dfe3 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/message_delta.py @@ -0,0 +1,17 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional +from typing_extensions import Literal + +from ...._models import BaseModel +from .message_content_delta import MessageContentDelta + +__all__ = ["MessageDelta"] + + +class MessageDelta(BaseModel): + content: Optional[List[MessageContentDelta]] = None + """The content of the message in array of text and/or images.""" + + role: Optional[Literal["user", "assistant"]] = None + """The entity that produced the message. One of `user` or `assistant`.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/message_delta_event.py b/.venv/Lib/site-packages/openai/types/beta/threads/message_delta_event.py new file mode 100644 index 00000000..3811cef6 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/message_delta_event.py @@ -0,0 +1,19 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing_extensions import Literal + +from ...._models import BaseModel +from .message_delta import MessageDelta + +__all__ = ["MessageDeltaEvent"] + + +class MessageDeltaEvent(BaseModel): + id: str + """The identifier of the message, which can be referenced in API endpoints.""" + + delta: MessageDelta + """The delta containing the fields that have changed on the Message.""" + + object: Literal["thread.message.delta"] + """The object type, which is always `thread.message.delta`.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/message_list_params.py b/.venv/Lib/site-packages/openai/types/beta/threads/message_list_params.py new file mode 100644 index 00000000..18c2442f --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/message_list_params.py @@ -0,0 +1,42 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, TypedDict + +__all__ = ["MessageListParams"] + + +class MessageListParams(TypedDict, total=False): + after: str + """A cursor for use in pagination. + + `after` is an object ID that defines your place in the list. For instance, if + you make a list request and receive 100 objects, ending with obj_foo, your + subsequent call can include after=obj_foo in order to fetch the next page of the + list. + """ + + before: str + """A cursor for use in pagination. + + `before` is an object ID that defines your place in the list. For instance, if + you make a list request and receive 100 objects, ending with obj_foo, your + subsequent call can include before=obj_foo in order to fetch the previous page + of the list. + """ + + limit: int + """A limit on the number of objects to be returned. + + Limit can range between 1 and 100, and the default is 20. + """ + + order: Literal["asc", "desc"] + """Sort order by the `created_at` timestamp of the objects. + + `asc` for ascending order and `desc` for descending order. 
+ """ + + run_id: str + """Filter messages by the run ID that generated them.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/message_update_params.py b/.venv/Lib/site-packages/openai/types/beta/threads/message_update_params.py new file mode 100644 index 00000000..7000f331 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/message_update_params.py @@ -0,0 +1,20 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Optional +from typing_extensions import Required, TypedDict + +__all__ = ["MessageUpdateParams"] + + +class MessageUpdateParams(TypedDict, total=False): + thread_id: Required[str] + + metadata: Optional[object] + """Set of 16 key-value pairs that can be attached to an object. + + This can be useful for storing additional information about the object in a + structured format. Keys can be a maximum of 64 characters long and values can be + a maxium of 512 characters long. + """ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/required_action_function_tool_call.py b/.venv/Lib/site-packages/openai/types/beta/threads/required_action_function_tool_call.py new file mode 100644 index 00000000..a24dfd06 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/required_action_function_tool_call.py @@ -0,0 +1,34 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal + +from ...._models import BaseModel + +__all__ = ["RequiredActionFunctionToolCall", "Function"] + + +class Function(BaseModel): + arguments: str + """The arguments that the model expects you to pass to the function.""" + + name: str + """The name of the function.""" + + +class RequiredActionFunctionToolCall(BaseModel): + id: str + """The ID of the tool call. 
+ + This ID must be referenced when you submit the tool outputs in using the + [Submit tool outputs to run](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) + endpoint. + """ + + function: Function + """The function definition.""" + + type: Literal["function"] + """The type of tool call the output is required for. + + For now, this is always `function`. + """ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/run.py b/.venv/Lib/site-packages/openai/types/beta/threads/run.py new file mode 100644 index 00000000..6c118f27 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/run.py @@ -0,0 +1,230 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional +from typing_extensions import Literal + +from ...._models import BaseModel +from .run_status import RunStatus +from ..assistant_tool import AssistantTool +from ..assistant_tool_choice_option import AssistantToolChoiceOption +from ..assistant_response_format_option import AssistantResponseFormatOption +from .required_action_function_tool_call import RequiredActionFunctionToolCall + +__all__ = [ + "Run", + "IncompleteDetails", + "LastError", + "RequiredAction", + "RequiredActionSubmitToolOutputs", + "TruncationStrategy", + "Usage", +] + + +class IncompleteDetails(BaseModel): + reason: Optional[Literal["max_completion_tokens", "max_prompt_tokens"]] = None + """The reason why the run is incomplete. + + This will point to which specific token limit was reached over the course of the + run. 
+ """ + + +class LastError(BaseModel): + code: Literal["server_error", "rate_limit_exceeded", "invalid_prompt"] + """One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`.""" + + message: str + """A human-readable description of the error.""" + + +class RequiredActionSubmitToolOutputs(BaseModel): + tool_calls: List[RequiredActionFunctionToolCall] + """A list of the relevant tool calls.""" + + +class RequiredAction(BaseModel): + submit_tool_outputs: RequiredActionSubmitToolOutputs + """Details on the tool outputs needed for this run to continue.""" + + type: Literal["submit_tool_outputs"] + """For now, this is always `submit_tool_outputs`.""" + + +class TruncationStrategy(BaseModel): + type: Literal["auto", "last_messages"] + """The truncation strategy to use for the thread. + + The default is `auto`. If set to `last_messages`, the thread will be truncated + to the n most recent messages in the thread. When set to `auto`, messages in the + middle of the thread will be dropped to fit the context length of the model, + `max_prompt_tokens`. + """ + + last_messages: Optional[int] = None + """ + The number of most recent messages from the thread when constructing the context + for the run. + """ + + +class Usage(BaseModel): + completion_tokens: int + """Number of completion tokens used over the course of the run.""" + + prompt_tokens: int + """Number of prompt tokens used over the course of the run.""" + + total_tokens: int + """Total number of tokens used (prompt + completion).""" + + +class Run(BaseModel): + id: str + """The identifier, which can be referenced in API endpoints.""" + + assistant_id: str + """ + The ID of the + [assistant](https://platform.openai.com/docs/api-reference/assistants) used for + execution of this run. 
+ """ + + cancelled_at: Optional[int] = None + """The Unix timestamp (in seconds) for when the run was cancelled.""" + + completed_at: Optional[int] = None + """The Unix timestamp (in seconds) for when the run was completed.""" + + created_at: int + """The Unix timestamp (in seconds) for when the run was created.""" + + expires_at: Optional[int] = None + """The Unix timestamp (in seconds) for when the run will expire.""" + + failed_at: Optional[int] = None + """The Unix timestamp (in seconds) for when the run failed.""" + + incomplete_details: Optional[IncompleteDetails] = None + """Details on why the run is incomplete. + + Will be `null` if the run is not incomplete. + """ + + instructions: str + """ + The instructions that the + [assistant](https://platform.openai.com/docs/api-reference/assistants) used for + this run. + """ + + last_error: Optional[LastError] = None + """The last error associated with this run. Will be `null` if there are no errors.""" + + max_completion_tokens: Optional[int] = None + """ + The maximum number of completion tokens specified to have been used over the + course of the run. + """ + + max_prompt_tokens: Optional[int] = None + """ + The maximum number of prompt tokens specified to have been used over the course + of the run. + """ + + metadata: Optional[object] = None + """Set of 16 key-value pairs that can be attached to an object. + + This can be useful for storing additional information about the object in a + structured format. Keys can be a maximum of 64 characters long and values can be + a maxium of 512 characters long. + """ + + model: str + """ + The model that the + [assistant](https://platform.openai.com/docs/api-reference/assistants) used for + this run. + """ + + object: Literal["thread.run"] + """The object type, which is always `thread.run`.""" + + required_action: Optional[RequiredAction] = None + """Details on the action required to continue the run. + + Will be `null` if no action is required. 
+ """ + + response_format: Optional[AssistantResponseFormatOption] = None + """Specifies the format that the model must output. + + Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + """ + + started_at: Optional[int] = None + """The Unix timestamp (in seconds) for when the run was started.""" + + status: RunStatus + """ + The status of the run, which can be either `queued`, `in_progress`, + `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, or + `expired`. + """ + + thread_id: str + """ + The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) + that was executed on as a part of this run. + """ + + tool_choice: Optional[AssistantToolChoiceOption] = None + """ + Controls which (if any) tool is called by the model. `none` means the model will + not call any tools and instead generates a message. `auto` is the default value + and means the model can pick between generating a message or calling one or more + tools. `required` means the model must call one or more tools before responding + to the user. Specifying a particular tool like `{"type": "file_search"}` or + `{"type": "function", "function": {"name": "my_function"}}` forces the model to + call that tool. 
+ """ + + tools: List[AssistantTool] + """ + The list of tools that the + [assistant](https://platform.openai.com/docs/api-reference/assistants) used for + this run. + """ + + truncation_strategy: Optional[TruncationStrategy] = None + """Controls for how a thread will be truncated prior to the run. + + Use this to control the intial context window of the run. + """ + + usage: Optional[Usage] = None + """Usage statistics related to the run. + + This value will be `null` if the run is not in a terminal state (i.e. + `in_progress`, `queued`, etc.). + """ + + temperature: Optional[float] = None + """The sampling temperature used for this run. If not set, defaults to 1.""" + + top_p: Optional[float] = None + """The nucleus sampling value used for this run. If not set, defaults to 1.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/run_create_params.py b/.venv/Lib/site-packages/openai/types/beta/threads/run_create_params.py new file mode 100644 index 00000000..2e4823ba --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/run_create_params.py @@ -0,0 +1,237 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Union, Iterable, Optional +from typing_extensions import Literal, Required, TypedDict + +from ..assistant_tool_param import AssistantToolParam +from ..file_search_tool_param import FileSearchToolParam +from ..code_interpreter_tool_param import CodeInterpreterToolParam +from ..assistant_tool_choice_option_param import AssistantToolChoiceOptionParam +from ..assistant_response_format_option_param import AssistantResponseFormatOptionParam + +__all__ = [ + "RunCreateParamsBase", + "AdditionalMessage", + "AdditionalMessageAttachment", + "AdditionalMessageAttachmentTool", + "TruncationStrategy", + "RunCreateParamsNonStreaming", + "RunCreateParamsStreaming", +] + + +class RunCreateParamsBase(TypedDict, total=False): + assistant_id: Required[str] + """ + The ID of the + [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + execute this run. + """ + + additional_instructions: Optional[str] + """Appends additional instructions at the end of the instructions for the run. + + This is useful for modifying the behavior on a per-run basis without overriding + other instructions. + """ + + additional_messages: Optional[Iterable[AdditionalMessage]] + """Adds additional messages to the thread before creating the run.""" + + instructions: Optional[str] + """ + Overrides the + [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + of the assistant. This is useful for modifying the behavior on a per-run basis. + """ + + max_completion_tokens: Optional[int] + """ + The maximum number of completion tokens that may be used over the course of the + run. The run will make a best effort to use only the number of completion tokens + specified, across multiple turns of the run. If the run exceeds the number of + completion tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. 
+ """ + + max_prompt_tokens: Optional[int] + """The maximum number of prompt tokens that may be used over the course of the run. + + The run will make a best effort to use only the number of prompt tokens + specified, across multiple turns of the run. If the run exceeds the number of + prompt tokens specified, the run will end with status `incomplete`. See + `incomplete_details` for more info. + """ + + metadata: Optional[object] + """Set of 16 key-value pairs that can be attached to an object. + + This can be useful for storing additional information about the object in a + structured format. Keys can be a maximum of 64 characters long and values can be + a maxium of 512 characters long. + """ + + model: Union[ + str, + Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", + ], + None, + ] + """ + The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + be used to execute this run. If a value is provided here, it will override the + model associated with the assistant. If not, the model associated with the + assistant will be used. + """ + + response_format: Optional[AssistantResponseFormatOptionParam] + """Specifies the format that the model must output. + + Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. 
Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + """ + + temperature: Optional[float] + """What sampling temperature to use, between 0 and 2. + + Higher values like 0.8 will make the output more random, while lower values like + 0.2 will make it more focused and deterministic. + """ + + tool_choice: Optional[AssistantToolChoiceOptionParam] + """ + Controls which (if any) tool is called by the model. `none` means the model will + not call any tools and instead generates a message. `auto` is the default value + and means the model can pick between generating a message or calling one or more + tools. `required` means the model must call one or more tools before responding + to the user. Specifying a particular tool like `{"type": "file_search"}` or + `{"type": "function", "function": {"name": "my_function"}}` forces the model to + call that tool. + """ + + tools: Optional[Iterable[AssistantToolParam]] + """Override the tools the assistant can use for this run. + + This is useful for modifying the behavior on a per-run basis. + """ + + top_p: Optional[float] + """ + An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or temperature but not both. + """ + + truncation_strategy: Optional[TruncationStrategy] + """Controls for how a thread will be truncated prior to the run. + + Use this to control the intial context window of the run. 
+ """ + + +AdditionalMessageAttachmentTool = Union[CodeInterpreterToolParam, FileSearchToolParam] + + +class AdditionalMessageAttachment(TypedDict, total=False): + file_id: str + """The ID of the file to attach to the message.""" + + tools: Iterable[AdditionalMessageAttachmentTool] + """The tools to add this file to.""" + + +class AdditionalMessage(TypedDict, total=False): + content: Required[str] + """The content of the message.""" + + role: Required[Literal["user", "assistant"]] + """The role of the entity that is creating the message. Allowed values include: + + - `user`: Indicates the message is sent by an actual user and should be used in + most cases to represent user-generated messages. + - `assistant`: Indicates the message is generated by the assistant. Use this + value to insert messages from the assistant into the conversation. + """ + + attachments: Optional[Iterable[AdditionalMessageAttachment]] + """A list of files attached to the message, and the tools they should be added to.""" + + metadata: Optional[object] + """Set of 16 key-value pairs that can be attached to an object. + + This can be useful for storing additional information about the object in a + structured format. Keys can be a maximum of 64 characters long and values can be + a maxium of 512 characters long. + """ + + +class TruncationStrategy(TypedDict, total=False): + type: Required[Literal["auto", "last_messages"]] + """The truncation strategy to use for the thread. + + The default is `auto`. If set to `last_messages`, the thread will be truncated + to the n most recent messages in the thread. When set to `auto`, messages in the + middle of the thread will be dropped to fit the context length of the model, + `max_prompt_tokens`. + """ + + last_messages: Optional[int] + """ + The number of most recent messages from the thread when constructing the context + for the run. 
+ """ + + +class RunCreateParamsNonStreaming(RunCreateParamsBase): + stream: Optional[Literal[False]] + """ + If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + """ + + +class RunCreateParamsStreaming(RunCreateParamsBase): + stream: Required[Literal[True]] + """ + If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + """ + + +RunCreateParams = Union[RunCreateParamsNonStreaming, RunCreateParamsStreaming] diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/run_list_params.py b/.venv/Lib/site-packages/openai/types/beta/threads/run_list_params.py new file mode 100644 index 00000000..1e32bca4 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/run_list_params.py @@ -0,0 +1,39 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, TypedDict + +__all__ = ["RunListParams"] + + +class RunListParams(TypedDict, total=False): + after: str + """A cursor for use in pagination. + + `after` is an object ID that defines your place in the list. For instance, if + you make a list request and receive 100 objects, ending with obj_foo, your + subsequent call can include after=obj_foo in order to fetch the next page of the + list. + """ + + before: str + """A cursor for use in pagination. + + `before` is an object ID that defines your place in the list. For instance, if + you make a list request and receive 100 objects, ending with obj_foo, your + subsequent call can include before=obj_foo in order to fetch the previous page + of the list. + """ + + limit: int + """A limit on the number of objects to be returned. + + Limit can range between 1 and 100, and the default is 20. 
+ """ + + order: Literal["asc", "desc"] + """Sort order by the `created_at` timestamp of the objects. + + `asc` for ascending order and `desc` for descending order. + """ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/run_status.py b/.venv/Lib/site-packages/openai/types/beta/threads/run_status.py new file mode 100644 index 00000000..bf9b4e7b --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/run_status.py @@ -0,0 +1,9 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal + +__all__ = ["RunStatus"] + +RunStatus = Literal[ + "queued", "in_progress", "requires_action", "cancelling", "cancelled", "failed", "completed", "expired" +] diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/run_submit_tool_outputs_params.py b/.venv/Lib/site-packages/openai/types/beta/threads/run_submit_tool_outputs_params.py new file mode 100644 index 00000000..ccb5e5e9 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/run_submit_tool_outputs_params.py @@ -0,0 +1,52 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union, Iterable, Optional +from typing_extensions import Literal, Required, TypedDict + +__all__ = [ + "RunSubmitToolOutputsParamsBase", + "ToolOutput", + "RunSubmitToolOutputsParamsNonStreaming", + "RunSubmitToolOutputsParamsStreaming", +] + + +class RunSubmitToolOutputsParamsBase(TypedDict, total=False): + thread_id: Required[str] + + tool_outputs: Required[Iterable[ToolOutput]] + """A list of tools for which the outputs are being submitted.""" + + +class ToolOutput(TypedDict, total=False): + output: str + """The output of the tool call to be submitted to continue the run.""" + + tool_call_id: str + """ + The ID of the tool call in the `required_action` object within the run object + the output is being submitted for. 
+ """ + + +class RunSubmitToolOutputsParamsNonStreaming(RunSubmitToolOutputsParamsBase): + stream: Optional[Literal[False]] + """ + If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + """ + + +class RunSubmitToolOutputsParamsStreaming(RunSubmitToolOutputsParamsBase): + stream: Required[Literal[True]] + """ + If `true`, returns a stream of events that happen during the Run as server-sent + events, terminating when the Run enters a terminal state with a `data: [DONE]` + message. + """ + + +RunSubmitToolOutputsParams = Union[RunSubmitToolOutputsParamsNonStreaming, RunSubmitToolOutputsParamsStreaming] diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/run_update_params.py b/.venv/Lib/site-packages/openai/types/beta/threads/run_update_params.py new file mode 100644 index 00000000..e595eac8 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/run_update_params.py @@ -0,0 +1,20 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Optional +from typing_extensions import Required, TypedDict + +__all__ = ["RunUpdateParams"] + + +class RunUpdateParams(TypedDict, total=False): + thread_id: Required[str] + + metadata: Optional[object] + """Set of 16 key-value pairs that can be attached to an object. + + This can be useful for storing additional information about the object in a + structured format. Keys can be a maximum of 64 characters long and values can be + a maxium of 512 characters long. 
+ """ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/__init__.py b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__init__.py new file mode 100644 index 00000000..a312ce3d --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__init__.py @@ -0,0 +1,22 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from .run_step import RunStep as RunStep +from .tool_call import ToolCall as ToolCall +from .run_step_delta import RunStepDelta as RunStepDelta +from .tool_call_delta import ToolCallDelta as ToolCallDelta +from .step_list_params import StepListParams as StepListParams +from .function_tool_call import FunctionToolCall as FunctionToolCall +from .run_step_delta_event import RunStepDeltaEvent as RunStepDeltaEvent +from .code_interpreter_logs import CodeInterpreterLogs as CodeInterpreterLogs +from .file_search_tool_call import FileSearchToolCall as FileSearchToolCall +from .tool_call_delta_object import ToolCallDeltaObject as ToolCallDeltaObject +from .tool_calls_step_details import ToolCallsStepDetails as ToolCallsStepDetails +from .function_tool_call_delta import FunctionToolCallDelta as FunctionToolCallDelta +from .code_interpreter_tool_call import CodeInterpreterToolCall as CodeInterpreterToolCall +from .file_search_tool_call_delta import FileSearchToolCallDelta as FileSearchToolCallDelta +from .run_step_delta_message_delta import RunStepDeltaMessageDelta as RunStepDeltaMessageDelta +from .code_interpreter_output_image import CodeInterpreterOutputImage as CodeInterpreterOutputImage +from .message_creation_step_details import MessageCreationStepDetails as MessageCreationStepDetails +from .code_interpreter_tool_call_delta import CodeInterpreterToolCallDelta as CodeInterpreterToolCallDelta diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..08a0dae7 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/code_interpreter_logs.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/code_interpreter_logs.cpython-311.pyc new file mode 100644 index 00000000..1eba2df5 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/code_interpreter_logs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/code_interpreter_output_image.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/code_interpreter_output_image.cpython-311.pyc new file mode 100644 index 00000000..4ea08e08 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/code_interpreter_output_image.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/code_interpreter_tool_call.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/code_interpreter_tool_call.cpython-311.pyc new file mode 100644 index 00000000..474bf2d1 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/code_interpreter_tool_call.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/code_interpreter_tool_call_delta.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/code_interpreter_tool_call_delta.cpython-311.pyc new file mode 100644 index 00000000..707732b4 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/code_interpreter_tool_call_delta.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/file_search_tool_call.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/file_search_tool_call.cpython-311.pyc new file mode 100644 index 00000000..78571317 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/file_search_tool_call.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/file_search_tool_call_delta.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/file_search_tool_call_delta.cpython-311.pyc new file mode 100644 index 00000000..c6413e63 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/file_search_tool_call_delta.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/function_tool_call.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/function_tool_call.cpython-311.pyc new file mode 100644 index 00000000..654c12ac Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/function_tool_call.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/function_tool_call_delta.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/function_tool_call_delta.cpython-311.pyc new file mode 100644 index 00000000..a520f860 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/function_tool_call_delta.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/message_creation_step_details.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/message_creation_step_details.cpython-311.pyc new file mode 100644 index 00000000..c97c0259 Binary files /dev/null and 
b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/message_creation_step_details.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/run_step.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/run_step.cpython-311.pyc new file mode 100644 index 00000000..f419a45b Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/run_step.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/run_step_delta.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/run_step_delta.cpython-311.pyc new file mode 100644 index 00000000..2d4a75b8 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/run_step_delta.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/run_step_delta_event.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/run_step_delta_event.cpython-311.pyc new file mode 100644 index 00000000..260c6a69 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/run_step_delta_event.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/run_step_delta_message_delta.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/run_step_delta_message_delta.cpython-311.pyc new file mode 100644 index 00000000..9c038da1 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/run_step_delta_message_delta.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/step_list_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/step_list_params.cpython-311.pyc new file mode 100644 index 00000000..d833bde3 Binary 
files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/step_list_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/tool_call.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/tool_call.cpython-311.pyc new file mode 100644 index 00000000..7f855a78 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/tool_call.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/tool_call_delta.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/tool_call_delta.cpython-311.pyc new file mode 100644 index 00000000..29ac53d9 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/tool_call_delta.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/tool_call_delta_object.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/tool_call_delta_object.cpython-311.pyc new file mode 100644 index 00000000..4a645991 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/tool_call_delta_object.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/tool_calls_step_details.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/tool_calls_step_details.cpython-311.pyc new file mode 100644 index 00000000..3a70aa54 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/threads/runs/__pycache__/tool_calls_step_details.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/code_interpreter_logs.py b/.venv/Lib/site-packages/openai/types/beta/threads/runs/code_interpreter_logs.py new file mode 100644 index 00000000..0bf8c1da --- /dev/null +++ 
b/.venv/Lib/site-packages/openai/types/beta/threads/runs/code_interpreter_logs.py @@ -0,0 +1,19 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from typing_extensions import Literal + +from ....._models import BaseModel + +__all__ = ["CodeInterpreterLogs"] + + +class CodeInterpreterLogs(BaseModel): + index: int + """The index of the output in the outputs array.""" + + type: Literal["logs"] + """Always `logs`.""" + + logs: Optional[str] = None + """The text output from the Code Interpreter tool call.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/code_interpreter_output_image.py b/.venv/Lib/site-packages/openai/types/beta/threads/runs/code_interpreter_output_image.py new file mode 100644 index 00000000..2257f37e --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/runs/code_interpreter_output_image.py @@ -0,0 +1,26 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from typing_extensions import Literal + +from ....._models import BaseModel + +__all__ = ["CodeInterpreterOutputImage", "Image"] + + +class Image(BaseModel): + file_id: Optional[str] = None + """ + The [file](https://platform.openai.com/docs/api-reference/files) ID of the + image. + """ + + +class CodeInterpreterOutputImage(BaseModel): + index: int + """The index of the output in the outputs array.""" + + type: Literal["image"] + """Always `image`.""" + + image: Optional[Image] = None diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/code_interpreter_tool_call.py b/.venv/Lib/site-packages/openai/types/beta/threads/runs/code_interpreter_tool_call.py new file mode 100644 index 00000000..2f072436 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/runs/code_interpreter_tool_call.py @@ -0,0 +1,70 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import List, Union +from typing_extensions import Literal, Annotated + +from ....._utils import PropertyInfo +from ....._models import BaseModel + +__all__ = [ + "CodeInterpreterToolCall", + "CodeInterpreter", + "CodeInterpreterOutput", + "CodeInterpreterOutputLogs", + "CodeInterpreterOutputImage", + "CodeInterpreterOutputImageImage", +] + + +class CodeInterpreterOutputLogs(BaseModel): + logs: str + """The text output from the Code Interpreter tool call.""" + + type: Literal["logs"] + """Always `logs`.""" + + +class CodeInterpreterOutputImageImage(BaseModel): + file_id: str + """ + The [file](https://platform.openai.com/docs/api-reference/files) ID of the + image. + """ + + +class CodeInterpreterOutputImage(BaseModel): + image: CodeInterpreterOutputImageImage + + type: Literal["image"] + """Always `image`.""" + + +CodeInterpreterOutput = Annotated[ + Union[CodeInterpreterOutputLogs, CodeInterpreterOutputImage], PropertyInfo(discriminator="type") +] + + +class CodeInterpreter(BaseModel): + input: str + """The input to the Code Interpreter tool call.""" + + outputs: List[CodeInterpreterOutput] + """The outputs from the Code Interpreter tool call. + + Code Interpreter can output one or more items, including text (`logs`) or images + (`image`). Each of these are represented by a different object type. + """ + + +class CodeInterpreterToolCall(BaseModel): + id: str + """The ID of the tool call.""" + + code_interpreter: CodeInterpreter + """The Code Interpreter tool call definition.""" + + type: Literal["code_interpreter"] + """The type of tool call. + + This is always going to be `code_interpreter` for this type of tool call. 
+ """ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/code_interpreter_tool_call_delta.py b/.venv/Lib/site-packages/openai/types/beta/threads/runs/code_interpreter_tool_call_delta.py new file mode 100644 index 00000000..eff76355 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/runs/code_interpreter_tool_call_delta.py @@ -0,0 +1,44 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Union, Optional +from typing_extensions import Literal, Annotated + +from ....._utils import PropertyInfo +from ....._models import BaseModel +from .code_interpreter_logs import CodeInterpreterLogs +from .code_interpreter_output_image import CodeInterpreterOutputImage + +__all__ = ["CodeInterpreterToolCallDelta", "CodeInterpreter", "CodeInterpreterOutput"] + +CodeInterpreterOutput = Annotated[ + Union[CodeInterpreterLogs, CodeInterpreterOutputImage], PropertyInfo(discriminator="type") +] + + +class CodeInterpreter(BaseModel): + input: Optional[str] = None + """The input to the Code Interpreter tool call.""" + + outputs: Optional[List[CodeInterpreterOutput]] = None + """The outputs from the Code Interpreter tool call. + + Code Interpreter can output one or more items, including text (`logs`) or images + (`image`). Each of these are represented by a different object type. + """ + + +class CodeInterpreterToolCallDelta(BaseModel): + index: int + """The index of the tool call in the tool calls array.""" + + type: Literal["code_interpreter"] + """The type of tool call. + + This is always going to be `code_interpreter` for this type of tool call. 
+ """ + + id: Optional[str] = None + """The ID of the tool call.""" + + code_interpreter: Optional[CodeInterpreter] = None + """The Code Interpreter tool call definition.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/file_search_tool_call.py b/.venv/Lib/site-packages/openai/types/beta/threads/runs/file_search_tool_call.py new file mode 100644 index 00000000..57c0ca9a --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/runs/file_search_tool_call.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal + +from ....._models import BaseModel + +__all__ = ["FileSearchToolCall"] + + +class FileSearchToolCall(BaseModel): + id: str + """The ID of the tool call object.""" + + file_search: object + """For now, this is always going to be an empty object.""" + + type: Literal["file_search"] + """The type of tool call. + + This is always going to be `file_search` for this type of tool call. + """ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/file_search_tool_call_delta.py b/.venv/Lib/site-packages/openai/types/beta/threads/runs/file_search_tool_call_delta.py new file mode 100644 index 00000000..df5ac217 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/runs/file_search_tool_call_delta.py @@ -0,0 +1,25 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from typing_extensions import Literal + +from ....._models import BaseModel + +__all__ = ["FileSearchToolCallDelta"] + + +class FileSearchToolCallDelta(BaseModel): + file_search: object + """For now, this is always going to be an empty object.""" + + index: int + """The index of the tool call in the tool calls array.""" + + type: Literal["file_search"] + """The type of tool call. + + This is always going to be `file_search` for this type of tool call. 
+ """ + + id: Optional[str] = None + """The ID of the tool call object.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/function_tool_call.py b/.venv/Lib/site-packages/openai/types/beta/threads/runs/function_tool_call.py new file mode 100644 index 00000000..b1d354f8 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/runs/function_tool_call.py @@ -0,0 +1,38 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from typing_extensions import Literal + +from ....._models import BaseModel + +__all__ = ["FunctionToolCall", "Function"] + + +class Function(BaseModel): + arguments: str + """The arguments passed to the function.""" + + name: str + """The name of the function.""" + + output: Optional[str] = None + """The output of the function. + + This will be `null` if the outputs have not been + [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) + yet. + """ + + +class FunctionToolCall(BaseModel): + id: str + """The ID of the tool call object.""" + + function: Function + """The definition of the function that was called.""" + + type: Literal["function"] + """The type of tool call. + + This is always going to be `function` for this type of tool call. + """ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/function_tool_call_delta.py b/.venv/Lib/site-packages/openai/types/beta/threads/runs/function_tool_call_delta.py new file mode 100644 index 00000000..faaf026f --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/runs/function_tool_call_delta.py @@ -0,0 +1,41 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Optional +from typing_extensions import Literal + +from ....._models import BaseModel + +__all__ = ["FunctionToolCallDelta", "Function"] + + +class Function(BaseModel): + arguments: Optional[str] = None + """The arguments passed to the function.""" + + name: Optional[str] = None + """The name of the function.""" + + output: Optional[str] = None + """The output of the function. + + This will be `null` if the outputs have not been + [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) + yet. + """ + + +class FunctionToolCallDelta(BaseModel): + index: int + """The index of the tool call in the tool calls array.""" + + type: Literal["function"] + """The type of tool call. + + This is always going to be `function` for this type of tool call. + """ + + id: Optional[str] = None + """The ID of the tool call object.""" + + function: Optional[Function] = None + """The definition of the function that was called.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/message_creation_step_details.py b/.venv/Lib/site-packages/openai/types/beta/threads/runs/message_creation_step_details.py new file mode 100644 index 00000000..73439079 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/runs/message_creation_step_details.py @@ -0,0 +1,19 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing_extensions import Literal + +from ....._models import BaseModel + +__all__ = ["MessageCreationStepDetails", "MessageCreation"] + + +class MessageCreation(BaseModel): + message_id: str + """The ID of the message that was created by this run step.""" + + +class MessageCreationStepDetails(BaseModel): + message_creation: MessageCreation + + type: Literal["message_creation"] + """Always `message_creation`.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/run_step.py b/.venv/Lib/site-packages/openai/types/beta/threads/runs/run_step.py new file mode 100644 index 00000000..7c81dcac --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/runs/run_step.py @@ -0,0 +1,110 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Union, Optional +from typing_extensions import Literal, Annotated + +from ....._utils import PropertyInfo +from ....._models import BaseModel +from .tool_calls_step_details import ToolCallsStepDetails +from .message_creation_step_details import MessageCreationStepDetails + +__all__ = ["RunStep", "LastError", "StepDetails", "Usage"] + + +class LastError(BaseModel): + code: Literal["server_error", "rate_limit_exceeded"] + """One of `server_error` or `rate_limit_exceeded`.""" + + message: str + """A human-readable description of the error.""" + + +StepDetails = Annotated[Union[MessageCreationStepDetails, ToolCallsStepDetails], PropertyInfo(discriminator="type")] + + +class Usage(BaseModel): + completion_tokens: int + """Number of completion tokens used over the course of the run step.""" + + prompt_tokens: int + """Number of prompt tokens used over the course of the run step.""" + + total_tokens: int + """Total number of tokens used (prompt + completion).""" + + +class RunStep(BaseModel): + id: str + """The identifier of the run step, which can be referenced in API endpoints.""" + + assistant_id: str + """ + The ID of the + 
[assistant](https://platform.openai.com/docs/api-reference/assistants) + associated with the run step. + """ + + cancelled_at: Optional[int] = None + """The Unix timestamp (in seconds) for when the run step was cancelled.""" + + completed_at: Optional[int] = None + """The Unix timestamp (in seconds) for when the run step completed.""" + + created_at: int + """The Unix timestamp (in seconds) for when the run step was created.""" + + expired_at: Optional[int] = None + """The Unix timestamp (in seconds) for when the run step expired. + + A step is considered expired if the parent run is expired. + """ + + failed_at: Optional[int] = None + """The Unix timestamp (in seconds) for when the run step failed.""" + + last_error: Optional[LastError] = None + """The last error associated with this run step. + + Will be `null` if there are no errors. + """ + + metadata: Optional[object] = None + """Set of 16 key-value pairs that can be attached to an object. + + This can be useful for storing additional information about the object in a + structured format. Keys can be a maximum of 64 characters long and values can be + a maxium of 512 characters long. + """ + + object: Literal["thread.run.step"] + """The object type, which is always `thread.run.step`.""" + + run_id: str + """ + The ID of the [run](https://platform.openai.com/docs/api-reference/runs) that + this run step is a part of. + """ + + status: Literal["in_progress", "cancelled", "failed", "completed", "expired"] + """ + The status of the run step, which can be either `in_progress`, `cancelled`, + `failed`, `completed`, or `expired`. + """ + + step_details: StepDetails + """The details of the run step.""" + + thread_id: str + """ + The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) + that was run. 
+ """ + + type: Literal["message_creation", "tool_calls"] + """The type of run step, which can be either `message_creation` or `tool_calls`.""" + + usage: Optional[Usage] = None + """Usage statistics related to the run step. + + This value will be `null` while the run step's status is `in_progress`. + """ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/run_step_delta.py b/.venv/Lib/site-packages/openai/types/beta/threads/runs/run_step_delta.py new file mode 100644 index 00000000..d6b4aefe --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/runs/run_step_delta.py @@ -0,0 +1,18 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Union, Optional +from typing_extensions import Annotated + +from ....._utils import PropertyInfo +from ....._models import BaseModel +from .tool_call_delta_object import ToolCallDeltaObject +from .run_step_delta_message_delta import RunStepDeltaMessageDelta + +__all__ = ["RunStepDelta", "StepDetails"] + +StepDetails = Annotated[Union[RunStepDeltaMessageDelta, ToolCallDeltaObject], PropertyInfo(discriminator="type")] + + +class RunStepDelta(BaseModel): + step_details: Optional[StepDetails] = None + """The details of the run step.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/run_step_delta_event.py b/.venv/Lib/site-packages/openai/types/beta/threads/runs/run_step_delta_event.py new file mode 100644 index 00000000..7f3f92aa --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/runs/run_step_delta_event.py @@ -0,0 +1,19 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing_extensions import Literal + +from ....._models import BaseModel +from .run_step_delta import RunStepDelta + +__all__ = ["RunStepDeltaEvent"] + + +class RunStepDeltaEvent(BaseModel): + id: str + """The identifier of the run step, which can be referenced in API endpoints.""" + + delta: RunStepDelta + """The delta containing the fields that have changed on the run step.""" + + object: Literal["thread.run.step.delta"] + """The object type, which is always `thread.run.step.delta`.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/run_step_delta_message_delta.py b/.venv/Lib/site-packages/openai/types/beta/threads/runs/run_step_delta_message_delta.py new file mode 100644 index 00000000..f58ed3d9 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/runs/run_step_delta_message_delta.py @@ -0,0 +1,20 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from typing_extensions import Literal + +from ....._models import BaseModel + +__all__ = ["RunStepDeltaMessageDelta", "MessageCreation"] + + +class MessageCreation(BaseModel): + message_id: Optional[str] = None + """The ID of the message that was created by this run step.""" + + +class RunStepDeltaMessageDelta(BaseModel): + type: Literal["message_creation"] + """Always `message_creation`.""" + + message_creation: Optional[MessageCreation] = None diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/step_list_params.py b/.venv/Lib/site-packages/openai/types/beta/threads/runs/step_list_params.py new file mode 100644 index 00000000..606d4445 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/runs/step_list_params.py @@ -0,0 +1,41 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["StepListParams"] + + +class StepListParams(TypedDict, total=False): + thread_id: Required[str] + + after: str + """A cursor for use in pagination. + + `after` is an object ID that defines your place in the list. For instance, if + you make a list request and receive 100 objects, ending with obj_foo, your + subsequent call can include after=obj_foo in order to fetch the next page of the + list. + """ + + before: str + """A cursor for use in pagination. + + `before` is an object ID that defines your place in the list. For instance, if + you make a list request and receive 100 objects, ending with obj_foo, your + subsequent call can include before=obj_foo in order to fetch the previous page + of the list. + """ + + limit: int + """A limit on the number of objects to be returned. + + Limit can range between 1 and 100, and the default is 20. + """ + + order: Literal["asc", "desc"] + """Sort order by the `created_at` timestamp of the objects. + + `asc` for ascending order and `desc` for descending order. + """ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/tool_call.py b/.venv/Lib/site-packages/openai/types/beta/threads/runs/tool_call.py new file mode 100644 index 00000000..77d86b46 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/runs/tool_call.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Union +from typing_extensions import Annotated + +from ....._utils import PropertyInfo +from .function_tool_call import FunctionToolCall +from .file_search_tool_call import FileSearchToolCall +from .code_interpreter_tool_call import CodeInterpreterToolCall + +__all__ = ["ToolCall"] + +ToolCall = Annotated[ + Union[CodeInterpreterToolCall, FileSearchToolCall, FunctionToolCall], PropertyInfo(discriminator="type") +] diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/tool_call_delta.py b/.venv/Lib/site-packages/openai/types/beta/threads/runs/tool_call_delta.py new file mode 100644 index 00000000..90cfe065 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/runs/tool_call_delta.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Union +from typing_extensions import Annotated + +from ....._utils import PropertyInfo +from .function_tool_call_delta import FunctionToolCallDelta +from .file_search_tool_call_delta import FileSearchToolCallDelta +from .code_interpreter_tool_call_delta import CodeInterpreterToolCallDelta + +__all__ = ["ToolCallDelta"] + +ToolCallDelta = Annotated[ + Union[CodeInterpreterToolCallDelta, FileSearchToolCallDelta, FunctionToolCallDelta], + PropertyInfo(discriminator="type"), +] diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/tool_call_delta_object.py b/.venv/Lib/site-packages/openai/types/beta/threads/runs/tool_call_delta_object.py new file mode 100644 index 00000000..189dce77 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/runs/tool_call_delta_object.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import List, Optional +from typing_extensions import Literal + +from ....._models import BaseModel +from .tool_call_delta import ToolCallDelta + +__all__ = ["ToolCallDeltaObject"] + + +class ToolCallDeltaObject(BaseModel): + type: Literal["tool_calls"] + """Always `tool_calls`.""" + + tool_calls: Optional[List[ToolCallDelta]] = None + """An array of tool calls the run step was involved in. + + These can be associated with one of three types of tools: `code_interpreter`, + `file_search`, or `function`. + """ diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/runs/tool_calls_step_details.py b/.venv/Lib/site-packages/openai/types/beta/threads/runs/tool_calls_step_details.py new file mode 100644 index 00000000..a084d387 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/runs/tool_calls_step_details.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List +from typing_extensions import Literal + +from .tool_call import ToolCall +from ....._models import BaseModel + +__all__ = ["ToolCallsStepDetails"] + + +class ToolCallsStepDetails(BaseModel): + tool_calls: List[ToolCall] + """An array of tool calls the run step was involved in. + + These can be associated with one of three types of tools: `code_interpreter`, + `file_search`, or `function`. + """ + + type: Literal["tool_calls"] + """Always `tool_calls`.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/text.py b/.venv/Lib/site-packages/openai/types/beta/threads/text.py new file mode 100644 index 00000000..853bec29 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/text.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import List + +from ...._models import BaseModel +from .annotation import Annotation + +__all__ = ["Text"] + + +class Text(BaseModel): + annotations: List[Annotation] + + value: str + """The data that makes up the text.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/text_content_block.py b/.venv/Lib/site-packages/openai/types/beta/threads/text_content_block.py new file mode 100644 index 00000000..3706d6b9 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/text_content_block.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal + +from .text import Text +from ...._models import BaseModel + +__all__ = ["TextContentBlock"] + + +class TextContentBlock(BaseModel): + text: Text + + type: Literal["text"] + """Always `text`.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/text_delta.py b/.venv/Lib/site-packages/openai/types/beta/threads/text_delta.py new file mode 100644 index 00000000..09cd3570 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/text_delta.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional + +from ...._models import BaseModel +from .annotation_delta import AnnotationDelta + +__all__ = ["TextDelta"] + + +class TextDelta(BaseModel): + annotations: Optional[List[AnnotationDelta]] = None + + value: Optional[str] = None + """The data that makes up the text.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/threads/text_delta_block.py b/.venv/Lib/site-packages/openai/types/beta/threads/text_delta_block.py new file mode 100644 index 00000000..586116e0 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/threads/text_delta_block.py @@ -0,0 +1,19 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Optional +from typing_extensions import Literal + +from ...._models import BaseModel +from .text_delta import TextDelta + +__all__ = ["TextDeltaBlock"] + + +class TextDeltaBlock(BaseModel): + index: int + """The index of the content part in the message.""" + + type: Literal["text"] + """Always `text`.""" + + text: Optional[TextDelta] = None diff --git a/.venv/Lib/site-packages/openai/types/beta/vector_store.py b/.venv/Lib/site-packages/openai/types/beta/vector_store.py new file mode 100644 index 00000000..488961b4 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/vector_store.py @@ -0,0 +1,79 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["VectorStore", "FileCounts", "ExpiresAfter"] + + +class FileCounts(BaseModel): + cancelled: int + """The number of files that were cancelled.""" + + completed: int + """The number of files that have been successfully processed.""" + + failed: int + """The number of files that have failed to process.""" + + in_progress: int + """The number of files that are currently being processed.""" + + total: int + """The total number of files.""" + + +class ExpiresAfter(BaseModel): + anchor: Literal["last_active_at"] + """Anchor timestamp after which the expiration policy applies. + + Supported anchors: `last_active_at`. 
+ """ + + days: int + """The number of days after the anchor time that the vector store will expire.""" + + +class VectorStore(BaseModel): + id: str + """The identifier, which can be referenced in API endpoints.""" + + created_at: int + """The Unix timestamp (in seconds) for when the vector store was created.""" + + file_counts: FileCounts + + last_active_at: Optional[int] = None + """The Unix timestamp (in seconds) for when the vector store was last active.""" + + metadata: Optional[object] = None + """Set of 16 key-value pairs that can be attached to an object. + + This can be useful for storing additional information about the object in a + structured format. Keys can be a maximum of 64 characters long and values can be + a maxium of 512 characters long. + """ + + name: str + """The name of the vector store.""" + + object: Literal["vector_store"] + """The object type, which is always `vector_store`.""" + + status: Literal["expired", "in_progress", "completed"] + """ + The status of the vector store, which can be either `expired`, `in_progress`, or + `completed`. A status of `completed` indicates that the vector store is ready + for use. + """ + + usage_bytes: int + """The total number of bytes used by the files in the vector store.""" + + expires_after: Optional[ExpiresAfter] = None + """The expiration policy for a vector store.""" + + expires_at: Optional[int] = None + """The Unix timestamp (in seconds) for when the vector store will expire.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/vector_store_create_params.py b/.venv/Lib/site-packages/openai/types/beta/vector_store_create_params.py new file mode 100644 index 00000000..f1a3abcb --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/vector_store_create_params.py @@ -0,0 +1,42 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import List, Optional +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["VectorStoreCreateParams", "ExpiresAfter"] + + +class VectorStoreCreateParams(TypedDict, total=False): + expires_after: ExpiresAfter + """The expiration policy for a vector store.""" + + file_ids: List[str] + """ + A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that + the vector store should use. Useful for tools like `file_search` that can access + files. + """ + + metadata: Optional[object] + """Set of 16 key-value pairs that can be attached to an object. + + This can be useful for storing additional information about the object in a + structured format. Keys can be a maximum of 64 characters long and values can be + a maxium of 512 characters long. + """ + + name: str + """The name of the vector store.""" + + +class ExpiresAfter(TypedDict, total=False): + anchor: Required[Literal["last_active_at"]] + """Anchor timestamp after which the expiration policy applies. + + Supported anchors: `last_active_at`. + """ + + days: Required[int] + """The number of days after the anchor time that the vector store will expire.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/vector_store_deleted.py b/.venv/Lib/site-packages/openai/types/beta/vector_store_deleted.py new file mode 100644 index 00000000..21ccda1d --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/vector_store_deleted.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["VectorStoreDeleted"] + + +class VectorStoreDeleted(BaseModel): + id: str + + deleted: bool + + object: Literal["vector_store.deleted"] diff --git a/.venv/Lib/site-packages/openai/types/beta/vector_store_list_params.py b/.venv/Lib/site-packages/openai/types/beta/vector_store_list_params.py new file mode 100644 index 00000000..f39f6726 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/vector_store_list_params.py @@ -0,0 +1,39 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, TypedDict + +__all__ = ["VectorStoreListParams"] + + +class VectorStoreListParams(TypedDict, total=False): + after: str + """A cursor for use in pagination. + + `after` is an object ID that defines your place in the list. For instance, if + you make a list request and receive 100 objects, ending with obj_foo, your + subsequent call can include after=obj_foo in order to fetch the next page of the + list. + """ + + before: str + """A cursor for use in pagination. + + `before` is an object ID that defines your place in the list. For instance, if + you make a list request and receive 100 objects, ending with obj_foo, your + subsequent call can include before=obj_foo in order to fetch the previous page + of the list. + """ + + limit: int + """A limit on the number of objects to be returned. + + Limit can range between 1 and 100, and the default is 20. + """ + + order: Literal["asc", "desc"] + """Sort order by the `created_at` timestamp of the objects. + + `asc` for ascending order and `desc` for descending order. 
+ """ diff --git a/.venv/Lib/site-packages/openai/types/beta/vector_store_update_params.py b/.venv/Lib/site-packages/openai/types/beta/vector_store_update_params.py new file mode 100644 index 00000000..0f9593e4 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/vector_store_update_params.py @@ -0,0 +1,35 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Optional +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["VectorStoreUpdateParams", "ExpiresAfter"] + + +class VectorStoreUpdateParams(TypedDict, total=False): + expires_after: Optional[ExpiresAfter] + """The expiration policy for a vector store.""" + + metadata: Optional[object] + """Set of 16 key-value pairs that can be attached to an object. + + This can be useful for storing additional information about the object in a + structured format. Keys can be a maximum of 64 characters long and values can be + a maxium of 512 characters long. + """ + + name: Optional[str] + """The name of the vector store.""" + + +class ExpiresAfter(TypedDict, total=False): + anchor: Required[Literal["last_active_at"]] + """Anchor timestamp after which the expiration policy applies. + + Supported anchors: `last_active_at`. + """ + + days: Required[int] + """The number of days after the anchor time that the vector store will expire.""" diff --git a/.venv/Lib/site-packages/openai/types/beta/vector_stores/__init__.py b/.venv/Lib/site-packages/openai/types/beta/vector_stores/__init__.py new file mode 100644 index 00000000..ff05dd63 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/vector_stores/__init__.py @@ -0,0 +1,11 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from .file_list_params import FileListParams as FileListParams +from .vector_store_file import VectorStoreFile as VectorStoreFile +from .file_create_params import FileCreateParams as FileCreateParams +from .vector_store_file_batch import VectorStoreFileBatch as VectorStoreFileBatch +from .file_batch_create_params import FileBatchCreateParams as FileBatchCreateParams +from .vector_store_file_deleted import VectorStoreFileDeleted as VectorStoreFileDeleted +from .file_batch_list_files_params import FileBatchListFilesParams as FileBatchListFilesParams diff --git a/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..a2ba586f Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/file_batch_create_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/file_batch_create_params.cpython-311.pyc new file mode 100644 index 00000000..43137224 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/file_batch_create_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/file_batch_list_files_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/file_batch_list_files_params.cpython-311.pyc new file mode 100644 index 00000000..acd61633 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/file_batch_list_files_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/file_create_params.cpython-311.pyc 
b/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/file_create_params.cpython-311.pyc new file mode 100644 index 00000000..20b696db Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/file_create_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/file_list_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/file_list_params.cpython-311.pyc new file mode 100644 index 00000000..a6cbb02b Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/file_list_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/vector_store_file.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/vector_store_file.cpython-311.pyc new file mode 100644 index 00000000..bf1594fb Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/vector_store_file.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/vector_store_file_batch.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/vector_store_file_batch.cpython-311.pyc new file mode 100644 index 00000000..0113e4a5 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/vector_store_file_batch.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/vector_store_file_deleted.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/vector_store_file_deleted.cpython-311.pyc new file mode 100644 index 00000000..912c0857 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/beta/vector_stores/__pycache__/vector_store_file_deleted.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/openai/types/beta/vector_stores/file_batch_create_params.py b/.venv/Lib/site-packages/openai/types/beta/vector_stores/file_batch_create_params.py new file mode 100644 index 00000000..08828297 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/vector_stores/file_batch_create_params.py @@ -0,0 +1,17 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List +from typing_extensions import Required, TypedDict + +__all__ = ["FileBatchCreateParams"] + + +class FileBatchCreateParams(TypedDict, total=False): + file_ids: Required[List[str]] + """ + A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that + the vector store should use. Useful for tools like `file_search` that can access + files. + """ diff --git a/.venv/Lib/site-packages/openai/types/beta/vector_stores/file_batch_list_files_params.py b/.venv/Lib/site-packages/openai/types/beta/vector_stores/file_batch_list_files_params.py new file mode 100644 index 00000000..24dee7d5 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/vector_stores/file_batch_list_files_params.py @@ -0,0 +1,47 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["FileBatchListFilesParams"] + + +class FileBatchListFilesParams(TypedDict, total=False): + vector_store_id: Required[str] + + after: str + """A cursor for use in pagination. + + `after` is an object ID that defines your place in the list. For instance, if + you make a list request and receive 100 objects, ending with obj_foo, your + subsequent call can include after=obj_foo in order to fetch the next page of the + list. + """ + + before: str + """A cursor for use in pagination. + + `before` is an object ID that defines your place in the list. 
For instance, if + you make a list request and receive 100 objects, ending with obj_foo, your + subsequent call can include before=obj_foo in order to fetch the previous page + of the list. + """ + + filter: Literal["in_progress", "completed", "failed", "cancelled"] + """Filter by file status. + + One of `in_progress`, `completed`, `failed`, `cancelled`. + """ + + limit: int + """A limit on the number of objects to be returned. + + Limit can range between 1 and 100, and the default is 20. + """ + + order: Literal["asc", "desc"] + """Sort order by the `created_at` timestamp of the objects. + + `asc` for ascending order and `desc` for descending order. + """ diff --git a/.venv/Lib/site-packages/openai/types/beta/vector_stores/file_create_params.py b/.venv/Lib/site-packages/openai/types/beta/vector_stores/file_create_params.py new file mode 100644 index 00000000..2fee588a --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/vector_stores/file_create_params.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["FileCreateParams"] + + +class FileCreateParams(TypedDict, total=False): + file_id: Required[str] + """ + A [File](https://platform.openai.com/docs/api-reference/files) ID that the + vector store should use. Useful for tools like `file_search` that can access + files. + """ diff --git a/.venv/Lib/site-packages/openai/types/beta/vector_stores/file_list_params.py b/.venv/Lib/site-packages/openai/types/beta/vector_stores/file_list_params.py new file mode 100644 index 00000000..23dd7f0d --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/vector_stores/file_list_params.py @@ -0,0 +1,45 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing_extensions import Literal, TypedDict + +__all__ = ["FileListParams"] + + +class FileListParams(TypedDict, total=False): + after: str + """A cursor for use in pagination. + + `after` is an object ID that defines your place in the list. For instance, if + you make a list request and receive 100 objects, ending with obj_foo, your + subsequent call can include after=obj_foo in order to fetch the next page of the + list. + """ + + before: str + """A cursor for use in pagination. + + `before` is an object ID that defines your place in the list. For instance, if + you make a list request and receive 100 objects, ending with obj_foo, your + subsequent call can include before=obj_foo in order to fetch the previous page + of the list. + """ + + filter: Literal["in_progress", "completed", "failed", "cancelled"] + """Filter by file status. + + One of `in_progress`, `completed`, `failed`, `cancelled`. + """ + + limit: int + """A limit on the number of objects to be returned. + + Limit can range between 1 and 100, and the default is 20. + """ + + order: Literal["asc", "desc"] + """Sort order by the `created_at` timestamp of the objects. + + `asc` for ascending order and `desc` for descending order. + """ diff --git a/.venv/Lib/site-packages/openai/types/beta/vector_stores/vector_store_file.py b/.venv/Lib/site-packages/openai/types/beta/vector_stores/vector_store_file.py new file mode 100644 index 00000000..3fab4896 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/vector_stores/vector_store_file.py @@ -0,0 +1,54 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Optional +from typing_extensions import Literal + +from ...._models import BaseModel + +__all__ = ["VectorStoreFile", "LastError"] + + +class LastError(BaseModel): + code: Literal["internal_error", "file_not_found", "parsing_error", "unhandled_mime_type"] + """One of `server_error` or `rate_limit_exceeded`.""" + + message: str + """A human-readable description of the error.""" + + +class VectorStoreFile(BaseModel): + id: str + """The identifier, which can be referenced in API endpoints.""" + + created_at: int + """The Unix timestamp (in seconds) for when the vector store file was created.""" + + last_error: Optional[LastError] = None + """The last error associated with this vector store file. + + Will be `null` if there are no errors. + """ + + object: Literal["vector_store.file"] + """The object type, which is always `vector_store.file`.""" + + status: Literal["in_progress", "completed", "cancelled", "failed"] + """ + The status of the vector store file, which can be either `in_progress`, + `completed`, `cancelled`, or `failed`. The status `completed` indicates that the + vector store file is ready for use. + """ + + usage_bytes: int + """The total vector store usage in bytes. + + Note that this may be different from the original file size. + """ + + vector_store_id: str + """ + The ID of the + [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + that the [File](https://platform.openai.com/docs/api-reference/files) is + attached to. + """ diff --git a/.venv/Lib/site-packages/openai/types/beta/vector_stores/vector_store_file_batch.py b/.venv/Lib/site-packages/openai/types/beta/vector_stores/vector_store_file_batch.py new file mode 100644 index 00000000..df130a58 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/vector_stores/vector_store_file_batch.py @@ -0,0 +1,54 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing_extensions import Literal + +from ...._models import BaseModel + +__all__ = ["VectorStoreFileBatch", "FileCounts"] + + +class FileCounts(BaseModel): + cancelled: int + """The number of files that where cancelled.""" + + completed: int + """The number of files that have been processed.""" + + failed: int + """The number of files that have failed to process.""" + + in_progress: int + """The number of files that are currently being processed.""" + + total: int + """The total number of files.""" + + +class VectorStoreFileBatch(BaseModel): + id: str + """The identifier, which can be referenced in API endpoints.""" + + created_at: int + """ + The Unix timestamp (in seconds) for when the vector store files batch was + created. + """ + + file_counts: FileCounts + + object: Literal["vector_store.files_batch"] + """The object type, which is always `vector_store.file_batch`.""" + + status: Literal["in_progress", "completed", "cancelled", "failed"] + """ + The status of the vector store files batch, which can be either `in_progress`, + `completed`, `cancelled` or `failed`. + """ + + vector_store_id: str + """ + The ID of the + [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + that the [File](https://platform.openai.com/docs/api-reference/files) is + attached to. + """ diff --git a/.venv/Lib/site-packages/openai/types/beta/vector_stores/vector_store_file_deleted.py b/.venv/Lib/site-packages/openai/types/beta/vector_stores/vector_store_file_deleted.py new file mode 100644 index 00000000..ae37f843 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/beta/vector_stores/vector_store_file_deleted.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing_extensions import Literal + +from ...._models import BaseModel + +__all__ = ["VectorStoreFileDeleted"] + + +class VectorStoreFileDeleted(BaseModel): + id: str + + deleted: bool + + object: Literal["vector_store.file.deleted"] diff --git a/.venv/Lib/site-packages/openai/types/chat/__init__.py b/.venv/Lib/site-packages/openai/types/chat/__init__.py new file mode 100644 index 00000000..5d122d20 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat/__init__.py @@ -0,0 +1,41 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from .chat_completion import ChatCompletion as ChatCompletion +from .chat_completion_role import ChatCompletionRole as ChatCompletionRole +from .chat_completion_chunk import ChatCompletionChunk as ChatCompletionChunk +from .chat_completion_message import ChatCompletionMessage as ChatCompletionMessage +from .completion_create_params import CompletionCreateParams as CompletionCreateParams +from .chat_completion_tool_param import ChatCompletionToolParam as ChatCompletionToolParam +from .chat_completion_message_param import ChatCompletionMessageParam as ChatCompletionMessageParam +from .chat_completion_token_logprob import ChatCompletionTokenLogprob as ChatCompletionTokenLogprob +from .chat_completion_message_tool_call import ChatCompletionMessageToolCall as ChatCompletionMessageToolCall +from .chat_completion_content_part_param import ChatCompletionContentPartParam as ChatCompletionContentPartParam +from .chat_completion_tool_message_param import ChatCompletionToolMessageParam as ChatCompletionToolMessageParam +from .chat_completion_user_message_param import ChatCompletionUserMessageParam as ChatCompletionUserMessageParam +from .chat_completion_system_message_param import ChatCompletionSystemMessageParam as ChatCompletionSystemMessageParam +from .chat_completion_function_message_param import ( + ChatCompletionFunctionMessageParam as 
ChatCompletionFunctionMessageParam, +) +from .chat_completion_assistant_message_param import ( + ChatCompletionAssistantMessageParam as ChatCompletionAssistantMessageParam, +) +from .chat_completion_content_part_text_param import ( + ChatCompletionContentPartTextParam as ChatCompletionContentPartTextParam, +) +from .chat_completion_message_tool_call_param import ( + ChatCompletionMessageToolCallParam as ChatCompletionMessageToolCallParam, +) +from .chat_completion_named_tool_choice_param import ( + ChatCompletionNamedToolChoiceParam as ChatCompletionNamedToolChoiceParam, +) +from .chat_completion_content_part_image_param import ( + ChatCompletionContentPartImageParam as ChatCompletionContentPartImageParam, +) +from .chat_completion_tool_choice_option_param import ( + ChatCompletionToolChoiceOptionParam as ChatCompletionToolChoiceOptionParam, +) +from .chat_completion_function_call_option_param import ( + ChatCompletionFunctionCallOptionParam as ChatCompletionFunctionCallOptionParam, +) diff --git a/.venv/Lib/site-packages/openai/types/chat/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/chat/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..47a4beb6 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/chat/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion.cpython-311.pyc new file mode 100644 index 00000000..c9b8ea51 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_assistant_message_param.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_assistant_message_param.cpython-311.pyc new file mode 100644 index 00000000..bb446c39 Binary files /dev/null and 
b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_assistant_message_param.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_chunk.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_chunk.cpython-311.pyc new file mode 100644 index 00000000..7ee87335 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_chunk.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_content_part_image_param.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_content_part_image_param.cpython-311.pyc new file mode 100644 index 00000000..fd76d0ba Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_content_part_image_param.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_content_part_param.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_content_part_param.cpython-311.pyc new file mode 100644 index 00000000..b88b59f7 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_content_part_param.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_content_part_text_param.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_content_part_text_param.cpython-311.pyc new file mode 100644 index 00000000..a3fa0717 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_content_part_text_param.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_function_call_option_param.cpython-311.pyc 
b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_function_call_option_param.cpython-311.pyc new file mode 100644 index 00000000..4ee8259f Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_function_call_option_param.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_function_message_param.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_function_message_param.cpython-311.pyc new file mode 100644 index 00000000..b0e1a026 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_function_message_param.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_message.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_message.cpython-311.pyc new file mode 100644 index 00000000..ad5c1f5a Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_message.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_message_param.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_message_param.cpython-311.pyc new file mode 100644 index 00000000..38e2176c Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_message_param.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_message_tool_call.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_message_tool_call.cpython-311.pyc new file mode 100644 index 00000000..6561e1b1 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_message_tool_call.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_message_tool_call_param.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_message_tool_call_param.cpython-311.pyc new file mode 100644 index 00000000..26fc7dbc Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_message_tool_call_param.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_named_tool_choice_param.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_named_tool_choice_param.cpython-311.pyc new file mode 100644 index 00000000..6cf2b506 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_named_tool_choice_param.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_role.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_role.cpython-311.pyc new file mode 100644 index 00000000..2c515254 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_role.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_system_message_param.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_system_message_param.cpython-311.pyc new file mode 100644 index 00000000..72af4725 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_system_message_param.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_token_logprob.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_token_logprob.cpython-311.pyc new file mode 100644 index 00000000..f2707485 Binary files /dev/null and 
b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_token_logprob.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_tool_choice_option_param.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_tool_choice_option_param.cpython-311.pyc new file mode 100644 index 00000000..b9199b13 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_tool_choice_option_param.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_tool_message_param.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_tool_message_param.cpython-311.pyc new file mode 100644 index 00000000..59ad92fe Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_tool_message_param.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_tool_param.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_tool_param.cpython-311.pyc new file mode 100644 index 00000000..4ed39439 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_tool_param.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_user_message_param.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_user_message_param.cpython-311.pyc new file mode 100644 index 00000000..a34edc4e Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/chat/__pycache__/chat_completion_user_message_param.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/chat/__pycache__/completion_create_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/chat/__pycache__/completion_create_params.cpython-311.pyc new file mode 100644 index 
00000000..29ed5af9 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/chat/__pycache__/completion_create_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/chat/chat_completion.py b/.venv/Lib/site-packages/openai/types/chat/chat_completion.py new file mode 100644 index 00000000..61a94a25 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat/chat_completion.py @@ -0,0 +1,67 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional +from typing_extensions import Literal + +from ..._models import BaseModel +from ..completion_usage import CompletionUsage +from .chat_completion_message import ChatCompletionMessage +from .chat_completion_token_logprob import ChatCompletionTokenLogprob + +__all__ = ["ChatCompletion", "Choice", "ChoiceLogprobs"] + + +class ChoiceLogprobs(BaseModel): + content: Optional[List[ChatCompletionTokenLogprob]] = None + """A list of message content tokens with log probability information.""" + + +class Choice(BaseModel): + finish_reason: Literal["stop", "length", "tool_calls", "content_filter", "function_call"] + """The reason the model stopped generating tokens. + + This will be `stop` if the model hit a natural stop point or a provided stop + sequence, `length` if the maximum number of tokens specified in the request was + reached, `content_filter` if content was omitted due to a flag from our content + filters, `tool_calls` if the model called a tool, or `function_call` + (deprecated) if the model called a function. 
+ """ + + index: int + """The index of the choice in the list of choices.""" + + logprobs: Optional[ChoiceLogprobs] = None + """Log probability information for the choice.""" + + message: ChatCompletionMessage + """A chat completion message generated by the model.""" + + +class ChatCompletion(BaseModel): + id: str + """A unique identifier for the chat completion.""" + + choices: List[Choice] + """A list of chat completion choices. + + Can be more than one if `n` is greater than 1. + """ + + created: int + """The Unix timestamp (in seconds) of when the chat completion was created.""" + + model: str + """The model used for the chat completion.""" + + object: Literal["chat.completion"] + """The object type, which is always `chat.completion`.""" + + system_fingerprint: Optional[str] = None + """This fingerprint represents the backend configuration that the model runs with. + + Can be used in conjunction with the `seed` request parameter to understand when + backend changes have been made that might impact determinism. + """ + + usage: Optional[CompletionUsage] = None + """Usage statistics for the completion request.""" diff --git a/.venv/Lib/site-packages/openai/types/chat/chat_completion_assistant_message_param.py b/.venv/Lib/site-packages/openai/types/chat/chat_completion_assistant_message_param.py new file mode 100644 index 00000000..e1e39948 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat/chat_completion_assistant_message_param.py @@ -0,0 +1,51 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Iterable, Optional +from typing_extensions import Literal, Required, TypedDict + +from .chat_completion_message_tool_call_param import ChatCompletionMessageToolCallParam + +__all__ = ["ChatCompletionAssistantMessageParam", "FunctionCall"] + + +class FunctionCall(TypedDict, total=False): + arguments: Required[str] + """ + The arguments to call the function with, as generated by the model in JSON + format. Note that the model does not always generate valid JSON, and may + hallucinate parameters not defined by your function schema. Validate the + arguments in your code before calling your function. + """ + + name: Required[str] + """The name of the function to call.""" + + +class ChatCompletionAssistantMessageParam(TypedDict, total=False): + role: Required[Literal["assistant"]] + """The role of the messages author, in this case `assistant`.""" + + content: Optional[str] + """The contents of the assistant message. + + Required unless `tool_calls` or `function_call` is specified. + """ + + function_call: FunctionCall + """Deprecated and replaced by `tool_calls`. + + The name and arguments of a function that should be called, as generated by the + model. + """ + + name: str + """An optional name for the participant. + + Provides the model information to differentiate between participants of the same + role. + """ + + tool_calls: Iterable[ChatCompletionMessageToolCallParam] + """The tool calls generated by the model, such as function calls.""" diff --git a/.venv/Lib/site-packages/openai/types/chat/chat_completion_chunk.py b/.venv/Lib/site-packages/openai/types/chat/chat_completion_chunk.py new file mode 100644 index 00000000..c2f18bcb --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat/chat_completion_chunk.py @@ -0,0 +1,128 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import List, Optional +from typing_extensions import Literal + +from ..._models import BaseModel +from .chat_completion_token_logprob import ChatCompletionTokenLogprob + +__all__ = [ + "ChatCompletionChunk", + "Choice", + "ChoiceDelta", + "ChoiceDeltaFunctionCall", + "ChoiceDeltaToolCall", + "ChoiceDeltaToolCallFunction", + "ChoiceLogprobs", +] + + +class ChoiceDeltaFunctionCall(BaseModel): + arguments: Optional[str] = None + """ + The arguments to call the function with, as generated by the model in JSON + format. Note that the model does not always generate valid JSON, and may + hallucinate parameters not defined by your function schema. Validate the + arguments in your code before calling your function. + """ + + name: Optional[str] = None + """The name of the function to call.""" + + +class ChoiceDeltaToolCallFunction(BaseModel): + arguments: Optional[str] = None + """ + The arguments to call the function with, as generated by the model in JSON + format. Note that the model does not always generate valid JSON, and may + hallucinate parameters not defined by your function schema. Validate the + arguments in your code before calling your function. + """ + + name: Optional[str] = None + """The name of the function to call.""" + + +class ChoiceDeltaToolCall(BaseModel): + index: int + + id: Optional[str] = None + """The ID of the tool call.""" + + function: Optional[ChoiceDeltaToolCallFunction] = None + + type: Optional[Literal["function"]] = None + """The type of the tool. Currently, only `function` is supported.""" + + +class ChoiceDelta(BaseModel): + content: Optional[str] = None + """The contents of the chunk message.""" + + function_call: Optional[ChoiceDeltaFunctionCall] = None + """Deprecated and replaced by `tool_calls`. + + The name and arguments of a function that should be called, as generated by the + model. 
+ """ + + role: Optional[Literal["system", "user", "assistant", "tool"]] = None + """The role of the author of this message.""" + + tool_calls: Optional[List[ChoiceDeltaToolCall]] = None + + +class ChoiceLogprobs(BaseModel): + content: Optional[List[ChatCompletionTokenLogprob]] = None + """A list of message content tokens with log probability information.""" + + +class Choice(BaseModel): + delta: ChoiceDelta + """A chat completion delta generated by streamed model responses.""" + + finish_reason: Optional[Literal["stop", "length", "tool_calls", "content_filter", "function_call"]] = None + """The reason the model stopped generating tokens. + + This will be `stop` if the model hit a natural stop point or a provided stop + sequence, `length` if the maximum number of tokens specified in the request was + reached, `content_filter` if content was omitted due to a flag from our content + filters, `tool_calls` if the model called a tool, or `function_call` + (deprecated) if the model called a function. + """ + + index: int + """The index of the choice in the list of choices.""" + + logprobs: Optional[ChoiceLogprobs] = None + """Log probability information for the choice.""" + + +class ChatCompletionChunk(BaseModel): + id: str + """A unique identifier for the chat completion. Each chunk has the same ID.""" + + choices: List[Choice] + """A list of chat completion choices. + + Can be more than one if `n` is greater than 1. + """ + + created: int + """The Unix timestamp (in seconds) of when the chat completion was created. + + Each chunk has the same timestamp. + """ + + model: str + """The model to generate the completion.""" + + object: Literal["chat.completion.chunk"] + """The object type, which is always `chat.completion.chunk`.""" + + system_fingerprint: Optional[str] = None + """ + This fingerprint represents the backend configuration that the model runs with. 
+ Can be used in conjunction with the `seed` request parameter to understand when + backend changes have been made that might impact determinism. + """ diff --git a/.venv/Lib/site-packages/openai/types/chat/chat_completion_content_part_image_param.py b/.venv/Lib/site-packages/openai/types/chat/chat_completion_content_part_image_param.py new file mode 100644 index 00000000..b1a186aa --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat/chat_completion_content_part_image_param.py @@ -0,0 +1,26 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["ChatCompletionContentPartImageParam", "ImageURL"] + + +class ImageURL(TypedDict, total=False): + url: Required[str] + """Either a URL of the image or the base64 encoded image data.""" + + detail: Literal["auto", "low", "high"] + """Specifies the detail level of the image. + + Learn more in the + [Vision guide](https://platform.openai.com/docs/guides/vision/low-or-high-fidelity-image-understanding). + """ + + +class ChatCompletionContentPartImageParam(TypedDict, total=False): + image_url: Required[ImageURL] + + type: Required[Literal["image_url"]] + """The type of the content part.""" diff --git a/.venv/Lib/site-packages/openai/types/chat/chat_completion_content_part_param.py b/.venv/Lib/site-packages/openai/types/chat/chat_completion_content_part_param.py new file mode 100644 index 00000000..f9b5f71e --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat/chat_completion_content_part_param.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Union + +from .chat_completion_content_part_text_param import ChatCompletionContentPartTextParam +from .chat_completion_content_part_image_param import ChatCompletionContentPartImageParam + +__all__ = ["ChatCompletionContentPartParam"] + +ChatCompletionContentPartParam = Union[ChatCompletionContentPartTextParam, ChatCompletionContentPartImageParam] diff --git a/.venv/Lib/site-packages/openai/types/chat/chat_completion_content_part_text_param.py b/.venv/Lib/site-packages/openai/types/chat/chat_completion_content_part_text_param.py new file mode 100644 index 00000000..a2707444 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat/chat_completion_content_part_text_param.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["ChatCompletionContentPartTextParam"] + + +class ChatCompletionContentPartTextParam(TypedDict, total=False): + text: Required[str] + """The text content.""" + + type: Required[Literal["text"]] + """The type of the content part.""" diff --git a/.venv/Lib/site-packages/openai/types/chat/chat_completion_function_call_option_param.py b/.venv/Lib/site-packages/openai/types/chat/chat_completion_function_call_option_param.py new file mode 100644 index 00000000..2bc014af --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat/chat_completion_function_call_option_param.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["ChatCompletionFunctionCallOptionParam"] + + +class ChatCompletionFunctionCallOptionParam(TypedDict, total=False): + name: Required[str] + """The name of the function to call.""" diff --git a/.venv/Lib/site-packages/openai/types/chat/chat_completion_function_message_param.py b/.venv/Lib/site-packages/openai/types/chat/chat_completion_function_message_param.py new file mode 100644 index 00000000..5af12bf9 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat/chat_completion_function_message_param.py @@ -0,0 +1,19 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Optional +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["ChatCompletionFunctionMessageParam"] + + +class ChatCompletionFunctionMessageParam(TypedDict, total=False): + content: Required[Optional[str]] + """The contents of the function message.""" + + name: Required[str] + """The name of the function to call.""" + + role: Required[Literal["function"]] + """The role of the messages author, in this case `function`.""" diff --git a/.venv/Lib/site-packages/openai/types/chat/chat_completion_message.py b/.venv/Lib/site-packages/openai/types/chat/chat_completion_message.py new file mode 100644 index 00000000..8db7d17d --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat/chat_completion_message.py @@ -0,0 +1,40 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import List, Optional +from typing_extensions import Literal + +from ..._models import BaseModel +from .chat_completion_message_tool_call import ChatCompletionMessageToolCall + +__all__ = ["ChatCompletionMessage", "FunctionCall"] + + +class FunctionCall(BaseModel): + arguments: str + """ + The arguments to call the function with, as generated by the model in JSON + format. Note that the model does not always generate valid JSON, and may + hallucinate parameters not defined by your function schema. Validate the + arguments in your code before calling your function. + """ + + name: str + """The name of the function to call.""" + + +class ChatCompletionMessage(BaseModel): + content: Optional[str] = None + """The contents of the message.""" + + role: Literal["assistant"] + """The role of the author of this message.""" + + function_call: Optional[FunctionCall] = None + """Deprecated and replaced by `tool_calls`. + + The name and arguments of a function that should be called, as generated by the + model. + """ + + tool_calls: Optional[List[ChatCompletionMessageToolCall]] = None + """The tool calls generated by the model, such as function calls.""" diff --git a/.venv/Lib/site-packages/openai/types/chat/chat_completion_message_param.py b/.venv/Lib/site-packages/openai/types/chat/chat_completion_message_param.py new file mode 100644 index 00000000..a3644a53 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat/chat_completion_message_param.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Union + +from .chat_completion_tool_message_param import ChatCompletionToolMessageParam +from .chat_completion_user_message_param import ChatCompletionUserMessageParam +from .chat_completion_system_message_param import ChatCompletionSystemMessageParam +from .chat_completion_function_message_param import ChatCompletionFunctionMessageParam +from .chat_completion_assistant_message_param import ChatCompletionAssistantMessageParam + +__all__ = ["ChatCompletionMessageParam"] + +ChatCompletionMessageParam = Union[ + ChatCompletionSystemMessageParam, + ChatCompletionUserMessageParam, + ChatCompletionAssistantMessageParam, + ChatCompletionToolMessageParam, + ChatCompletionFunctionMessageParam, +] diff --git a/.venv/Lib/site-packages/openai/types/chat/chat_completion_message_tool_call.py b/.venv/Lib/site-packages/openai/types/chat/chat_completion_message_tool_call.py new file mode 100644 index 00000000..4fec6670 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat/chat_completion_message_tool_call.py @@ -0,0 +1,31 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["ChatCompletionMessageToolCall", "Function"] + + +class Function(BaseModel): + arguments: str + """ + The arguments to call the function with, as generated by the model in JSON + format. Note that the model does not always generate valid JSON, and may + hallucinate parameters not defined by your function schema. Validate the + arguments in your code before calling your function. + """ + + name: str + """The name of the function to call.""" + + +class ChatCompletionMessageToolCall(BaseModel): + id: str + """The ID of the tool call.""" + + function: Function + """The function that the model called.""" + + type: Literal["function"] + """The type of the tool. 
Currently, only `function` is supported.""" diff --git a/.venv/Lib/site-packages/openai/types/chat/chat_completion_message_tool_call_param.py b/.venv/Lib/site-packages/openai/types/chat/chat_completion_message_tool_call_param.py new file mode 100644 index 00000000..f616c363 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat/chat_completion_message_tool_call_param.py @@ -0,0 +1,31 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["ChatCompletionMessageToolCallParam", "Function"] + + +class Function(TypedDict, total=False): + arguments: Required[str] + """ + The arguments to call the function with, as generated by the model in JSON + format. Note that the model does not always generate valid JSON, and may + hallucinate parameters not defined by your function schema. Validate the + arguments in your code before calling your function. + """ + + name: Required[str] + """The name of the function to call.""" + + +class ChatCompletionMessageToolCallParam(TypedDict, total=False): + id: Required[str] + """The ID of the tool call.""" + + function: Required[Function] + """The function that the model called.""" + + type: Required[Literal["function"]] + """The type of the tool. Currently, only `function` is supported.""" diff --git a/.venv/Lib/site-packages/openai/types/chat/chat_completion_named_tool_choice_param.py b/.venv/Lib/site-packages/openai/types/chat/chat_completion_named_tool_choice_param.py new file mode 100644 index 00000000..369f8b42 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat/chat_completion_named_tool_choice_param.py @@ -0,0 +1,19 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["ChatCompletionNamedToolChoiceParam", "Function"] + + +class Function(TypedDict, total=False): + name: Required[str] + """The name of the function to call.""" + + +class ChatCompletionNamedToolChoiceParam(TypedDict, total=False): + function: Required[Function] + + type: Required[Literal["function"]] + """The type of the tool. Currently, only `function` is supported.""" diff --git a/.venv/Lib/site-packages/openai/types/chat/chat_completion_role.py b/.venv/Lib/site-packages/openai/types/chat/chat_completion_role.py new file mode 100644 index 00000000..1fd83888 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat/chat_completion_role.py @@ -0,0 +1,7 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal + +__all__ = ["ChatCompletionRole"] + +ChatCompletionRole = Literal["system", "user", "assistant", "tool", "function"] diff --git a/.venv/Lib/site-packages/openai/types/chat/chat_completion_system_message_param.py b/.venv/Lib/site-packages/openai/types/chat/chat_completion_system_message_param.py new file mode 100644 index 00000000..94bb3f63 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat/chat_completion_system_message_param.py @@ -0,0 +1,22 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["ChatCompletionSystemMessageParam"] + + +class ChatCompletionSystemMessageParam(TypedDict, total=False): + content: Required[str] + """The contents of the system message.""" + + role: Required[Literal["system"]] + """The role of the messages author, in this case `system`.""" + + name: str + """An optional name for the participant. + + Provides the model information to differentiate between participants of the same + role. 
+ """ diff --git a/.venv/Lib/site-packages/openai/types/chat/chat_completion_token_logprob.py b/.venv/Lib/site-packages/openai/types/chat/chat_completion_token_logprob.py new file mode 100644 index 00000000..c69e2589 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat/chat_completion_token_logprob.py @@ -0,0 +1,57 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional + +from ..._models import BaseModel + +__all__ = ["ChatCompletionTokenLogprob", "TopLogprob"] + + +class TopLogprob(BaseModel): + token: str + """The token.""" + + bytes: Optional[List[int]] = None + """A list of integers representing the UTF-8 bytes representation of the token. + + Useful in instances where characters are represented by multiple tokens and + their byte representations must be combined to generate the correct text + representation. Can be `null` if there is no bytes representation for the token. + """ + + logprob: float + """The log probability of this token, if it is within the top 20 most likely + tokens. + + Otherwise, the value `-9999.0` is used to signify that the token is very + unlikely. + """ + + +class ChatCompletionTokenLogprob(BaseModel): + token: str + """The token.""" + + bytes: Optional[List[int]] = None + """A list of integers representing the UTF-8 bytes representation of the token. + + Useful in instances where characters are represented by multiple tokens and + their byte representations must be combined to generate the correct text + representation. Can be `null` if there is no bytes representation for the token. + """ + + logprob: float + """The log probability of this token, if it is within the top 20 most likely + tokens. + + Otherwise, the value `-9999.0` is used to signify that the token is very + unlikely. + """ + + top_logprobs: List[TopLogprob] + """List of the most likely tokens and their log probability, at this token + position. 
+ + In rare cases, there may be fewer than the number of requested `top_logprobs` + returned. + """ diff --git a/.venv/Lib/site-packages/openai/types/chat/chat_completion_tool_choice_option_param.py b/.venv/Lib/site-packages/openai/types/chat/chat_completion_tool_choice_option_param.py new file mode 100644 index 00000000..1d3c2506 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat/chat_completion_tool_choice_option_param.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union +from typing_extensions import Literal + +from .chat_completion_named_tool_choice_param import ChatCompletionNamedToolChoiceParam + +__all__ = ["ChatCompletionToolChoiceOptionParam"] + +ChatCompletionToolChoiceOptionParam = Union[Literal["none", "auto", "required"], ChatCompletionNamedToolChoiceParam] diff --git a/.venv/Lib/site-packages/openai/types/chat/chat_completion_tool_message_param.py b/.venv/Lib/site-packages/openai/types/chat/chat_completion_tool_message_param.py new file mode 100644 index 00000000..5c590e03 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat/chat_completion_tool_message_param.py @@ -0,0 +1,18 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["ChatCompletionToolMessageParam"] + + +class ChatCompletionToolMessageParam(TypedDict, total=False): + content: Required[str] + """The contents of the tool message.""" + + role: Required[Literal["tool"]] + """The role of the messages author, in this case `tool`.""" + + tool_call_id: Required[str] + """Tool call that this message is responding to.""" diff --git a/.venv/Lib/site-packages/openai/types/chat/chat_completion_tool_param.py b/.venv/Lib/site-packages/openai/types/chat/chat_completion_tool_param.py new file mode 100644 index 00000000..0cf6ea72 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat/chat_completion_tool_param.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +from ...types import shared_params + +__all__ = ["ChatCompletionToolParam"] + + +class ChatCompletionToolParam(TypedDict, total=False): + function: Required[shared_params.FunctionDefinition] + + type: Required[Literal["function"]] + """The type of the tool. Currently, only `function` is supported.""" diff --git a/.venv/Lib/site-packages/openai/types/chat/chat_completion_user_message_param.py b/.venv/Lib/site-packages/openai/types/chat/chat_completion_user_message_param.py new file mode 100644 index 00000000..5c15322a --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat/chat_completion_user_message_param.py @@ -0,0 +1,25 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Union, Iterable +from typing_extensions import Literal, Required, TypedDict + +from .chat_completion_content_part_param import ChatCompletionContentPartParam + +__all__ = ["ChatCompletionUserMessageParam"] + + +class ChatCompletionUserMessageParam(TypedDict, total=False): + content: Required[Union[str, Iterable[ChatCompletionContentPartParam]]] + """The contents of the user message.""" + + role: Required[Literal["user"]] + """The role of the messages author, in this case `user`.""" + + name: str + """An optional name for the participant. + + Provides the model information to differentiate between participants of the same + role. + """ diff --git a/.venv/Lib/site-packages/openai/types/chat/completion_create_params.py b/.venv/Lib/site-packages/openai/types/chat/completion_create_params.py new file mode 100644 index 00000000..d30da60b --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat/completion_create_params.py @@ -0,0 +1,257 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Dict, List, Union, Iterable, Optional +from typing_extensions import Literal, Required, TypedDict + +from ...types import shared_params +from ..chat_model import ChatModel +from .chat_completion_tool_param import ChatCompletionToolParam +from .chat_completion_message_param import ChatCompletionMessageParam +from .chat_completion_tool_choice_option_param import ChatCompletionToolChoiceOptionParam +from .chat_completion_function_call_option_param import ChatCompletionFunctionCallOptionParam + +__all__ = [ + "CompletionCreateParamsBase", + "FunctionCall", + "Function", + "ResponseFormat", + "CompletionCreateParamsNonStreaming", + "CompletionCreateParamsStreaming", +] + + +class CompletionCreateParamsBase(TypedDict, total=False): + messages: Required[Iterable[ChatCompletionMessageParam]] + """A list of messages comprising the conversation so far. + + [Example Python code](https://cookbook.openai.com/examples/how_to_format_inputs_to_chatgpt_models). + """ + + model: Required[Union[str, ChatModel]] + """ID of the model to use. + + See the + [model endpoint compatibility](https://platform.openai.com/docs/models/model-endpoint-compatibility) + table for details on which models work with the Chat API. + """ + + frequency_penalty: Optional[float] + """Number between -2.0 and 2.0. + + Positive values penalize new tokens based on their existing frequency in the + text so far, decreasing the model's likelihood to repeat the same line verbatim. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + """ + + function_call: FunctionCall + """Deprecated in favor of `tool_choice`. + + Controls which (if any) function is called by the model. `none` means the model + will not call a function and instead generates a message. `auto` means the model + can pick between generating a message or calling a function. 
Specifying a + particular function via `{"name": "my_function"}` forces the model to call that + function. + + `none` is the default when no functions are present. `auto` is the default if + functions are present. + """ + + functions: Iterable[Function] + """Deprecated in favor of `tools`. + + A list of functions the model may generate JSON inputs for. + """ + + logit_bias: Optional[Dict[str, int]] + """Modify the likelihood of specified tokens appearing in the completion. + + Accepts a JSON object that maps tokens (specified by their token ID in the + tokenizer) to an associated bias value from -100 to 100. Mathematically, the + bias is added to the logits generated by the model prior to sampling. The exact + effect will vary per model, but values between -1 and 1 should decrease or + increase likelihood of selection; values like -100 or 100 should result in a ban + or exclusive selection of the relevant token. + """ + + logprobs: Optional[bool] + """Whether to return log probabilities of the output tokens or not. + + If true, returns the log probabilities of each output token returned in the + `content` of `message`. + """ + + max_tokens: Optional[int] + """ + The maximum number of [tokens](/tokenizer) that can be generated in the chat + completion. + + The total length of input tokens and generated tokens is limited by the model's + context length. + [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + for counting tokens. + """ + + n: Optional[int] + """How many chat completion choices to generate for each input message. + + Note that you will be charged based on the number of generated tokens across all + of the choices. Keep `n` as `1` to minimize costs. + """ + + presence_penalty: Optional[float] + """Number between -2.0 and 2.0. + + Positive values penalize new tokens based on whether they appear in the text so + far, increasing the model's likelihood to talk about new topics. 
+ + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + """ + + response_format: ResponseFormat + """An object specifying the format that the model must output. + + Compatible with + [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and + all GPT-3.5 Turbo models newer than `gpt-3.5-turbo-1106`. + + Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + message the model generates is valid JSON. + + **Important:** when using JSON mode, you **must** also instruct the model to + produce JSON yourself via a system or user message. Without this, the model may + generate an unending stream of whitespace until the generation reaches the token + limit, resulting in a long-running and seemingly "stuck" request. Also note that + the message content may be partially cut off if `finish_reason="length"`, which + indicates the generation exceeded `max_tokens` or the conversation exceeded the + max context length. + """ + + seed: Optional[int] + """ + This feature is in Beta. If specified, our system will make a best effort to + sample deterministically, such that repeated requests with the same `seed` and + parameters should return the same result. Determinism is not guaranteed, and you + should refer to the `system_fingerprint` response parameter to monitor changes + in the backend. + """ + + stop: Union[Optional[str], List[str]] + """Up to 4 sequences where the API will stop generating further tokens.""" + + temperature: Optional[float] + """What sampling temperature to use, between 0 and 2. + + Higher values like 0.8 will make the output more random, while lower values like + 0.2 will make it more focused and deterministic. + + We generally recommend altering this or `top_p` but not both. + """ + + tool_choice: ChatCompletionToolChoiceOptionParam + """ + Controls which (if any) tool is called by the model. 
`none` means the model will + not call any tool and instead generates a message. `auto` means the model can + pick between generating a message or calling one or more tools. `required` means + the model must call one or more tools. Specifying a particular tool via + `{"type": "function", "function": {"name": "my_function"}}` forces the model to + call that tool. + + `none` is the default when no tools are present. `auto` is the default if tools + are present. + """ + + tools: Iterable[ChatCompletionToolParam] + """A list of tools the model may call. + + Currently, only functions are supported as a tool. Use this to provide a list of + functions the model may generate JSON inputs for. A max of 128 functions are + supported. + """ + + top_logprobs: Optional[int] + """ + An integer between 0 and 20 specifying the number of most likely tokens to + return at each token position, each with an associated log probability. + `logprobs` must be set to `true` if this parameter is used. + """ + + top_p: Optional[float] + """ + An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or `temperature` but not both. + """ + + user: str + """ + A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). + """ + + +FunctionCall = Union[Literal["none", "auto"], ChatCompletionFunctionCallOptionParam] + + +class Function(TypedDict, total=False): + name: Required[str] + """The name of the function to be called. + + Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length + of 64. 
+ """ + + description: str + """ + A description of what the function does, used by the model to choose when and + how to call the function. + """ + + parameters: shared_params.FunctionParameters + """The parameters the functions accepts, described as a JSON Schema object. + + See the + [guide](https://platform.openai.com/docs/guides/text-generation/function-calling) + for examples, and the + [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for + documentation about the format. + + Omitting `parameters` defines a function with an empty parameter list. + """ + + +class ResponseFormat(TypedDict, total=False): + type: Literal["text", "json_object"] + """Must be one of `text` or `json_object`.""" + + +class CompletionCreateParamsNonStreaming(CompletionCreateParamsBase): + stream: Optional[Literal[False]] + """If set, partial message deltas will be sent, like in ChatGPT. + + Tokens will be sent as data-only + [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + as they become available, with the stream terminated by a `data: [DONE]` + message. + [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + """ + + +class CompletionCreateParamsStreaming(CompletionCreateParamsBase): + stream: Required[Literal[True]] + """If set, partial message deltas will be sent, like in ChatGPT. + + Tokens will be sent as data-only + [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + as they become available, with the stream terminated by a `data: [DONE]` + message. + [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). 
+ """ + + +CompletionCreateParams = Union[CompletionCreateParamsNonStreaming, CompletionCreateParamsStreaming] diff --git a/.venv/Lib/site-packages/openai/types/chat_model.py b/.venv/Lib/site-packages/openai/types/chat_model.py new file mode 100644 index 00000000..219dab51 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/chat_model.py @@ -0,0 +1,27 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal + +__all__ = ["ChatModel"] + +ChatModel = Literal[ + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-0125-preview", + "gpt-4-turbo-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-0314", + "gpt-4-0613", + "gpt-4-32k", + "gpt-4-32k-0314", + "gpt-4-32k-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0301", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k-0613", +] diff --git a/.venv/Lib/site-packages/openai/types/completion.py b/.venv/Lib/site-packages/openai/types/completion.py new file mode 100644 index 00000000..d3b3102a --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/completion.py @@ -0,0 +1,37 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import List, Optional +from typing_extensions import Literal + +from .._models import BaseModel +from .completion_usage import CompletionUsage +from .completion_choice import CompletionChoice + +__all__ = ["Completion"] + + +class Completion(BaseModel): + id: str + """A unique identifier for the completion.""" + + choices: List[CompletionChoice] + """The list of completion choices the model generated for the input prompt.""" + + created: int + """The Unix timestamp (in seconds) of when the completion was created.""" + + model: str + """The model used for completion.""" + + object: Literal["text_completion"] + """The object type, which is always "text_completion" """ + + system_fingerprint: Optional[str] = None + """This fingerprint represents the backend configuration that the model runs with. + + Can be used in conjunction with the `seed` request parameter to understand when + backend changes have been made that might impact determinism. + """ + + usage: Optional[CompletionUsage] = None + """Usage statistics for the completion request.""" diff --git a/.venv/Lib/site-packages/openai/types/completion_choice.py b/.venv/Lib/site-packages/openai/types/completion_choice.py new file mode 100644 index 00000000..d948ebc9 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/completion_choice.py @@ -0,0 +1,35 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Dict, List, Optional +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = ["CompletionChoice", "Logprobs"] + + +class Logprobs(BaseModel): + text_offset: Optional[List[int]] = None + + token_logprobs: Optional[List[float]] = None + + tokens: Optional[List[str]] = None + + top_logprobs: Optional[List[Dict[str, float]]] = None + + +class CompletionChoice(BaseModel): + finish_reason: Literal["stop", "length", "content_filter"] + """The reason the model stopped generating tokens. 
+ + This will be `stop` if the model hit a natural stop point or a provided stop + sequence, `length` if the maximum number of tokens specified in the request was + reached, or `content_filter` if content was omitted due to a flag from our + content filters. + """ + + index: int + + logprobs: Optional[Logprobs] = None + + text: str diff --git a/.venv/Lib/site-packages/openai/types/completion_create_params.py b/.venv/Lib/site-packages/openai/types/completion_create_params.py new file mode 100644 index 00000000..36267e90 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/completion_create_params.py @@ -0,0 +1,182 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict, List, Union, Iterable, Optional +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["CompletionCreateParamsBase", "CompletionCreateParamsNonStreaming", "CompletionCreateParamsStreaming"] + + +class CompletionCreateParamsBase(TypedDict, total=False): + model: Required[Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]]] + """ID of the model to use. + + You can use the + [List models](https://platform.openai.com/docs/api-reference/models/list) API to + see all of your available models, or see our + [Model overview](https://platform.openai.com/docs/models/overview) for + descriptions of them. + """ + + prompt: Required[Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None]] + """ + The prompt(s) to generate completions for, encoded as a string, array of + strings, array of tokens, or array of token arrays. + + Note that <|endoftext|> is the document separator that the model sees during + training, so if a prompt is not specified the model will generate as if from the + beginning of a new document. 
+ """ + + best_of: Optional[int] + """ + Generates `best_of` completions server-side and returns the "best" (the one with + the highest log probability per token). Results cannot be streamed. + + When used with `n`, `best_of` controls the number of candidate completions and + `n` specifies how many to return – `best_of` must be greater than `n`. + + **Note:** Because this parameter generates many completions, it can quickly + consume your token quota. Use carefully and ensure that you have reasonable + settings for `max_tokens` and `stop`. + """ + + echo: Optional[bool] + """Echo back the prompt in addition to the completion""" + + frequency_penalty: Optional[float] + """Number between -2.0 and 2.0. + + Positive values penalize new tokens based on their existing frequency in the + text so far, decreasing the model's likelihood to repeat the same line verbatim. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + """ + + logit_bias: Optional[Dict[str, int]] + """Modify the likelihood of specified tokens appearing in the completion. + + Accepts a JSON object that maps tokens (specified by their token ID in the GPT + tokenizer) to an associated bias value from -100 to 100. You can use this + [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs. + Mathematically, the bias is added to the logits generated by the model prior to + sampling. The exact effect will vary per model, but values between -1 and 1 + should decrease or increase likelihood of selection; values like -100 or 100 + should result in a ban or exclusive selection of the relevant token. + + As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token + from being generated. + """ + + logprobs: Optional[int] + """ + Include the log probabilities on the `logprobs` most likely output tokens, as + well the chosen tokens. 
For example, if `logprobs` is 5, the API will return a + list of the 5 most likely tokens. The API will always return the `logprob` of + the sampled token, so there may be up to `logprobs+1` elements in the response. + + The maximum value for `logprobs` is 5. + """ + + max_tokens: Optional[int] + """ + The maximum number of [tokens](/tokenizer) that can be generated in the + completion. + + The token count of your prompt plus `max_tokens` cannot exceed the model's + context length. + [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + for counting tokens. + """ + + n: Optional[int] + """How many completions to generate for each prompt. + + **Note:** Because this parameter generates many completions, it can quickly + consume your token quota. Use carefully and ensure that you have reasonable + settings for `max_tokens` and `stop`. + """ + + presence_penalty: Optional[float] + """Number between -2.0 and 2.0. + + Positive values penalize new tokens based on whether they appear in the text so + far, increasing the model's likelihood to talk about new topics. + + [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation/parameter-details) + """ + + seed: Optional[int] + """ + If specified, our system will make a best effort to sample deterministically, + such that repeated requests with the same `seed` and parameters should return + the same result. + + Determinism is not guaranteed, and you should refer to the `system_fingerprint` + response parameter to monitor changes in the backend. + """ + + stop: Union[Optional[str], List[str], None] + """Up to 4 sequences where the API will stop generating further tokens. + + The returned text will not contain the stop sequence. + """ + + suffix: Optional[str] + """The suffix that comes after a completion of inserted text. + + This parameter is only supported for `gpt-3.5-turbo-instruct`. 
+ """ + + temperature: Optional[float] + """What sampling temperature to use, between 0 and 2. + + Higher values like 0.8 will make the output more random, while lower values like + 0.2 will make it more focused and deterministic. + + We generally recommend altering this or `top_p` but not both. + """ + + top_p: Optional[float] + """ + An alternative to sampling with temperature, called nucleus sampling, where the + model considers the results of the tokens with top_p probability mass. So 0.1 + means only the tokens comprising the top 10% probability mass are considered. + + We generally recommend altering this or `temperature` but not both. + """ + + user: str + """ + A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). + """ + + +class CompletionCreateParamsNonStreaming(CompletionCreateParamsBase): + stream: Optional[Literal[False]] + """Whether to stream back partial progress. + + If set, tokens will be sent as data-only + [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + as they become available, with the stream terminated by a `data: [DONE]` + message. + [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + """ + + +class CompletionCreateParamsStreaming(CompletionCreateParamsBase): + stream: Required[Literal[True]] + """Whether to stream back partial progress. + + If set, tokens will be sent as data-only + [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + as they become available, with the stream terminated by a `data: [DONE]` + message. + [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). 
+ """ + + +CompletionCreateParams = Union[CompletionCreateParamsNonStreaming, CompletionCreateParamsStreaming] diff --git a/.venv/Lib/site-packages/openai/types/completion_usage.py b/.venv/Lib/site-packages/openai/types/completion_usage.py new file mode 100644 index 00000000..0d57b965 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/completion_usage.py @@ -0,0 +1,18 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + + + +from .._models import BaseModel + +__all__ = ["CompletionUsage"] + + +class CompletionUsage(BaseModel): + completion_tokens: int + """Number of tokens in the generated completion.""" + + prompt_tokens: int + """Number of tokens in the prompt.""" + + total_tokens: int + """Total number of tokens used in the request (prompt + completion).""" diff --git a/.venv/Lib/site-packages/openai/types/create_embedding_response.py b/.venv/Lib/site-packages/openai/types/create_embedding_response.py new file mode 100644 index 00000000..eff247a1 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/create_embedding_response.py @@ -0,0 +1,31 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import List +from typing_extensions import Literal + +from .._models import BaseModel +from .embedding import Embedding + +__all__ = ["CreateEmbeddingResponse", "Usage"] + + +class Usage(BaseModel): + prompt_tokens: int + """The number of tokens used by the prompt.""" + + total_tokens: int + """The total number of tokens used by the request.""" + + +class CreateEmbeddingResponse(BaseModel): + data: List[Embedding] + """The list of embeddings generated by the model.""" + + model: str + """The name of the model used to generate the embedding.""" + + object: Literal["list"] + """The object type, which is always "list".""" + + usage: Usage + """The usage information for the request.""" diff --git a/.venv/Lib/site-packages/openai/types/embedding.py b/.venv/Lib/site-packages/openai/types/embedding.py new file mode 100644 index 00000000..769b1d16 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/embedding.py @@ -0,0 +1,23 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = ["Embedding"] + + +class Embedding(BaseModel): + embedding: List[float] + """The embedding vector, which is a list of floats. + + The length of vector depends on the model as listed in the + [embedding guide](https://platform.openai.com/docs/guides/embeddings). + """ + + index: int + """The index of the embedding in the list of embeddings.""" + + object: Literal["embedding"] + """The object type, which is always "embedding".""" diff --git a/.venv/Lib/site-packages/openai/types/embedding_create_params.py b/.venv/Lib/site-packages/openai/types/embedding_create_params.py new file mode 100644 index 00000000..930b3b79 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/embedding_create_params.py @@ -0,0 +1,50 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import List, Union, Iterable +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["EmbeddingCreateParams"] + + +class EmbeddingCreateParams(TypedDict, total=False): + input: Required[Union[str, List[str], Iterable[int], Iterable[Iterable[int]]]] + """Input text to embed, encoded as a string or array of tokens. + + To embed multiple inputs in a single request, pass an array of strings or array + of token arrays. The input must not exceed the max input tokens for the model + (8192 tokens for `text-embedding-ada-002`), cannot be an empty string, and any + array must be 2048 dimensions or less. + [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + for counting tokens. + """ + + model: Required[Union[str, Literal["text-embedding-ada-002", "text-embedding-3-small", "text-embedding-3-large"]]] + """ID of the model to use. + + You can use the + [List models](https://platform.openai.com/docs/api-reference/models/list) API to + see all of your available models, or see our + [Model overview](https://platform.openai.com/docs/models/overview) for + descriptions of them. + """ + + dimensions: int + """The number of dimensions the resulting output embeddings should have. + + Only supported in `text-embedding-3` and later models. + """ + + encoding_format: Literal["float", "base64"] + """The format to return the embeddings in. + + Can be either `float` or [`base64`](https://pypi.org/project/pybase64/). + """ + + user: str + """ + A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). 
+ """ diff --git a/.venv/Lib/site-packages/openai/types/file_content.py b/.venv/Lib/site-packages/openai/types/file_content.py new file mode 100644 index 00000000..b4aa08a9 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/file_content.py @@ -0,0 +1,6 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + + +__all__ = ["FileContent"] + +FileContent = str diff --git a/.venv/Lib/site-packages/openai/types/file_create_params.py b/.venv/Lib/site-packages/openai/types/file_create_params.py new file mode 100644 index 00000000..26e2da33 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/file_create_params.py @@ -0,0 +1,25 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +from .._types import FileTypes + +__all__ = ["FileCreateParams"] + + +class FileCreateParams(TypedDict, total=False): + file: Required[FileTypes] + """The File object (not file name) to be uploaded.""" + + purpose: Required[Literal["fine-tune", "assistants"]] + """The intended purpose of the uploaded file. + + Use "fine-tune" for + [Fine-tuning](https://platform.openai.com/docs/api-reference/fine-tuning) and + "assistants" for + [Assistants](https://platform.openai.com/docs/api-reference/assistants) and + [Messages](https://platform.openai.com/docs/api-reference/messages). This allows + us to validate the format of the uploaded file is correct for fine-tuning. + """ diff --git a/.venv/Lib/site-packages/openai/types/file_deleted.py b/.venv/Lib/site-packages/openai/types/file_deleted.py new file mode 100644 index 00000000..f25fa87a --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/file_deleted.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = ["FileDeleted"] + + +class FileDeleted(BaseModel): + id: str + + deleted: bool + + object: Literal["file"] diff --git a/.venv/Lib/site-packages/openai/types/file_list_params.py b/.venv/Lib/site-packages/openai/types/file_list_params.py new file mode 100644 index 00000000..212eca13 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/file_list_params.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["FileListParams"] + + +class FileListParams(TypedDict, total=False): + purpose: str + """Only return files with the given purpose.""" diff --git a/.venv/Lib/site-packages/openai/types/file_object.py b/.venv/Lib/site-packages/openai/types/file_object.py new file mode 100644 index 00000000..589a1faf --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/file_object.py @@ -0,0 +1,46 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = ["FileObject"] + + +class FileObject(BaseModel): + id: str + """The file identifier, which can be referenced in the API endpoints.""" + + bytes: int + """The size of the file, in bytes.""" + + created_at: int + """The Unix timestamp (in seconds) for when the file was created.""" + + filename: str + """The name of the file.""" + + object: Literal["file"] + """The object type, which is always `file`.""" + + purpose: Literal["fine-tune", "fine-tune-results", "assistants", "assistants_output"] + """The intended purpose of the file. + + Supported values are `fine-tune`, `fine-tune-results`, `assistants`, and + `assistants_output`. + """ + + status: Literal["uploaded", "processed", "error"] + """Deprecated. 
+ + The current status of the file, which can be either `uploaded`, `processed`, or + `error`. + """ + + status_details: Optional[str] = None + """Deprecated. + + For details on why a fine-tuning training file failed validation, see the + `error` field on `fine_tuning.job`. + """ diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/__init__.py b/.venv/Lib/site-packages/openai/types/fine_tuning/__init__.py new file mode 100644 index 00000000..92b81329 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/fine_tuning/__init__.py @@ -0,0 +1,14 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from .fine_tuning_job import FineTuningJob as FineTuningJob +from .job_list_params import JobListParams as JobListParams +from .job_create_params import JobCreateParams as JobCreateParams +from .fine_tuning_job_event import FineTuningJobEvent as FineTuningJobEvent +from .job_list_events_params import JobListEventsParams as JobListEventsParams +from .fine_tuning_job_integration import FineTuningJobIntegration as FineTuningJobIntegration +from .fine_tuning_job_wandb_integration import FineTuningJobWandbIntegration as FineTuningJobWandbIntegration +from .fine_tuning_job_wandb_integration_object import ( + FineTuningJobWandbIntegrationObject as FineTuningJobWandbIntegrationObject, +) diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..b27a1dad Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/fine_tuning_job.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/fine_tuning_job.cpython-311.pyc new file mode 100644 index 00000000..1644c1b2 Binary files 
/dev/null and b/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/fine_tuning_job.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/fine_tuning_job_event.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/fine_tuning_job_event.cpython-311.pyc new file mode 100644 index 00000000..9490a918 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/fine_tuning_job_event.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/fine_tuning_job_integration.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/fine_tuning_job_integration.cpython-311.pyc new file mode 100644 index 00000000..47282376 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/fine_tuning_job_integration.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/fine_tuning_job_wandb_integration.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/fine_tuning_job_wandb_integration.cpython-311.pyc new file mode 100644 index 00000000..e1c1027f Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/fine_tuning_job_wandb_integration.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/fine_tuning_job_wandb_integration_object.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/fine_tuning_job_wandb_integration_object.cpython-311.pyc new file mode 100644 index 00000000..5ab5ad90 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/fine_tuning_job_wandb_integration_object.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/job_create_params.cpython-311.pyc 
b/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/job_create_params.cpython-311.pyc new file mode 100644 index 00000000..74d33dcc Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/job_create_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/job_list_events_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/job_list_events_params.cpython-311.pyc new file mode 100644 index 00000000..f267fbf5 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/job_list_events_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/job_list_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/job_list_params.cpython-311.pyc new file mode 100644 index 00000000..7918985d Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/fine_tuning/__pycache__/job_list_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/fine_tuning_job.py b/.venv/Lib/site-packages/openai/types/fine_tuning/fine_tuning_job.py new file mode 100644 index 00000000..7ac87927 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/fine_tuning/fine_tuning_job.py @@ -0,0 +1,120 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Union, Optional +from typing_extensions import Literal + +from ..._models import BaseModel +from .fine_tuning_job_wandb_integration_object import FineTuningJobWandbIntegrationObject + +__all__ = ["FineTuningJob", "Error", "Hyperparameters"] + + +class Error(BaseModel): + code: str + """A machine-readable error code.""" + + message: str + """A human-readable error message.""" + + param: Optional[str] = None + """The parameter that was invalid, usually `training_file` or `validation_file`. 
+ + This field will be null if the failure was not parameter-specific. + """ + + +class Hyperparameters(BaseModel): + n_epochs: Union[Literal["auto"], int] + """The number of epochs to train the model for. + + An epoch refers to one full cycle through the training dataset. "auto" decides + the optimal number of epochs based on the size of the dataset. If setting the + number manually, we support any number between 1 and 50 epochs. + """ + + +class FineTuningJob(BaseModel): + id: str + """The object identifier, which can be referenced in the API endpoints.""" + + created_at: int + """The Unix timestamp (in seconds) for when the fine-tuning job was created.""" + + error: Optional[Error] = None + """ + For fine-tuning jobs that have `failed`, this will contain more information on + the cause of the failure. + """ + + fine_tuned_model: Optional[str] = None + """The name of the fine-tuned model that is being created. + + The value will be null if the fine-tuning job is still running. + """ + + finished_at: Optional[int] = None + """The Unix timestamp (in seconds) for when the fine-tuning job was finished. + + The value will be null if the fine-tuning job is still running. + """ + + hyperparameters: Hyperparameters + """The hyperparameters used for the fine-tuning job. + + See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + for more details. + """ + + model: str + """The base model that is being fine-tuned.""" + + object: Literal["fine_tuning.job"] + """The object type, which is always "fine_tuning.job".""" + + organization_id: str + """The organization that owns the fine-tuning job.""" + + result_files: List[str] + """The compiled results file ID(s) for the fine-tuning job. + + You can retrieve the results with the + [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). 
+ """ + + seed: int + """The seed used for the fine-tuning job.""" + + status: Literal["validating_files", "queued", "running", "succeeded", "failed", "cancelled"] + """ + The current status of the fine-tuning job, which can be either + `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. + """ + + trained_tokens: Optional[int] = None + """The total number of billable tokens processed by this fine-tuning job. + + The value will be null if the fine-tuning job is still running. + """ + + training_file: str + """The file ID used for training. + + You can retrieve the training data with the + [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). + """ + + validation_file: Optional[str] = None + """The file ID used for validation. + + You can retrieve the validation results with the + [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). + """ + + estimated_finish: Optional[int] = None + """ + The Unix timestamp (in seconds) for when the fine-tuning job is estimated to + finish. The value will be null if the fine-tuning job is not running. + """ + + integrations: Optional[List[FineTuningJobWandbIntegrationObject]] = None + """A list of integrations to enable for this fine-tuning job.""" diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/fine_tuning_job_event.py b/.venv/Lib/site-packages/openai/types/fine_tuning/fine_tuning_job_event.py new file mode 100644 index 00000000..2d204bb9 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/fine_tuning/fine_tuning_job_event.py @@ -0,0 +1,19 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["FineTuningJobEvent"] + + +class FineTuningJobEvent(BaseModel): + id: str + + created_at: int + + level: Literal["info", "warn", "error"] + + message: str + + object: Literal["fine_tuning.job.event"] diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/fine_tuning_job_integration.py b/.venv/Lib/site-packages/openai/types/fine_tuning/fine_tuning_job_integration.py new file mode 100644 index 00000000..8076313c --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/fine_tuning/fine_tuning_job_integration.py @@ -0,0 +1,7 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + + + +from .fine_tuning_job_wandb_integration_object import FineTuningJobWandbIntegrationObject + +FineTuningJobIntegration = FineTuningJobWandbIntegrationObject diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/fine_tuning_job_wandb_integration.py b/.venv/Lib/site-packages/openai/types/fine_tuning/fine_tuning_job_wandb_integration.py new file mode 100644 index 00000000..4ac282eb --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/fine_tuning/fine_tuning_job_wandb_integration.py @@ -0,0 +1,33 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional + +from ..._models import BaseModel + +__all__ = ["FineTuningJobWandbIntegration"] + + +class FineTuningJobWandbIntegration(BaseModel): + project: str + """The name of the project that the new run will be created under.""" + + entity: Optional[str] = None + """The entity to use for the run. + + This allows you to set the team or username of the WandB user that you would + like associated with the run. If not set, the default entity for the registered + WandB API key is used. + """ + + name: Optional[str] = None + """A display name to set for the run. + + If not set, we will use the Job ID as the name. 
+ """ + + tags: Optional[List[str]] = None + """A list of tags to be attached to the newly created run. + + These tags are passed through directly to WandB. Some default tags are generated + by OpenAI: "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". + """ diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/fine_tuning_job_wandb_integration_object.py b/.venv/Lib/site-packages/openai/types/fine_tuning/fine_tuning_job_wandb_integration_object.py new file mode 100644 index 00000000..5b94354d --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/fine_tuning/fine_tuning_job_wandb_integration_object.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal + +from ..._models import BaseModel +from .fine_tuning_job_wandb_integration import FineTuningJobWandbIntegration + +__all__ = ["FineTuningJobWandbIntegrationObject"] + + +class FineTuningJobWandbIntegrationObject(BaseModel): + type: Literal["wandb"] + """The type of the integration being enabled for the fine-tuning job""" + + wandb: FineTuningJobWandbIntegration + """The settings for your integration with Weights and Biases. + + This payload specifies the project that metrics will be sent to. Optionally, you + can set an explicit display name for your run, add tags to your run, and set a + default entity (team, username, etc) to be associated with your run. + """ diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/job_create_params.py b/.venv/Lib/site-packages/openai/types/fine_tuning/job_create_params.py new file mode 100644 index 00000000..1925f90d --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/fine_tuning/job_create_params.py @@ -0,0 +1,131 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import List, Union, Iterable, Optional +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["JobCreateParams", "Hyperparameters", "Integration", "IntegrationWandb"] + + +class JobCreateParams(TypedDict, total=False): + model: Required[Union[str, Literal["babbage-002", "davinci-002", "gpt-3.5-turbo"]]] + """The name of the model to fine-tune. + + You can select one of the + [supported models](https://platform.openai.com/docs/guides/fine-tuning/what-models-can-be-fine-tuned). + """ + + training_file: Required[str] + """The ID of an uploaded file that contains training data. + + See [upload file](https://platform.openai.com/docs/api-reference/files/create) + for how to upload a file. + + Your dataset must be formatted as a JSONL file. Additionally, you must upload + your file with the purpose `fine-tune`. + + See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + for more details. + """ + + hyperparameters: Hyperparameters + """The hyperparameters used for the fine-tuning job.""" + + integrations: Optional[Iterable[Integration]] + """A list of integrations to enable for your fine-tuning job.""" + + seed: Optional[int] + """The seed controls the reproducibility of the job. + + Passing in the same seed and job parameters should produce the same results, but + may differ in rare cases. If a seed is not specified, one will be generated for + you. + """ + + suffix: Optional[str] + """ + A string of up to 18 characters that will be added to your fine-tuned model + name. + + For example, a `suffix` of "custom-model-name" would produce a model name like + `ft:gpt-3.5-turbo:openai:custom-model-name:7p4lURel`. + """ + + validation_file: Optional[str] + """The ID of an uploaded file that contains validation data. + + If you provide this file, the data is used to generate validation metrics + periodically during fine-tuning. 
These metrics can be viewed in the fine-tuning + results file. The same data should not be present in both train and validation + files. + + Your dataset must be formatted as a JSONL file. You must upload your file with + the purpose `fine-tune`. + + See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + for more details. + """ + + +class Hyperparameters(TypedDict, total=False): + batch_size: Union[Literal["auto"], int] + """Number of examples in each batch. + + A larger batch size means that model parameters are updated less frequently, but + with lower variance. + """ + + learning_rate_multiplier: Union[Literal["auto"], float] + """Scaling factor for the learning rate. + + A smaller learning rate may be useful to avoid overfitting. + """ + + n_epochs: Union[Literal["auto"], int] + """The number of epochs to train the model for. + + An epoch refers to one full cycle through the training dataset. + """ + + +class IntegrationWandb(TypedDict, total=False): + project: Required[str] + """The name of the project that the new run will be created under.""" + + entity: Optional[str] + """The entity to use for the run. + + This allows you to set the team or username of the WandB user that you would + like associated with the run. If not set, the default entity for the registered + WandB API key is used. + """ + + name: Optional[str] + """A display name to set for the run. + + If not set, we will use the Job ID as the name. + """ + + tags: List[str] + """A list of tags to be attached to the newly created run. + + These tags are passed through directly to WandB. Some default tags are generated + by OpenAI: "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". + """ + + +class Integration(TypedDict, total=False): + type: Required[Literal["wandb"]] + """The type of integration to enable. + + Currently, only "wandb" (Weights and Biases) is supported. 
+ """ + + wandb: Required[IntegrationWandb] + """The settings for your integration with Weights and Biases. + + This payload specifies the project that metrics will be sent to. Optionally, you + can set an explicit display name for your run, add tags to your run, and set a + default entity (team, username, etc) to be associated with your run. + """ diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/job_list_events_params.py b/.venv/Lib/site-packages/openai/types/fine_tuning/job_list_events_params.py new file mode 100644 index 00000000..e1c9a64d --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/fine_tuning/job_list_events_params.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["JobListEventsParams"] + + +class JobListEventsParams(TypedDict, total=False): + after: str + """Identifier for the last event from the previous pagination request.""" + + limit: int + """Number of events to retrieve.""" diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/job_list_params.py b/.venv/Lib/site-packages/openai/types/fine_tuning/job_list_params.py new file mode 100644 index 00000000..5c075ca3 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/fine_tuning/job_list_params.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["JobListParams"] + + +class JobListParams(TypedDict, total=False): + after: str + """Identifier for the last job from the previous pagination request.""" + + limit: int + """Number of fine-tuning jobs to retrieve.""" diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/jobs/__init__.py b/.venv/Lib/site-packages/openai/types/fine_tuning/jobs/__init__.py new file mode 100644 index 00000000..6c93da1b --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/fine_tuning/jobs/__init__.py @@ -0,0 +1,6 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from .checkpoint_list_params import CheckpointListParams as CheckpointListParams +from .fine_tuning_job_checkpoint import FineTuningJobCheckpoint as FineTuningJobCheckpoint diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/jobs/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/fine_tuning/jobs/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..2e9726df Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/fine_tuning/jobs/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/jobs/__pycache__/checkpoint_list_params.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/fine_tuning/jobs/__pycache__/checkpoint_list_params.cpython-311.pyc new file mode 100644 index 00000000..1ff44b5b Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/fine_tuning/jobs/__pycache__/checkpoint_list_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/jobs/__pycache__/fine_tuning_job_checkpoint.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/fine_tuning/jobs/__pycache__/fine_tuning_job_checkpoint.cpython-311.pyc new file mode 100644 index 00000000..1fc1cc7d Binary files /dev/null and 
b/.venv/Lib/site-packages/openai/types/fine_tuning/jobs/__pycache__/fine_tuning_job_checkpoint.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/jobs/checkpoint_list_params.py b/.venv/Lib/site-packages/openai/types/fine_tuning/jobs/checkpoint_list_params.py new file mode 100644 index 00000000..adceb3b2 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/fine_tuning/jobs/checkpoint_list_params.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["CheckpointListParams"] + + +class CheckpointListParams(TypedDict, total=False): + after: str + """Identifier for the last checkpoint ID from the previous pagination request.""" + + limit: int + """Number of checkpoints to retrieve.""" diff --git a/.venv/Lib/site-packages/openai/types/fine_tuning/jobs/fine_tuning_job_checkpoint.py b/.venv/Lib/site-packages/openai/types/fine_tuning/jobs/fine_tuning_job_checkpoint.py new file mode 100644 index 00000000..bd07317a --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/fine_tuning/jobs/fine_tuning_job_checkpoint.py @@ -0,0 +1,47 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Optional +from typing_extensions import Literal + +from ...._models import BaseModel + +__all__ = ["FineTuningJobCheckpoint", "Metrics"] + + +class Metrics(BaseModel): + full_valid_loss: Optional[float] = None + + full_valid_mean_token_accuracy: Optional[float] = None + + step: Optional[float] = None + + train_loss: Optional[float] = None + + train_mean_token_accuracy: Optional[float] = None + + valid_loss: Optional[float] = None + + valid_mean_token_accuracy: Optional[float] = None + + +class FineTuningJobCheckpoint(BaseModel): + id: str + """The checkpoint identifier, which can be referenced in the API endpoints.""" + + created_at: int + """The Unix timestamp (in seconds) for when the checkpoint was created.""" + + fine_tuned_model_checkpoint: str + """The name of the fine-tuned checkpoint model that is created.""" + + fine_tuning_job_id: str + """The name of the fine-tuning job that this checkpoint was created from.""" + + metrics: Metrics + """Metrics at the step number during the fine-tuning job.""" + + object: Literal["fine_tuning.job.checkpoint"] + """The object type, which is always "fine_tuning.job.checkpoint".""" + + step_number: int + """The step number that the checkpoint was created at.""" diff --git a/.venv/Lib/site-packages/openai/types/image.py b/.venv/Lib/site-packages/openai/types/image.py new file mode 100644 index 00000000..f48aa2c7 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/image.py @@ -0,0 +1,24 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional + +from .._models import BaseModel + +__all__ = ["Image"] + + +class Image(BaseModel): + b64_json: Optional[str] = None + """ + The base64-encoded JSON of the generated image, if `response_format` is + `b64_json`. + """ + + revised_prompt: Optional[str] = None + """ + The prompt that was used to generate the image, if there was any revision to the + prompt. 
+ """ + + url: Optional[str] = None + """The URL of the generated image, if `response_format` is `url` (default).""" diff --git a/.venv/Lib/site-packages/openai/types/image_create_variation_params.py b/.venv/Lib/site-packages/openai/types/image_create_variation_params.py new file mode 100644 index 00000000..25493073 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/image_create_variation_params.py @@ -0,0 +1,50 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union, Optional +from typing_extensions import Literal, Required, TypedDict + +from .._types import FileTypes + +__all__ = ["ImageCreateVariationParams"] + + +class ImageCreateVariationParams(TypedDict, total=False): + image: Required[FileTypes] + """The image to use as the basis for the variation(s). + + Must be a valid PNG file, less than 4MB, and square. + """ + + model: Union[str, Literal["dall-e-2"], None] + """The model to use for image generation. + + Only `dall-e-2` is supported at this time. + """ + + n: Optional[int] + """The number of images to generate. + + Must be between 1 and 10. For `dall-e-3`, only `n=1` is supported. + """ + + response_format: Optional[Literal["url", "b64_json"]] + """The format in which the generated images are returned. + + Must be one of `url` or `b64_json`. URLs are only valid for 60 minutes after the + image has been generated. + """ + + size: Optional[Literal["256x256", "512x512", "1024x1024"]] + """The size of the generated images. + + Must be one of `256x256`, `512x512`, or `1024x1024`. + """ + + user: str + """ + A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). 
+ """ diff --git a/.venv/Lib/site-packages/openai/types/image_edit_params.py b/.venv/Lib/site-packages/openai/types/image_edit_params.py new file mode 100644 index 00000000..073456e3 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/image_edit_params.py @@ -0,0 +1,61 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union, Optional +from typing_extensions import Literal, Required, TypedDict + +from .._types import FileTypes + +__all__ = ["ImageEditParams"] + + +class ImageEditParams(TypedDict, total=False): + image: Required[FileTypes] + """The image to edit. + + Must be a valid PNG file, less than 4MB, and square. If mask is not provided, + image must have transparency, which will be used as the mask. + """ + + prompt: Required[str] + """A text description of the desired image(s). + + The maximum length is 1000 characters. + """ + + mask: FileTypes + """An additional image whose fully transparent areas (e.g. + + where alpha is zero) indicate where `image` should be edited. Must be a valid + PNG file, less than 4MB, and have the same dimensions as `image`. + """ + + model: Union[str, Literal["dall-e-2"], None] + """The model to use for image generation. + + Only `dall-e-2` is supported at this time. + """ + + n: Optional[int] + """The number of images to generate. Must be between 1 and 10.""" + + response_format: Optional[Literal["url", "b64_json"]] + """The format in which the generated images are returned. + + Must be one of `url` or `b64_json`. URLs are only valid for 60 minutes after the + image has been generated. + """ + + size: Optional[Literal["256x256", "512x512", "1024x1024"]] + """The size of the generated images. + + Must be one of `256x256`, `512x512`, or `1024x1024`. + """ + + user: str + """ + A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. 
+ [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). + """ diff --git a/.venv/Lib/site-packages/openai/types/image_generate_params.py b/.venv/Lib/site-packages/openai/types/image_generate_params.py new file mode 100644 index 00000000..18c56f8e --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/image_generate_params.py @@ -0,0 +1,63 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union, Optional +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["ImageGenerateParams"] + + +class ImageGenerateParams(TypedDict, total=False): + prompt: Required[str] + """A text description of the desired image(s). + + The maximum length is 1000 characters for `dall-e-2` and 4000 characters for + `dall-e-3`. + """ + + model: Union[str, Literal["dall-e-2", "dall-e-3"], None] + """The model to use for image generation.""" + + n: Optional[int] + """The number of images to generate. + + Must be between 1 and 10. For `dall-e-3`, only `n=1` is supported. + """ + + quality: Literal["standard", "hd"] + """The quality of the image that will be generated. + + `hd` creates images with finer details and greater consistency across the image. + This param is only supported for `dall-e-3`. + """ + + response_format: Optional[Literal["url", "b64_json"]] + """The format in which the generated images are returned. + + Must be one of `url` or `b64_json`. URLs are only valid for 60 minutes after the + image has been generated. + """ + + size: Optional[Literal["256x256", "512x512", "1024x1024", "1792x1024", "1024x1792"]] + """The size of the generated images. + + Must be one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`. Must be one + of `1024x1024`, `1792x1024`, or `1024x1792` for `dall-e-3` models. + """ + + style: Optional[Literal["vivid", "natural"]] + """The style of the generated images. 
+ + Must be one of `vivid` or `natural`. Vivid causes the model to lean towards + generating hyper-real and dramatic images. Natural causes the model to produce + more natural, less hyper-real looking images. This param is only supported for + `dall-e-3`. + """ + + user: str + """ + A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. + [Learn more](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids). + """ diff --git a/.venv/Lib/site-packages/openai/types/images_response.py b/.venv/Lib/site-packages/openai/types/images_response.py new file mode 100644 index 00000000..7cee8131 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/images_response.py @@ -0,0 +1,14 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List + +from .image import Image +from .._models import BaseModel + +__all__ = ["ImagesResponse"] + + +class ImagesResponse(BaseModel): + created: int + + data: List[Image] diff --git a/.venv/Lib/site-packages/openai/types/model.py b/.venv/Lib/site-packages/openai/types/model.py new file mode 100644 index 00000000..2631ee8d --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/model.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = ["Model"] + + +class Model(BaseModel): + id: str + """The model identifier, which can be referenced in the API endpoints.""" + + created: int + """The Unix timestamp (in seconds) when the model was created.""" + + object: Literal["model"] + """The object type, which is always "model".""" + + owned_by: str + """The organization that owns the model.""" diff --git a/.venv/Lib/site-packages/openai/types/model_deleted.py b/.venv/Lib/site-packages/openai/types/model_deleted.py new file mode 100644 index 00000000..d9a48bb1 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/model_deleted.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + + + +from .._models import BaseModel + +__all__ = ["ModelDeleted"] + + +class ModelDeleted(BaseModel): + id: str + + deleted: bool + + object: str diff --git a/.venv/Lib/site-packages/openai/types/moderation.py b/.venv/Lib/site-packages/openai/types/moderation.py new file mode 100644 index 00000000..5aa69182 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/moderation.py @@ -0,0 +1,118 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + + +from pydantic import Field as FieldInfo + +from .._models import BaseModel + +__all__ = ["Moderation", "Categories", "CategoryScores"] + + +class Categories(BaseModel): + harassment: bool + """ + Content that expresses, incites, or promotes harassing language towards any + target. + """ + + harassment_threatening: bool = FieldInfo(alias="harassment/threatening") + """ + Harassment content that also includes violence or serious harm towards any + target. + """ + + hate: bool + """ + Content that expresses, incites, or promotes hate based on race, gender, + ethnicity, religion, nationality, sexual orientation, disability status, or + caste. 
Hateful content aimed at non-protected groups (e.g., chess players) is + harassment. + """ + + hate_threatening: bool = FieldInfo(alias="hate/threatening") + """ + Hateful content that also includes violence or serious harm towards the targeted + group based on race, gender, ethnicity, religion, nationality, sexual + orientation, disability status, or caste. + """ + + self_harm: bool = FieldInfo(alias="self-harm") + """ + Content that promotes, encourages, or depicts acts of self-harm, such as + suicide, cutting, and eating disorders. + """ + + self_harm_instructions: bool = FieldInfo(alias="self-harm/instructions") + """ + Content that encourages performing acts of self-harm, such as suicide, cutting, + and eating disorders, or that gives instructions or advice on how to commit such + acts. + """ + + self_harm_intent: bool = FieldInfo(alias="self-harm/intent") + """ + Content where the speaker expresses that they are engaging or intend to engage + in acts of self-harm, such as suicide, cutting, and eating disorders. + """ + + sexual: bool + """ + Content meant to arouse sexual excitement, such as the description of sexual + activity, or that promotes sexual services (excluding sex education and + wellness). 
+ """ + + sexual_minors: bool = FieldInfo(alias="sexual/minors") + """Sexual content that includes an individual who is under 18 years old.""" + + violence: bool + """Content that depicts death, violence, or physical injury.""" + + violence_graphic: bool = FieldInfo(alias="violence/graphic") + """Content that depicts death, violence, or physical injury in graphic detail.""" + + +class CategoryScores(BaseModel): + harassment: float + """The score for the category 'harassment'.""" + + harassment_threatening: float = FieldInfo(alias="harassment/threatening") + """The score for the category 'harassment/threatening'.""" + + hate: float + """The score for the category 'hate'.""" + + hate_threatening: float = FieldInfo(alias="hate/threatening") + """The score for the category 'hate/threatening'.""" + + self_harm: float = FieldInfo(alias="self-harm") + """The score for the category 'self-harm'.""" + + self_harm_instructions: float = FieldInfo(alias="self-harm/instructions") + """The score for the category 'self-harm/instructions'.""" + + self_harm_intent: float = FieldInfo(alias="self-harm/intent") + """The score for the category 'self-harm/intent'.""" + + sexual: float + """The score for the category 'sexual'.""" + + sexual_minors: float = FieldInfo(alias="sexual/minors") + """The score for the category 'sexual/minors'.""" + + violence: float + """The score for the category 'violence'.""" + + violence_graphic: float = FieldInfo(alias="violence/graphic") + """The score for the category 'violence/graphic'.""" + + +class Moderation(BaseModel): + categories: Categories + """A list of the categories, and whether they are flagged or not.""" + + category_scores: CategoryScores + """A list of the categories along with their scores as predicted by model.""" + + flagged: bool + """Whether any of the below categories are flagged.""" diff --git a/.venv/Lib/site-packages/openai/types/moderation_create_params.py b/.venv/Lib/site-packages/openai/types/moderation_create_params.py new 
file mode 100644 index 00000000..d4608def --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/moderation_create_params.py @@ -0,0 +1,25 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List, Union +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["ModerationCreateParams"] + + +class ModerationCreateParams(TypedDict, total=False): + input: Required[Union[str, List[str]]] + """The input text to classify""" + + model: Union[str, Literal["text-moderation-latest", "text-moderation-stable"]] + """ + Two content moderations models are available: `text-moderation-stable` and + `text-moderation-latest`. + + The default is `text-moderation-latest` which will be automatically upgraded + over time. This ensures you are always using our most accurate model. If you use + `text-moderation-stable`, we will provide advanced notice before updating the + model. Accuracy of `text-moderation-stable` may be slightly lower than for + `text-moderation-latest`. + """ diff --git a/.venv/Lib/site-packages/openai/types/moderation_create_response.py b/.venv/Lib/site-packages/openai/types/moderation_create_response.py new file mode 100644 index 00000000..79684f8a --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/moderation_create_response.py @@ -0,0 +1,19 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import List + +from .._models import BaseModel +from .moderation import Moderation + +__all__ = ["ModerationCreateResponse"] + + +class ModerationCreateResponse(BaseModel): + id: str + """The unique identifier for the moderation request.""" + + model: str + """The model used to generate the moderation results.""" + + results: List[Moderation] + """A list of moderation objects.""" diff --git a/.venv/Lib/site-packages/openai/types/shared/__init__.py b/.venv/Lib/site-packages/openai/types/shared/__init__.py new file mode 100644 index 00000000..e085744e --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/shared/__init__.py @@ -0,0 +1,5 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from .error_object import ErrorObject as ErrorObject +from .function_definition import FunctionDefinition as FunctionDefinition +from .function_parameters import FunctionParameters as FunctionParameters diff --git a/.venv/Lib/site-packages/openai/types/shared/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/shared/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..3f979fe3 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/shared/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/shared/__pycache__/error_object.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/shared/__pycache__/error_object.cpython-311.pyc new file mode 100644 index 00000000..ca4ebe62 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/shared/__pycache__/error_object.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/shared/__pycache__/function_definition.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/shared/__pycache__/function_definition.cpython-311.pyc new file mode 100644 index 00000000..344e4555 Binary files /dev/null and 
b/.venv/Lib/site-packages/openai/types/shared/__pycache__/function_definition.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/shared/__pycache__/function_parameters.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/shared/__pycache__/function_parameters.cpython-311.pyc new file mode 100644 index 00000000..ca0c28f0 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/shared/__pycache__/function_parameters.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/shared/error_object.py b/.venv/Lib/site-packages/openai/types/shared/error_object.py new file mode 100644 index 00000000..32d7045e --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/shared/error_object.py @@ -0,0 +1,17 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional + +from ..._models import BaseModel + +__all__ = ["ErrorObject"] + + +class ErrorObject(BaseModel): + code: Optional[str] = None + + message: str + + param: Optional[str] = None + + type: str diff --git a/.venv/Lib/site-packages/openai/types/shared/function_definition.py b/.venv/Lib/site-packages/openai/types/shared/function_definition.py new file mode 100644 index 00000000..a39116d6 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/shared/function_definition.py @@ -0,0 +1,35 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional + +from ..._models import BaseModel +from .function_parameters import FunctionParameters + +__all__ = ["FunctionDefinition"] + + +class FunctionDefinition(BaseModel): + name: str + """The name of the function to be called. + + Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length + of 64. + """ + + description: Optional[str] = None + """ + A description of what the function does, used by the model to choose when and + how to call the function. 
+ """ + + parameters: Optional[FunctionParameters] = None + """The parameters the functions accepts, described as a JSON Schema object. + + See the + [guide](https://platform.openai.com/docs/guides/text-generation/function-calling) + for examples, and the + [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for + documentation about the format. + + Omitting `parameters` defines a function with an empty parameter list. + """ diff --git a/.venv/Lib/site-packages/openai/types/shared/function_parameters.py b/.venv/Lib/site-packages/openai/types/shared/function_parameters.py new file mode 100644 index 00000000..c9524e4c --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/shared/function_parameters.py @@ -0,0 +1,7 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Dict + +__all__ = ["FunctionParameters"] + +FunctionParameters = Dict[str, object] diff --git a/.venv/Lib/site-packages/openai/types/shared_params/__init__.py b/.venv/Lib/site-packages/openai/types/shared_params/__init__.py new file mode 100644 index 00000000..ef638cb2 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/shared_params/__init__.py @@ -0,0 +1,4 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from .function_definition import FunctionDefinition as FunctionDefinition +from .function_parameters import FunctionParameters as FunctionParameters diff --git a/.venv/Lib/site-packages/openai/types/shared_params/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/shared_params/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..15ad2646 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/shared_params/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/shared_params/__pycache__/function_definition.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/shared_params/__pycache__/function_definition.cpython-311.pyc new file mode 100644 index 00000000..4c8e7615 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/shared_params/__pycache__/function_definition.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/shared_params/__pycache__/function_parameters.cpython-311.pyc b/.venv/Lib/site-packages/openai/types/shared_params/__pycache__/function_parameters.cpython-311.pyc new file mode 100644 index 00000000..8ea08b27 Binary files /dev/null and b/.venv/Lib/site-packages/openai/types/shared_params/__pycache__/function_parameters.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/openai/types/shared_params/function_definition.py b/.venv/Lib/site-packages/openai/types/shared_params/function_definition.py new file mode 100644 index 00000000..58d0203b --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/shared_params/function_definition.py @@ -0,0 +1,36 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +from ...types import shared_params + +__all__ = ["FunctionDefinition"] + + +class FunctionDefinition(TypedDict, total=False): + name: Required[str] + """The name of the function to be called. 
+ + Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length + of 64. + """ + + description: str + """ + A description of what the function does, used by the model to choose when and + how to call the function. + """ + + parameters: shared_params.FunctionParameters + """The parameters the functions accepts, described as a JSON Schema object. + + See the + [guide](https://platform.openai.com/docs/guides/text-generation/function-calling) + for examples, and the + [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for + documentation about the format. + + Omitting `parameters` defines a function with an empty parameter list. + """ diff --git a/.venv/Lib/site-packages/openai/types/shared_params/function_parameters.py b/.venv/Lib/site-packages/openai/types/shared_params/function_parameters.py new file mode 100644 index 00000000..5b40efb7 --- /dev/null +++ b/.venv/Lib/site-packages/openai/types/shared_params/function_parameters.py @@ -0,0 +1,9 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Dict + +__all__ = ["FunctionParameters"] + +FunctionParameters = Dict[str, object] diff --git a/.venv/Lib/site-packages/openai/version.py b/.venv/Lib/site-packages/openai/version.py new file mode 100644 index 00000000..01a08ab5 --- /dev/null +++ b/.venv/Lib/site-packages/openai/version.py @@ -0,0 +1,3 @@ +from ._version import __version__ + +VERSION: str = __version__ diff --git a/.venv/Lib/site-packages/packaging/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/packaging/__pycache__/__init__.cpython-311.pyc index a7cb1a31..421ea65d 100644 Binary files a/.venv/Lib/site-packages/packaging/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/packaging/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/packaging/__pycache__/_elffile.cpython-311.pyc b/.venv/Lib/site-packages/packaging/__pycache__/_elffile.cpython-311.pyc index fe8f84a1..02213eb1 100644 Binary files a/.venv/Lib/site-packages/packaging/__pycache__/_elffile.cpython-311.pyc and b/.venv/Lib/site-packages/packaging/__pycache__/_elffile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/packaging/__pycache__/_manylinux.cpython-311.pyc b/.venv/Lib/site-packages/packaging/__pycache__/_manylinux.cpython-311.pyc index 2403dc5c..c41d4995 100644 Binary files a/.venv/Lib/site-packages/packaging/__pycache__/_manylinux.cpython-311.pyc and b/.venv/Lib/site-packages/packaging/__pycache__/_manylinux.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/packaging/__pycache__/_musllinux.cpython-311.pyc b/.venv/Lib/site-packages/packaging/__pycache__/_musllinux.cpython-311.pyc index 1a44d8d4..19f222ed 100644 Binary files a/.venv/Lib/site-packages/packaging/__pycache__/_musllinux.cpython-311.pyc and b/.venv/Lib/site-packages/packaging/__pycache__/_musllinux.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/packaging/__pycache__/_parser.cpython-311.pyc 
b/.venv/Lib/site-packages/packaging/__pycache__/_parser.cpython-311.pyc index 83c223d7..4e4de4c4 100644 Binary files a/.venv/Lib/site-packages/packaging/__pycache__/_parser.cpython-311.pyc and b/.venv/Lib/site-packages/packaging/__pycache__/_parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/packaging/__pycache__/_structures.cpython-311.pyc b/.venv/Lib/site-packages/packaging/__pycache__/_structures.cpython-311.pyc index 1c492daf..1adf9c12 100644 Binary files a/.venv/Lib/site-packages/packaging/__pycache__/_structures.cpython-311.pyc and b/.venv/Lib/site-packages/packaging/__pycache__/_structures.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/packaging/__pycache__/_tokenizer.cpython-311.pyc b/.venv/Lib/site-packages/packaging/__pycache__/_tokenizer.cpython-311.pyc index 0ffc7c7e..65d4b735 100644 Binary files a/.venv/Lib/site-packages/packaging/__pycache__/_tokenizer.cpython-311.pyc and b/.venv/Lib/site-packages/packaging/__pycache__/_tokenizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/packaging/__pycache__/markers.cpython-311.pyc b/.venv/Lib/site-packages/packaging/__pycache__/markers.cpython-311.pyc index c4457443..ba722f74 100644 Binary files a/.venv/Lib/site-packages/packaging/__pycache__/markers.cpython-311.pyc and b/.venv/Lib/site-packages/packaging/__pycache__/markers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/packaging/__pycache__/requirements.cpython-311.pyc b/.venv/Lib/site-packages/packaging/__pycache__/requirements.cpython-311.pyc index b4a6838b..f07d25ac 100644 Binary files a/.venv/Lib/site-packages/packaging/__pycache__/requirements.cpython-311.pyc and b/.venv/Lib/site-packages/packaging/__pycache__/requirements.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/packaging/__pycache__/specifiers.cpython-311.pyc b/.venv/Lib/site-packages/packaging/__pycache__/specifiers.cpython-311.pyc index 1bc62fb3..772d313f 100644 Binary files 
a/.venv/Lib/site-packages/packaging/__pycache__/specifiers.cpython-311.pyc and b/.venv/Lib/site-packages/packaging/__pycache__/specifiers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/packaging/__pycache__/tags.cpython-311.pyc b/.venv/Lib/site-packages/packaging/__pycache__/tags.cpython-311.pyc index c38f13c8..47ae4c72 100644 Binary files a/.venv/Lib/site-packages/packaging/__pycache__/tags.cpython-311.pyc and b/.venv/Lib/site-packages/packaging/__pycache__/tags.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/packaging/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/packaging/__pycache__/utils.cpython-311.pyc index c43e4527..53a5c1ea 100644 Binary files a/.venv/Lib/site-packages/packaging/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/packaging/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/packaging/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/packaging/__pycache__/version.cpython-311.pyc index 988c8370..ab47a3e8 100644 Binary files a/.venv/Lib/site-packages/packaging/__pycache__/version.cpython-311.pyc and b/.venv/Lib/site-packages/packaging/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/__pycache__/__init__.cpython-311.pyc index c94e156e..57e5c102 100644 Binary files a/.venv/Lib/site-packages/pandas/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/__pycache__/_typing.cpython-311.pyc b/.venv/Lib/site-packages/pandas/__pycache__/_typing.cpython-311.pyc index a2c079bf..926aad6d 100644 Binary files a/.venv/Lib/site-packages/pandas/__pycache__/_typing.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/__pycache__/_typing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/__pycache__/_version.cpython-311.pyc 
b/.venv/Lib/site-packages/pandas/__pycache__/_version.cpython-311.pyc index 873c4452..61b8371d 100644 Binary files a/.venv/Lib/site-packages/pandas/__pycache__/_version.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/__pycache__/_version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/__pycache__/testing.cpython-311.pyc b/.venv/Lib/site-packages/pandas/__pycache__/testing.cpython-311.pyc index 855e81a1..f7e8b889 100644 Binary files a/.venv/Lib/site-packages/pandas/__pycache__/testing.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/__pycache__/testing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/_config/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/_config/__pycache__/__init__.cpython-311.pyc index 769ad925..02899250 100644 Binary files a/.venv/Lib/site-packages/pandas/_config/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/_config/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/_config/__pycache__/config.cpython-311.pyc b/.venv/Lib/site-packages/pandas/_config/__pycache__/config.cpython-311.pyc index 9ed7c71e..ffa72c41 100644 Binary files a/.venv/Lib/site-packages/pandas/_config/__pycache__/config.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/_config/__pycache__/config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/_config/__pycache__/dates.cpython-311.pyc b/.venv/Lib/site-packages/pandas/_config/__pycache__/dates.cpython-311.pyc index 43df836b..67709d31 100644 Binary files a/.venv/Lib/site-packages/pandas/_config/__pycache__/dates.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/_config/__pycache__/dates.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/_config/__pycache__/display.cpython-311.pyc b/.venv/Lib/site-packages/pandas/_config/__pycache__/display.cpython-311.pyc index e9e5033c..22e61c1b 100644 Binary files 
a/.venv/Lib/site-packages/pandas/_config/__pycache__/display.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/_config/__pycache__/display.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/_config/__pycache__/localization.cpython-311.pyc b/.venv/Lib/site-packages/pandas/_config/__pycache__/localization.cpython-311.pyc index 742848d5..0cf3f866 100644 Binary files a/.venv/Lib/site-packages/pandas/_config/__pycache__/localization.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/_config/__pycache__/localization.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/_libs/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/_libs/__pycache__/__init__.cpython-311.pyc index 58988f91..c5d08704 100644 Binary files a/.venv/Lib/site-packages/pandas/_libs/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/_libs/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/_libs/tslibs/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/_libs/tslibs/__pycache__/__init__.cpython-311.pyc index 9a0faf25..25e6ea55 100644 Binary files a/.venv/Lib/site-packages/pandas/_libs/tslibs/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/_libs/tslibs/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/_libs/window/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/_libs/window/__pycache__/__init__.cpython-311.pyc index 17555aa0..9f932420 100644 Binary files a/.venv/Lib/site-packages/pandas/_libs/window/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/_libs/window/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/_testing/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/_testing/__pycache__/__init__.cpython-311.pyc index 2415f352..d9b032ee 100644 Binary files 
a/.venv/Lib/site-packages/pandas/_testing/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/_testing/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/_testing/__pycache__/_io.cpython-311.pyc b/.venv/Lib/site-packages/pandas/_testing/__pycache__/_io.cpython-311.pyc index e5082c96..e19ef13d 100644 Binary files a/.venv/Lib/site-packages/pandas/_testing/__pycache__/_io.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/_testing/__pycache__/_io.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/_testing/__pycache__/_random.cpython-311.pyc b/.venv/Lib/site-packages/pandas/_testing/__pycache__/_random.cpython-311.pyc index 2ec73a45..fba614f9 100644 Binary files a/.venv/Lib/site-packages/pandas/_testing/__pycache__/_random.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/_testing/__pycache__/_random.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/_testing/__pycache__/_warnings.cpython-311.pyc b/.venv/Lib/site-packages/pandas/_testing/__pycache__/_warnings.cpython-311.pyc index 5d653228..0e576bfa 100644 Binary files a/.venv/Lib/site-packages/pandas/_testing/__pycache__/_warnings.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/_testing/__pycache__/_warnings.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/_testing/__pycache__/asserters.cpython-311.pyc b/.venv/Lib/site-packages/pandas/_testing/__pycache__/asserters.cpython-311.pyc index 57549834..f5d7a7f4 100644 Binary files a/.venv/Lib/site-packages/pandas/_testing/__pycache__/asserters.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/_testing/__pycache__/asserters.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/_testing/__pycache__/compat.cpython-311.pyc b/.venv/Lib/site-packages/pandas/_testing/__pycache__/compat.cpython-311.pyc index d649ca56..d8ddf076 100644 Binary files a/.venv/Lib/site-packages/pandas/_testing/__pycache__/compat.cpython-311.pyc and 
b/.venv/Lib/site-packages/pandas/_testing/__pycache__/compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/_testing/__pycache__/contexts.cpython-311.pyc b/.venv/Lib/site-packages/pandas/_testing/__pycache__/contexts.cpython-311.pyc index 8e4b7cb4..c462993f 100644 Binary files a/.venv/Lib/site-packages/pandas/_testing/__pycache__/contexts.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/_testing/__pycache__/contexts.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/api/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/api/__pycache__/__init__.cpython-311.pyc index 6ac12282..b3604bc3 100644 Binary files a/.venv/Lib/site-packages/pandas/api/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/api/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/api/extensions/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/api/extensions/__pycache__/__init__.cpython-311.pyc index cfca0928..db0e39a1 100644 Binary files a/.venv/Lib/site-packages/pandas/api/extensions/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/api/extensions/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/api/indexers/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/api/indexers/__pycache__/__init__.cpython-311.pyc index fdc6c2af..2359d8a1 100644 Binary files a/.venv/Lib/site-packages/pandas/api/indexers/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/api/indexers/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/api/interchange/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/api/interchange/__pycache__/__init__.cpython-311.pyc index 5e9139d9..c642c598 100644 Binary files a/.venv/Lib/site-packages/pandas/api/interchange/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/pandas/api/interchange/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/api/types/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/api/types/__pycache__/__init__.cpython-311.pyc index 87b27f74..0c48f938 100644 Binary files a/.venv/Lib/site-packages/pandas/api/types/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/api/types/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/arrays/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/arrays/__pycache__/__init__.cpython-311.pyc index 3ed90c4e..964c6241 100644 Binary files a/.venv/Lib/site-packages/pandas/arrays/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/arrays/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/compat/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/compat/__pycache__/__init__.cpython-311.pyc index 347f9eef..93ba9ec3 100644 Binary files a/.venv/Lib/site-packages/pandas/compat/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/compat/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/compat/__pycache__/_optional.cpython-311.pyc b/.venv/Lib/site-packages/pandas/compat/__pycache__/_optional.cpython-311.pyc index 28a697bd..e95a6ae7 100644 Binary files a/.venv/Lib/site-packages/pandas/compat/__pycache__/_optional.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/compat/__pycache__/_optional.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/compat/__pycache__/chainmap.cpython-311.pyc b/.venv/Lib/site-packages/pandas/compat/__pycache__/chainmap.cpython-311.pyc index 9a9d5f2e..de84be4b 100644 Binary files a/.venv/Lib/site-packages/pandas/compat/__pycache__/chainmap.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/compat/__pycache__/chainmap.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pandas/compat/__pycache__/pickle_compat.cpython-311.pyc b/.venv/Lib/site-packages/pandas/compat/__pycache__/pickle_compat.cpython-311.pyc index 7d46604b..24f07aa9 100644 Binary files a/.venv/Lib/site-packages/pandas/compat/__pycache__/pickle_compat.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/compat/__pycache__/pickle_compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/compat/__pycache__/pyarrow.cpython-311.pyc b/.venv/Lib/site-packages/pandas/compat/__pycache__/pyarrow.cpython-311.pyc index 90f0bef8..69d69253 100644 Binary files a/.venv/Lib/site-packages/pandas/compat/__pycache__/pyarrow.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/compat/__pycache__/pyarrow.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/compat/numpy/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/compat/numpy/__pycache__/__init__.cpython-311.pyc index e02872e9..529f8fe1 100644 Binary files a/.venv/Lib/site-packages/pandas/compat/numpy/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/compat/numpy/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/compat/numpy/__pycache__/function.cpython-311.pyc b/.venv/Lib/site-packages/pandas/compat/numpy/__pycache__/function.cpython-311.pyc index 204c8c88..ad08f4e2 100644 Binary files a/.venv/Lib/site-packages/pandas/compat/numpy/__pycache__/function.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/compat/numpy/__pycache__/function.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/__pycache__/__init__.cpython-311.pyc index 7832f58f..696c5088 100644 Binary files a/.venv/Lib/site-packages/pandas/core/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pandas/core/__pycache__/accessor.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/__pycache__/accessor.cpython-311.pyc index 0826076c..5b3b0cf5 100644 Binary files a/.venv/Lib/site-packages/pandas/core/__pycache__/accessor.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/__pycache__/accessor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/__pycache__/algorithms.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/__pycache__/algorithms.cpython-311.pyc index d212b795..c9c72e3e 100644 Binary files a/.venv/Lib/site-packages/pandas/core/__pycache__/algorithms.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/__pycache__/algorithms.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/__pycache__/api.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/__pycache__/api.cpython-311.pyc index 19d5d497..b18b8ee5 100644 Binary files a/.venv/Lib/site-packages/pandas/core/__pycache__/api.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/__pycache__/api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/__pycache__/apply.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/__pycache__/apply.cpython-311.pyc index ad8cae45..6f80e863 100644 Binary files a/.venv/Lib/site-packages/pandas/core/__pycache__/apply.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/__pycache__/apply.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/__pycache__/arraylike.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/__pycache__/arraylike.cpython-311.pyc index 7c0454dc..d2407db7 100644 Binary files a/.venv/Lib/site-packages/pandas/core/__pycache__/arraylike.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/__pycache__/arraylike.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/__pycache__/base.cpython-311.pyc index 4690dc52..e85623c9 100644 Binary 
files a/.venv/Lib/site-packages/pandas/core/__pycache__/base.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/__pycache__/common.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/__pycache__/common.cpython-311.pyc index f6c3d9b1..34d050df 100644 Binary files a/.venv/Lib/site-packages/pandas/core/__pycache__/common.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/__pycache__/common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/__pycache__/config_init.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/__pycache__/config_init.cpython-311.pyc index b0701ec1..2f50eb19 100644 Binary files a/.venv/Lib/site-packages/pandas/core/__pycache__/config_init.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/__pycache__/config_init.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/__pycache__/construction.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/__pycache__/construction.cpython-311.pyc index 554a83e1..e98d20ab 100644 Binary files a/.venv/Lib/site-packages/pandas/core/__pycache__/construction.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/__pycache__/construction.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/__pycache__/describe.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/__pycache__/describe.cpython-311.pyc index dee03d61..956b3f5a 100644 Binary files a/.venv/Lib/site-packages/pandas/core/__pycache__/describe.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/__pycache__/describe.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/__pycache__/flags.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/__pycache__/flags.cpython-311.pyc index 3ba45111..f9b94301 100644 Binary files a/.venv/Lib/site-packages/pandas/core/__pycache__/flags.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/__pycache__/flags.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/pandas/core/__pycache__/frame.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/__pycache__/frame.cpython-311.pyc index ab258d76..7bdb065d 100644 Binary files a/.venv/Lib/site-packages/pandas/core/__pycache__/frame.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/__pycache__/frame.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/__pycache__/generic.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/__pycache__/generic.cpython-311.pyc index fd073147..ade9a720 100644 Binary files a/.venv/Lib/site-packages/pandas/core/__pycache__/generic.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/__pycache__/generic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/__pycache__/indexing.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/__pycache__/indexing.cpython-311.pyc index 72b29b7b..a098b944 100644 Binary files a/.venv/Lib/site-packages/pandas/core/__pycache__/indexing.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/__pycache__/indexing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/__pycache__/missing.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/__pycache__/missing.cpython-311.pyc index d692d8dd..0960627d 100644 Binary files a/.venv/Lib/site-packages/pandas/core/__pycache__/missing.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/__pycache__/missing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/__pycache__/nanops.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/__pycache__/nanops.cpython-311.pyc index c8564fb4..9f3db763 100644 Binary files a/.venv/Lib/site-packages/pandas/core/__pycache__/nanops.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/__pycache__/nanops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/__pycache__/roperator.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/__pycache__/roperator.cpython-311.pyc index 
efde3791..5e28679c 100644 Binary files a/.venv/Lib/site-packages/pandas/core/__pycache__/roperator.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/__pycache__/roperator.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/__pycache__/sample.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/__pycache__/sample.cpython-311.pyc index bd9f58be..a1c5609d 100644 Binary files a/.venv/Lib/site-packages/pandas/core/__pycache__/sample.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/__pycache__/sample.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/__pycache__/series.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/__pycache__/series.cpython-311.pyc index 2f44c971..bd5fcd98 100644 Binary files a/.venv/Lib/site-packages/pandas/core/__pycache__/series.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/__pycache__/series.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/__pycache__/shared_docs.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/__pycache__/shared_docs.cpython-311.pyc index 4677affa..d23e7582 100644 Binary files a/.venv/Lib/site-packages/pandas/core/__pycache__/shared_docs.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/__pycache__/shared_docs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/__pycache__/sorting.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/__pycache__/sorting.cpython-311.pyc index 99f4eac1..72fae926 100644 Binary files a/.venv/Lib/site-packages/pandas/core/__pycache__/sorting.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/__pycache__/sorting.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/_numba/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/_numba/__pycache__/__init__.cpython-311.pyc index 8e550abe..61565f34 100644 Binary files a/.venv/Lib/site-packages/pandas/core/_numba/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/pandas/core/_numba/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/_numba/__pycache__/executor.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/_numba/__pycache__/executor.cpython-311.pyc index f86210f8..134a46d2 100644 Binary files a/.venv/Lib/site-packages/pandas/core/_numba/__pycache__/executor.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/_numba/__pycache__/executor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/__init__.cpython-311.pyc index 10681531..fd680a36 100644 Binary files a/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/masked_reductions.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/masked_reductions.cpython-311.pyc index 4f05ef40..2dca9147 100644 Binary files a/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/masked_reductions.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/masked_reductions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/putmask.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/putmask.cpython-311.pyc index 13a45fa0..7b76604b 100644 Binary files a/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/putmask.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/putmask.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/quantile.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/quantile.cpython-311.pyc index e3135d9f..d65a156b 100644 Binary files 
a/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/quantile.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/quantile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/replace.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/replace.cpython-311.pyc index 3c73a81c..58ef00b5 100644 Binary files a/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/replace.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/replace.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/take.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/take.cpython-311.pyc index 0d028927..1763acbe 100644 Binary files a/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/take.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/take.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/transforms.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/transforms.cpython-311.pyc index 5ffbe70c..3a1ed935 100644 Binary files a/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/transforms.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/array_algos/__pycache__/transforms.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/__init__.cpython-311.pyc index d7a16c29..c3279ab5 100644 Binary files a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/_mixins.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/_mixins.cpython-311.pyc index 
dac7f81a..855ba402 100644 Binary files a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/_mixins.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/_mixins.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/_ranges.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/_ranges.cpython-311.pyc index 2a9705b0..38857193 100644 Binary files a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/_ranges.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/_ranges.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/base.cpython-311.pyc index 8f1bf68c..2332b08b 100644 Binary files a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/base.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/boolean.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/boolean.cpython-311.pyc index decd2562..31ef4750 100644 Binary files a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/boolean.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/boolean.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/categorical.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/categorical.cpython-311.pyc index 36fad369..dfe6da31 100644 Binary files a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/categorical.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/categorical.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/datetimelike.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/datetimelike.cpython-311.pyc index 4bed81cd..804aa24f 100644 
Binary files a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/datetimelike.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/datetimelike.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/datetimes.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/datetimes.cpython-311.pyc index 01fbd2a6..a76802a9 100644 Binary files a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/datetimes.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/datetimes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/floating.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/floating.cpython-311.pyc index 5a47b3ea..94442dbe 100644 Binary files a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/floating.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/floating.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/integer.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/integer.cpython-311.pyc index 02c33465..2361dc66 100644 Binary files a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/integer.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/integer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/interval.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/interval.cpython-311.pyc index f0d4899f..106fa4c0 100644 Binary files a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/interval.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/interval.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/masked.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/masked.cpython-311.pyc index fc11610c..aeef6590 100644 Binary files 
a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/masked.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/masked.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/numeric.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/numeric.cpython-311.pyc index 95dac26e..fc92cf13 100644 Binary files a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/numeric.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/numeric.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/numpy_.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/numpy_.cpython-311.pyc index 1c167074..edccbfb1 100644 Binary files a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/numpy_.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/numpy_.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/period.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/period.cpython-311.pyc index 118ee044..4c1e2c2b 100644 Binary files a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/period.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/period.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/string_.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/string_.cpython-311.pyc index 47bd3715..961fb961 100644 Binary files a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/string_.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/string_.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/string_arrow.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/string_arrow.cpython-311.pyc index 4e01cc69..03f4730f 100644 Binary files 
a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/string_arrow.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/string_arrow.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/timedeltas.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/timedeltas.cpython-311.pyc index ef84dc24..dd1b9d76 100644 Binary files a/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/timedeltas.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/__pycache__/timedeltas.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/arrow/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/arrow/__pycache__/__init__.cpython-311.pyc index 356cdfed..bb733669 100644 Binary files a/.venv/Lib/site-packages/pandas/core/arrays/arrow/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/arrow/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/arrow/__pycache__/array.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/arrow/__pycache__/array.cpython-311.pyc index 119b9fcc..01c86898 100644 Binary files a/.venv/Lib/site-packages/pandas/core/arrays/arrow/__pycache__/array.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/arrow/__pycache__/array.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/arrow/__pycache__/dtype.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/arrow/__pycache__/dtype.cpython-311.pyc index 2dcb2ee5..2a094f96 100644 Binary files a/.venv/Lib/site-packages/pandas/core/arrays/arrow/__pycache__/dtype.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/arrow/__pycache__/dtype.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/sparse/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/pandas/core/arrays/sparse/__pycache__/__init__.cpython-311.pyc index fd6a5d64..451bac56 100644 Binary files a/.venv/Lib/site-packages/pandas/core/arrays/sparse/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/sparse/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/sparse/__pycache__/accessor.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/sparse/__pycache__/accessor.cpython-311.pyc index 882721ed..29c7230c 100644 Binary files a/.venv/Lib/site-packages/pandas/core/arrays/sparse/__pycache__/accessor.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/sparse/__pycache__/accessor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/sparse/__pycache__/array.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/sparse/__pycache__/array.cpython-311.pyc index 376cc9af..11e0ce12 100644 Binary files a/.venv/Lib/site-packages/pandas/core/arrays/sparse/__pycache__/array.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/sparse/__pycache__/array.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/arrays/sparse/__pycache__/dtype.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/arrays/sparse/__pycache__/dtype.cpython-311.pyc index 9e6562f4..1be5e2ca 100644 Binary files a/.venv/Lib/site-packages/pandas/core/arrays/sparse/__pycache__/dtype.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/arrays/sparse/__pycache__/dtype.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/__init__.cpython-311.pyc index 5a148281..48e12cf0 100644 Binary files a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/align.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/align.cpython-311.pyc index 88e8a942..bce478ac 100644 Binary files a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/align.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/align.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/api.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/api.cpython-311.pyc index ddfabe93..e44fad79 100644 Binary files a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/api.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/check.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/check.cpython-311.pyc index 5bbc2929..8c19b636 100644 Binary files a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/check.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/check.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/common.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/common.cpython-311.pyc index dc92bea7..c4918d99 100644 Binary files a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/common.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/engines.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/engines.cpython-311.pyc index eebcd9fb..a7548a5e 100644 Binary files a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/engines.cpython-311.pyc and 
b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/engines.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/eval.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/eval.cpython-311.pyc index 85209675..95c242a4 100644 Binary files a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/eval.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/eval.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/expr.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/expr.cpython-311.pyc index 201ae87b..f068def1 100644 Binary files a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/expr.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/expr.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/expressions.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/expressions.cpython-311.pyc index 9ce3fd4f..d7af78a0 100644 Binary files a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/expressions.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/expressions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/ops.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/ops.cpython-311.pyc index 758f94a6..79260cb3 100644 Binary files a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/ops.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/parsing.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/parsing.cpython-311.pyc index 457cd546..37173239 100644 Binary files 
a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/parsing.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/parsing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/pytables.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/pytables.cpython-311.pyc index 204942b8..1094bbc3 100644 Binary files a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/pytables.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/pytables.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/scope.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/scope.cpython-311.pyc index 31d17b64..cb0bd6b7 100644 Binary files a/.venv/Lib/site-packages/pandas/core/computation/__pycache__/scope.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/computation/__pycache__/scope.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/__init__.cpython-311.pyc index 343901b2..98c24d16 100644 Binary files a/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/api.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/api.cpython-311.pyc index 67d69ed3..53b06d10 100644 Binary files a/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/api.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/astype.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/astype.cpython-311.pyc index 50f8fe81..103cb196 100644 Binary files 
a/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/astype.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/astype.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/base.cpython-311.pyc index 50b40a2d..8070c6f0 100644 Binary files a/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/base.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/cast.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/cast.cpython-311.pyc index a644dfbe..04a3bc40 100644 Binary files a/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/cast.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/cast.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/common.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/common.cpython-311.pyc index 82c0cf66..2a5ec763 100644 Binary files a/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/common.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/concat.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/concat.cpython-311.pyc index 8c33a61c..19b9f1a1 100644 Binary files a/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/concat.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/concat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/dtypes.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/dtypes.cpython-311.pyc index 9892fd2f..1a74b9da 100644 Binary files a/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/dtypes.cpython-311.pyc 
and b/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/dtypes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/generic.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/generic.cpython-311.pyc index 551a1ba7..c35a158a 100644 Binary files a/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/generic.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/generic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/inference.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/inference.cpython-311.pyc index c0d83bef..5ec58c96 100644 Binary files a/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/inference.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/inference.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/missing.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/missing.cpython-311.pyc index f798c37e..fafa40ef 100644 Binary files a/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/missing.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/dtypes/__pycache__/missing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/__init__.cpython-311.pyc index 021565e9..14747397 100644 Binary files a/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/base.cpython-311.pyc index 732b36b1..439a9df6 100644 Binary files a/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/base.cpython-311.pyc and 
b/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/categorical.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/categorical.cpython-311.pyc index a94b5a3a..f6ae58bc 100644 Binary files a/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/categorical.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/categorical.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/generic.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/generic.cpython-311.pyc index a7248b14..30ab27d3 100644 Binary files a/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/generic.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/generic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/groupby.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/groupby.cpython-311.pyc index 842550cb..23b5382d 100644 Binary files a/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/groupby.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/groupby.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/grouper.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/grouper.cpython-311.pyc index 6b5b544f..89c97b46 100644 Binary files a/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/grouper.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/grouper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/indexing.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/indexing.cpython-311.pyc index 4d841c4c..5412de54 100644 Binary files a/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/indexing.cpython-311.pyc and 
b/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/indexing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/numba_.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/numba_.cpython-311.pyc index 795b2c9c..8893f6e0 100644 Binary files a/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/numba_.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/numba_.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/ops.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/ops.cpython-311.pyc index ba017c8a..5dd6f524 100644 Binary files a/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/ops.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/groupby/__pycache__/ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/indexers/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/indexers/__pycache__/__init__.cpython-311.pyc index 54ca4882..596f3f93 100644 Binary files a/.venv/Lib/site-packages/pandas/core/indexers/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/indexers/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/indexers/__pycache__/objects.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/indexers/__pycache__/objects.cpython-311.pyc index df4f47d9..b4d314ad 100644 Binary files a/.venv/Lib/site-packages/pandas/core/indexers/__pycache__/objects.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/indexers/__pycache__/objects.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/indexers/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/indexers/__pycache__/utils.cpython-311.pyc index 0fe27ced..aa86b0d6 100644 Binary files a/.venv/Lib/site-packages/pandas/core/indexers/__pycache__/utils.cpython-311.pyc and 
b/.venv/Lib/site-packages/pandas/core/indexers/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/__init__.cpython-311.pyc index 02576dec..192677e8 100644 Binary files a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/accessors.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/accessors.cpython-311.pyc index 2dcd0432..833a7cf5 100644 Binary files a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/accessors.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/accessors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/api.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/api.cpython-311.pyc index f8463941..120e7bde 100644 Binary files a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/api.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/base.cpython-311.pyc index 9abbfca0..fd3398fc 100644 Binary files a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/base.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/category.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/category.cpython-311.pyc index 71f16f40..f8ffa143 100644 Binary files a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/category.cpython-311.pyc and 
b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/category.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/datetimelike.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/datetimelike.cpython-311.pyc index 90b4825a..95191b6e 100644 Binary files a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/datetimelike.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/datetimelike.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/datetimes.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/datetimes.cpython-311.pyc index e4a9babf..4b72133b 100644 Binary files a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/datetimes.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/datetimes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/extension.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/extension.cpython-311.pyc index 2fa589fb..435c025d 100644 Binary files a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/extension.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/extension.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/frozen.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/frozen.cpython-311.pyc index 50b18753..bdee250a 100644 Binary files a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/frozen.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/frozen.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/interval.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/interval.cpython-311.pyc index 49c40c6f..deaba2eb 100644 Binary files 
a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/interval.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/interval.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/multi.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/multi.cpython-311.pyc index a5efd175..b83fcfee 100644 Binary files a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/multi.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/multi.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/numeric.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/numeric.cpython-311.pyc index b0b726d2..8995604f 100644 Binary files a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/numeric.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/numeric.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/period.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/period.cpython-311.pyc index 26a222ac..795e0f78 100644 Binary files a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/period.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/period.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/range.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/range.cpython-311.pyc index 6614178e..a2ae6a27 100644 Binary files a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/range.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/range.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/timedeltas.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/timedeltas.cpython-311.pyc index 9c4e3992..2d3fef1d 100644 Binary files 
a/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/timedeltas.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/indexes/__pycache__/timedeltas.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/__init__.cpython-311.pyc index 289b858f..4c677cf9 100644 Binary files a/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/buffer.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/buffer.cpython-311.pyc index 3b2b44fb..a9cf6fca 100644 Binary files a/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/buffer.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/buffer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/column.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/column.cpython-311.pyc index ff1506fd..3224d1fd 100644 Binary files a/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/column.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/column.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/dataframe_protocol.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/dataframe_protocol.cpython-311.pyc index e42c6609..81531a6e 100644 Binary files a/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/dataframe_protocol.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/dataframe_protocol.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/from_dataframe.cpython-311.pyc 
b/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/from_dataframe.cpython-311.pyc index d8813df8..cce3ad68 100644 Binary files a/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/from_dataframe.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/from_dataframe.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/utils.cpython-311.pyc index cd728ca6..cfce5931 100644 Binary files a/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/interchange/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/internals/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/internals/__pycache__/__init__.cpython-311.pyc index 00f054b5..9f602405 100644 Binary files a/.venv/Lib/site-packages/pandas/core/internals/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/internals/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/internals/__pycache__/api.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/internals/__pycache__/api.cpython-311.pyc index 2c30c423..9706bc72 100644 Binary files a/.venv/Lib/site-packages/pandas/core/internals/__pycache__/api.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/internals/__pycache__/api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/internals/__pycache__/array_manager.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/internals/__pycache__/array_manager.cpython-311.pyc index f466557c..73bb95dc 100644 Binary files a/.venv/Lib/site-packages/pandas/core/internals/__pycache__/array_manager.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/internals/__pycache__/array_manager.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pandas/core/internals/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/internals/__pycache__/base.cpython-311.pyc index f122b986..1887e180 100644 Binary files a/.venv/Lib/site-packages/pandas/core/internals/__pycache__/base.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/internals/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/internals/__pycache__/blocks.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/internals/__pycache__/blocks.cpython-311.pyc index 71e5acc6..b0449e7b 100644 Binary files a/.venv/Lib/site-packages/pandas/core/internals/__pycache__/blocks.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/internals/__pycache__/blocks.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/internals/__pycache__/concat.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/internals/__pycache__/concat.cpython-311.pyc index cf38ccf9..fada1349 100644 Binary files a/.venv/Lib/site-packages/pandas/core/internals/__pycache__/concat.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/internals/__pycache__/concat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/internals/__pycache__/construction.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/internals/__pycache__/construction.cpython-311.pyc index f114d0e1..7870ce51 100644 Binary files a/.venv/Lib/site-packages/pandas/core/internals/__pycache__/construction.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/internals/__pycache__/construction.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/internals/__pycache__/managers.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/internals/__pycache__/managers.cpython-311.pyc index 8e0be47b..2380f755 100644 Binary files a/.venv/Lib/site-packages/pandas/core/internals/__pycache__/managers.cpython-311.pyc and 
b/.venv/Lib/site-packages/pandas/core/internals/__pycache__/managers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/internals/__pycache__/ops.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/internals/__pycache__/ops.cpython-311.pyc index ca7085f3..aa4cd826 100644 Binary files a/.venv/Lib/site-packages/pandas/core/internals/__pycache__/ops.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/internals/__pycache__/ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/ops/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/ops/__pycache__/__init__.cpython-311.pyc index 068abd60..0d53af49 100644 Binary files a/.venv/Lib/site-packages/pandas/core/ops/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/ops/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/ops/__pycache__/array_ops.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/ops/__pycache__/array_ops.cpython-311.pyc index 5968d0c2..22071e16 100644 Binary files a/.venv/Lib/site-packages/pandas/core/ops/__pycache__/array_ops.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/ops/__pycache__/array_ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/ops/__pycache__/common.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/ops/__pycache__/common.cpython-311.pyc index 93f2564a..aed255d7 100644 Binary files a/.venv/Lib/site-packages/pandas/core/ops/__pycache__/common.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/ops/__pycache__/common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/ops/__pycache__/dispatch.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/ops/__pycache__/dispatch.cpython-311.pyc index 146cc262..8195a4b3 100644 Binary files a/.venv/Lib/site-packages/pandas/core/ops/__pycache__/dispatch.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/ops/__pycache__/dispatch.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/pandas/core/ops/__pycache__/docstrings.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/ops/__pycache__/docstrings.cpython-311.pyc index 13500d7c..3eacd78a 100644 Binary files a/.venv/Lib/site-packages/pandas/core/ops/__pycache__/docstrings.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/ops/__pycache__/docstrings.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/ops/__pycache__/invalid.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/ops/__pycache__/invalid.cpython-311.pyc index 2bc45acc..4c3dc5bc 100644 Binary files a/.venv/Lib/site-packages/pandas/core/ops/__pycache__/invalid.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/ops/__pycache__/invalid.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/ops/__pycache__/mask_ops.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/ops/__pycache__/mask_ops.cpython-311.pyc index fa9f8d35..997b1742 100644 Binary files a/.venv/Lib/site-packages/pandas/core/ops/__pycache__/mask_ops.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/ops/__pycache__/mask_ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/ops/__pycache__/methods.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/ops/__pycache__/methods.cpython-311.pyc index 999a17fb..253e9c2e 100644 Binary files a/.venv/Lib/site-packages/pandas/core/ops/__pycache__/methods.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/ops/__pycache__/methods.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/ops/__pycache__/missing.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/ops/__pycache__/missing.cpython-311.pyc index 71e55bff..24d8b34e 100644 Binary files a/.venv/Lib/site-packages/pandas/core/ops/__pycache__/missing.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/ops/__pycache__/missing.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/__init__.cpython-311.pyc index 7858ce9c..ea1afdf3 100644 Binary files a/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/api.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/api.cpython-311.pyc index 2913abd5..05e9da5a 100644 Binary files a/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/api.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/concat.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/concat.cpython-311.pyc index a95bb184..fb328ae6 100644 Binary files a/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/concat.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/concat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/encoding.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/encoding.cpython-311.pyc index 3d370220..94afb2da 100644 Binary files a/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/encoding.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/encoding.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/melt.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/melt.cpython-311.pyc index f8a8d030..22316707 100644 Binary files a/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/melt.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/melt.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/merge.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/merge.cpython-311.pyc index cd751565..b1127ce5 100644 Binary files a/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/merge.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/merge.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/pivot.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/pivot.cpython-311.pyc index 74afd167..f681a4a7 100644 Binary files a/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/pivot.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/pivot.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/tile.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/tile.cpython-311.pyc index 15e810a2..28782f65 100644 Binary files a/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/tile.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/tile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/util.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/util.cpython-311.pyc index c7c2e7c9..b49b194b 100644 Binary files a/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/util.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/reshape/__pycache__/util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/strings/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/strings/__pycache__/__init__.cpython-311.pyc index dbb9463d..cfecc78b 100644 Binary files a/.venv/Lib/site-packages/pandas/core/strings/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/strings/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pandas/core/strings/__pycache__/accessor.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/strings/__pycache__/accessor.cpython-311.pyc index ab25dd6d..a756ac9a 100644 Binary files a/.venv/Lib/site-packages/pandas/core/strings/__pycache__/accessor.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/strings/__pycache__/accessor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/strings/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/strings/__pycache__/base.cpython-311.pyc index bc2123be..692f89dc 100644 Binary files a/.venv/Lib/site-packages/pandas/core/strings/__pycache__/base.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/strings/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/strings/__pycache__/object_array.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/strings/__pycache__/object_array.cpython-311.pyc index 79d8df85..3fe800d1 100644 Binary files a/.venv/Lib/site-packages/pandas/core/strings/__pycache__/object_array.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/strings/__pycache__/object_array.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/tools/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/tools/__pycache__/__init__.cpython-311.pyc index 41700e39..4d6c5a22 100644 Binary files a/.venv/Lib/site-packages/pandas/core/tools/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/tools/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/tools/__pycache__/datetimes.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/tools/__pycache__/datetimes.cpython-311.pyc index 22527246..1c145293 100644 Binary files a/.venv/Lib/site-packages/pandas/core/tools/__pycache__/datetimes.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/tools/__pycache__/datetimes.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pandas/core/tools/__pycache__/numeric.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/tools/__pycache__/numeric.cpython-311.pyc index 0f307e92..f48c0d59 100644 Binary files a/.venv/Lib/site-packages/pandas/core/tools/__pycache__/numeric.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/tools/__pycache__/numeric.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/tools/__pycache__/timedeltas.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/tools/__pycache__/timedeltas.cpython-311.pyc index 9c2e61d6..0f43cfe3 100644 Binary files a/.venv/Lib/site-packages/pandas/core/tools/__pycache__/timedeltas.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/tools/__pycache__/timedeltas.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/tools/__pycache__/times.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/tools/__pycache__/times.cpython-311.pyc index 9e4aa4cf..1acb076e 100644 Binary files a/.venv/Lib/site-packages/pandas/core/tools/__pycache__/times.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/tools/__pycache__/times.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/util/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/util/__pycache__/__init__.cpython-311.pyc index 572097de..a9ff01f1 100644 Binary files a/.venv/Lib/site-packages/pandas/core/util/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/util/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/util/__pycache__/hashing.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/util/__pycache__/hashing.cpython-311.pyc index 25cfb853..e77e9a64 100644 Binary files a/.venv/Lib/site-packages/pandas/core/util/__pycache__/hashing.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/util/__pycache__/hashing.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pandas/core/util/__pycache__/numba_.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/util/__pycache__/numba_.cpython-311.pyc index 43d75053..249569a6 100644 Binary files a/.venv/Lib/site-packages/pandas/core/util/__pycache__/numba_.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/util/__pycache__/numba_.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/window/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/window/__pycache__/__init__.cpython-311.pyc index 21a7dd7d..694c9437 100644 Binary files a/.venv/Lib/site-packages/pandas/core/window/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/window/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/window/__pycache__/common.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/window/__pycache__/common.cpython-311.pyc index 06138e7a..dc351756 100644 Binary files a/.venv/Lib/site-packages/pandas/core/window/__pycache__/common.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/window/__pycache__/common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/window/__pycache__/doc.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/window/__pycache__/doc.cpython-311.pyc index 4da6eb7d..ee7f1236 100644 Binary files a/.venv/Lib/site-packages/pandas/core/window/__pycache__/doc.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/window/__pycache__/doc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/window/__pycache__/ewm.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/window/__pycache__/ewm.cpython-311.pyc index 0b89e303..428798f3 100644 Binary files a/.venv/Lib/site-packages/pandas/core/window/__pycache__/ewm.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/window/__pycache__/ewm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/window/__pycache__/expanding.cpython-311.pyc 
b/.venv/Lib/site-packages/pandas/core/window/__pycache__/expanding.cpython-311.pyc index db47ae8b..8d0dc933 100644 Binary files a/.venv/Lib/site-packages/pandas/core/window/__pycache__/expanding.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/window/__pycache__/expanding.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/window/__pycache__/numba_.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/window/__pycache__/numba_.cpython-311.pyc index c3ea8df0..711373c7 100644 Binary files a/.venv/Lib/site-packages/pandas/core/window/__pycache__/numba_.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/window/__pycache__/numba_.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/window/__pycache__/online.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/window/__pycache__/online.cpython-311.pyc index 47dd03d9..65d98325 100644 Binary files a/.venv/Lib/site-packages/pandas/core/window/__pycache__/online.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/window/__pycache__/online.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/core/window/__pycache__/rolling.cpython-311.pyc b/.venv/Lib/site-packages/pandas/core/window/__pycache__/rolling.cpython-311.pyc index 90c74997..d0297885 100644 Binary files a/.venv/Lib/site-packages/pandas/core/window/__pycache__/rolling.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/core/window/__pycache__/rolling.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/errors/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/errors/__pycache__/__init__.cpython-311.pyc index 732f7167..f2dc52c2 100644 Binary files a/.venv/Lib/site-packages/pandas/errors/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/errors/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/pandas/io/__pycache__/__init__.cpython-311.pyc index c9ba889e..bac4955e 100644 Binary files a/.venv/Lib/site-packages/pandas/io/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/__pycache__/api.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/__pycache__/api.cpython-311.pyc index 92907933..29aaab30 100644 Binary files a/.venv/Lib/site-packages/pandas/io/__pycache__/api.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/__pycache__/api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/__pycache__/clipboards.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/__pycache__/clipboards.cpython-311.pyc index 9036a41c..a1e940ca 100644 Binary files a/.venv/Lib/site-packages/pandas/io/__pycache__/clipboards.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/__pycache__/clipboards.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/__pycache__/common.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/__pycache__/common.cpython-311.pyc index 572651af..0c6a22eb 100644 Binary files a/.venv/Lib/site-packages/pandas/io/__pycache__/common.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/__pycache__/common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/__pycache__/date_converters.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/__pycache__/date_converters.cpython-311.pyc index e5b9f6a7..737668ae 100644 Binary files a/.venv/Lib/site-packages/pandas/io/__pycache__/date_converters.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/__pycache__/date_converters.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/__pycache__/feather_format.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/__pycache__/feather_format.cpython-311.pyc index 7e852252..c90d8b6f 100644 Binary files 
a/.venv/Lib/site-packages/pandas/io/__pycache__/feather_format.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/__pycache__/feather_format.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/__pycache__/gbq.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/__pycache__/gbq.cpython-311.pyc index 76d090cc..51bc7b2b 100644 Binary files a/.venv/Lib/site-packages/pandas/io/__pycache__/gbq.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/__pycache__/gbq.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/__pycache__/html.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/__pycache__/html.cpython-311.pyc index 4dedd0ad..fef65b30 100644 Binary files a/.venv/Lib/site-packages/pandas/io/__pycache__/html.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/__pycache__/html.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/__pycache__/orc.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/__pycache__/orc.cpython-311.pyc index 28c62035..2bbe23df 100644 Binary files a/.venv/Lib/site-packages/pandas/io/__pycache__/orc.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/__pycache__/orc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/__pycache__/parquet.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/__pycache__/parquet.cpython-311.pyc index 79fb5aba..b46a6c43 100644 Binary files a/.venv/Lib/site-packages/pandas/io/__pycache__/parquet.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/__pycache__/parquet.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/__pycache__/pickle.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/__pycache__/pickle.cpython-311.pyc index 8fce759e..dc7d560a 100644 Binary files a/.venv/Lib/site-packages/pandas/io/__pycache__/pickle.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/__pycache__/pickle.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/__pycache__/pytables.cpython-311.pyc 
b/.venv/Lib/site-packages/pandas/io/__pycache__/pytables.cpython-311.pyc index ef482aff..e7224707 100644 Binary files a/.venv/Lib/site-packages/pandas/io/__pycache__/pytables.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/__pycache__/pytables.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/__pycache__/spss.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/__pycache__/spss.cpython-311.pyc index 9a0a82ca..b3b63a82 100644 Binary files a/.venv/Lib/site-packages/pandas/io/__pycache__/spss.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/__pycache__/spss.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/__pycache__/sql.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/__pycache__/sql.cpython-311.pyc index b416cef1..33d7e242 100644 Binary files a/.venv/Lib/site-packages/pandas/io/__pycache__/sql.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/__pycache__/sql.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/__pycache__/stata.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/__pycache__/stata.cpython-311.pyc index ad9fec22..76df8903 100644 Binary files a/.venv/Lib/site-packages/pandas/io/__pycache__/stata.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/__pycache__/stata.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/__pycache__/xml.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/__pycache__/xml.cpython-311.pyc index 5cbf69c4..039a16f5 100644 Binary files a/.venv/Lib/site-packages/pandas/io/__pycache__/xml.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/__pycache__/xml.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/excel/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/excel/__pycache__/__init__.cpython-311.pyc index 10212c6d..16c2d356 100644 Binary files a/.venv/Lib/site-packages/pandas/io/excel/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/pandas/io/excel/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_base.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_base.cpython-311.pyc index c38d8a7b..63d279a8 100644 Binary files a/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_base.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_odfreader.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_odfreader.cpython-311.pyc index 97ed50db..bf25653d 100644 Binary files a/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_odfreader.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_odfreader.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_odswriter.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_odswriter.cpython-311.pyc index 5fe89af7..eecba675 100644 Binary files a/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_odswriter.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_odswriter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_openpyxl.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_openpyxl.cpython-311.pyc index 04bf75b7..06ec86ed 100644 Binary files a/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_openpyxl.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_openpyxl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_pyxlsb.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_pyxlsb.cpython-311.pyc index 19d9bc5a..8683d212 100644 Binary files a/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_pyxlsb.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_pyxlsb.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_util.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_util.cpython-311.pyc index 457fad9f..caf9076c 100644 Binary files a/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_util.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_xlrd.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_xlrd.cpython-311.pyc index 36096347..feb6cc07 100644 Binary files a/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_xlrd.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_xlrd.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_xlsxwriter.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_xlsxwriter.cpython-311.pyc index b6980090..4a87e4a6 100644 Binary files a/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_xlsxwriter.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_xlsxwriter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_xlwt.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_xlwt.cpython-311.pyc index 7fd0ff47..fd1c4f24 100644 Binary files a/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_xlwt.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/excel/__pycache__/_xlwt.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/formats/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/formats/__pycache__/__init__.cpython-311.pyc index f24facba..e1ff41a9 100644 Binary files a/.venv/Lib/site-packages/pandas/io/formats/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/formats/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/formats/__pycache__/console.cpython-311.pyc 
b/.venv/Lib/site-packages/pandas/io/formats/__pycache__/console.cpython-311.pyc index 30681606..b6540f23 100644 Binary files a/.venv/Lib/site-packages/pandas/io/formats/__pycache__/console.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/formats/__pycache__/console.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/formats/__pycache__/format.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/formats/__pycache__/format.cpython-311.pyc index 1ada8fc2..0275dd27 100644 Binary files a/.venv/Lib/site-packages/pandas/io/formats/__pycache__/format.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/formats/__pycache__/format.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/formats/__pycache__/info.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/formats/__pycache__/info.cpython-311.pyc index 37d74d64..b79d9108 100644 Binary files a/.venv/Lib/site-packages/pandas/io/formats/__pycache__/info.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/formats/__pycache__/info.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/formats/__pycache__/printing.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/formats/__pycache__/printing.cpython-311.pyc index 9cbb4839..80c464b2 100644 Binary files a/.venv/Lib/site-packages/pandas/io/formats/__pycache__/printing.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/formats/__pycache__/printing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/json/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/json/__pycache__/__init__.cpython-311.pyc index 5f739f51..91acc6ee 100644 Binary files a/.venv/Lib/site-packages/pandas/io/json/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/json/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/json/__pycache__/_json.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/json/__pycache__/_json.cpython-311.pyc index 
6ec334dd..2cefeeae 100644 Binary files a/.venv/Lib/site-packages/pandas/io/json/__pycache__/_json.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/json/__pycache__/_json.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/json/__pycache__/_normalize.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/json/__pycache__/_normalize.cpython-311.pyc index c99bcbb1..f110fed6 100644 Binary files a/.venv/Lib/site-packages/pandas/io/json/__pycache__/_normalize.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/json/__pycache__/_normalize.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/json/__pycache__/_table_schema.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/json/__pycache__/_table_schema.cpython-311.pyc index aeaa6f2d..6f0d0ef1 100644 Binary files a/.venv/Lib/site-packages/pandas/io/json/__pycache__/_table_schema.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/json/__pycache__/_table_schema.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/__init__.cpython-311.pyc index c5c68d53..c839e873 100644 Binary files a/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/arrow_parser_wrapper.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/arrow_parser_wrapper.cpython-311.pyc index ed6cbda3..2a006d84 100644 Binary files a/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/arrow_parser_wrapper.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/arrow_parser_wrapper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/base_parser.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/base_parser.cpython-311.pyc 
index c6e5b9d1..a0540a4c 100644 Binary files a/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/base_parser.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/base_parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/c_parser_wrapper.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/c_parser_wrapper.cpython-311.pyc index 7d7074ee..89b9657d 100644 Binary files a/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/c_parser_wrapper.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/c_parser_wrapper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/python_parser.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/python_parser.cpython-311.pyc index e8e9823f..dc8e4f1b 100644 Binary files a/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/python_parser.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/python_parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/readers.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/readers.cpython-311.pyc index 4befdb4d..94426b59 100644 Binary files a/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/readers.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/parsers/__pycache__/readers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/sas/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/sas/__pycache__/__init__.cpython-311.pyc index da488983..7de18286 100644 Binary files a/.venv/Lib/site-packages/pandas/io/sas/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/sas/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/io/sas/__pycache__/sasreader.cpython-311.pyc b/.venv/Lib/site-packages/pandas/io/sas/__pycache__/sasreader.cpython-311.pyc index 
86e9623c..d2d08864 100644 Binary files a/.venv/Lib/site-packages/pandas/io/sas/__pycache__/sasreader.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/io/sas/__pycache__/sasreader.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/plotting/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/plotting/__pycache__/__init__.cpython-311.pyc index b460b701..9d78f4db 100644 Binary files a/.venv/Lib/site-packages/pandas/plotting/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/plotting/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/plotting/__pycache__/_core.cpython-311.pyc b/.venv/Lib/site-packages/pandas/plotting/__pycache__/_core.cpython-311.pyc index 2c78c2ed..222c6989 100644 Binary files a/.venv/Lib/site-packages/pandas/plotting/__pycache__/_core.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/plotting/__pycache__/_core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/plotting/__pycache__/_misc.cpython-311.pyc b/.venv/Lib/site-packages/pandas/plotting/__pycache__/_misc.cpython-311.pyc index d3b0a9d0..afa779ae 100644 Binary files a/.venv/Lib/site-packages/pandas/plotting/__pycache__/_misc.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/plotting/__pycache__/_misc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/tseries/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/tseries/__pycache__/__init__.cpython-311.pyc index fdfd1b37..8b400534 100644 Binary files a/.venv/Lib/site-packages/pandas/tseries/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/tseries/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/tseries/__pycache__/api.cpython-311.pyc b/.venv/Lib/site-packages/pandas/tseries/__pycache__/api.cpython-311.pyc index c9ab5b5d..430b0cbc 100644 Binary files a/.venv/Lib/site-packages/pandas/tseries/__pycache__/api.cpython-311.pyc and 
b/.venv/Lib/site-packages/pandas/tseries/__pycache__/api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/tseries/__pycache__/frequencies.cpython-311.pyc b/.venv/Lib/site-packages/pandas/tseries/__pycache__/frequencies.cpython-311.pyc index 5c10b3aa..7f56ac16 100644 Binary files a/.venv/Lib/site-packages/pandas/tseries/__pycache__/frequencies.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/tseries/__pycache__/frequencies.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/tseries/__pycache__/offsets.cpython-311.pyc b/.venv/Lib/site-packages/pandas/tseries/__pycache__/offsets.cpython-311.pyc index 46542857..1f8c6d85 100644 Binary files a/.venv/Lib/site-packages/pandas/tseries/__pycache__/offsets.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/tseries/__pycache__/offsets.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/util/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/util/__pycache__/__init__.cpython-311.pyc index 29255e77..7e9f3852 100644 Binary files a/.venv/Lib/site-packages/pandas/util/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/util/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/util/__pycache__/_decorators.cpython-311.pyc b/.venv/Lib/site-packages/pandas/util/__pycache__/_decorators.cpython-311.pyc index b5ab239b..1b12440e 100644 Binary files a/.venv/Lib/site-packages/pandas/util/__pycache__/_decorators.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/util/__pycache__/_decorators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/util/__pycache__/_exceptions.cpython-311.pyc b/.venv/Lib/site-packages/pandas/util/__pycache__/_exceptions.cpython-311.pyc index 0112cfd0..2eb83ac7 100644 Binary files a/.venv/Lib/site-packages/pandas/util/__pycache__/_exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/util/__pycache__/_exceptions.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pandas/util/__pycache__/_print_versions.cpython-311.pyc b/.venv/Lib/site-packages/pandas/util/__pycache__/_print_versions.cpython-311.pyc index a4afaa2c..b4462079 100644 Binary files a/.venv/Lib/site-packages/pandas/util/__pycache__/_print_versions.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/util/__pycache__/_print_versions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/util/__pycache__/_tester.cpython-311.pyc b/.venv/Lib/site-packages/pandas/util/__pycache__/_tester.cpython-311.pyc index 727230b9..7fa37ace 100644 Binary files a/.venv/Lib/site-packages/pandas/util/__pycache__/_tester.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/util/__pycache__/_tester.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/util/__pycache__/_validators.cpython-311.pyc b/.venv/Lib/site-packages/pandas/util/__pycache__/_validators.cpython-311.pyc index 8ae6d579..22de4966 100644 Binary files a/.venv/Lib/site-packages/pandas/util/__pycache__/_validators.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/util/__pycache__/_validators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pandas/util/version/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pandas/util/version/__pycache__/__init__.cpython-311.pyc index 1ec169de..abffa932 100644 Binary files a/.venv/Lib/site-packages/pandas/util/version/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pandas/util/version/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/__pycache__/__init__.cpython-311.pyc index de2e5549..9b4c2b8c 100644 Binary files a/.venv/Lib/site-packages/pip/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/pip/_internal/__pycache__/__init__.cpython-311.pyc index 82400362..c32f03c4 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/__pycache__/build_env.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/__pycache__/build_env.cpython-311.pyc index da3e3f30..c880f2fe 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/__pycache__/build_env.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/__pycache__/build_env.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/__pycache__/cache.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/__pycache__/cache.cpython-311.pyc index 13abe026..a56476e3 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/__pycache__/cache.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/__pycache__/cache.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/__pycache__/configuration.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/__pycache__/configuration.cpython-311.pyc index 95adeee3..a9285099 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/__pycache__/configuration.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/__pycache__/configuration.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/__pycache__/exceptions.cpython-311.pyc index 81b2afcf..9c60bf95 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/__pycache__/pyproject.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/__pycache__/pyproject.cpython-311.pyc index 72758a5f..7ecbd74a 
100644 Binary files a/.venv/Lib/site-packages/pip/_internal/__pycache__/pyproject.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/__pycache__/pyproject.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-311.pyc index 113948b7..db570e1c 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-311.pyc index e4da6be6..fc47236b 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-311.pyc index 4c64f83a..15cb0368 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-311.pyc index 98cc4519..262b3fee 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-311.pyc index 
0370793e..d50c7b9c 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-311.pyc index db3c0944..c0216726 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-311.pyc index 67e613a2..9435f9c2 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/main.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/main.cpython-311.pyc index ca703bba..a96eb3da 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/main.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/main.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-311.pyc index 7ada7fc7..71fb4344 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/parser.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/parser.cpython-311.pyc index 
4e4c9342..5c4644b3 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/parser.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-311.pyc index 28cb5a0f..1c6e3a13 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-311.pyc index 3bae1608..d82191d1 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-311.pyc index ab13481d..e6f15660 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-311.pyc index 55b97efb..68384b8b 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-311.pyc index 54b19efc..6f45363b 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/commands/__pycache__/install.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/commands/__pycache__/install.cpython-311.pyc index b27c7b08..3841c59b 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/commands/__pycache__/install.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/commands/__pycache__/install.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-311.pyc index fe3a5261..00f35e77 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/distributions/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/distributions/__pycache__/base.cpython-311.pyc index 1ca93f58..63856737 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/distributions/__pycache__/base.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/distributions/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-311.pyc index 8d7773ac..dfce8ddb 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-311.pyc index eeed8f28..579faa2e 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-311.pyc index 326f13c4..3a0bddf1 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/index/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/index/__pycache__/__init__.cpython-311.pyc index b62ae063..ef0ba2c4 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/index/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/index/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/index/__pycache__/collector.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/index/__pycache__/collector.cpython-311.pyc index 352b86c6..ad990330 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/index/__pycache__/collector.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/index/__pycache__/collector.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-311.pyc index b52437f2..612bc2e2 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-311.pyc and 
b/.venv/Lib/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/index/__pycache__/sources.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/index/__pycache__/sources.cpython-311.pyc index e9faa80a..ce42f795 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/index/__pycache__/sources.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/index/__pycache__/sources.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-311.pyc index 238502bf..1b649c58 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-311.pyc index 4eb4a26c..664633cd 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/locations/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/locations/__pycache__/base.cpython-311.pyc index d0290165..31733227 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/locations/__pycache__/base.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/locations/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-311.pyc index 34b82bd4..a4265e24 100644 Binary files 
a/.venv/Lib/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-311.pyc index 9df30f95..a70015fe 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/metadata/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/metadata/__pycache__/base.cpython-311.pyc index 347ea664..15b3066c 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/metadata/__pycache__/base.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/metadata/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-311.pyc index 68287789..53db3702 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-311.pyc index 535e9b5f..7bfdad3a 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-311.pyc 
b/.venv/Lib/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-311.pyc index 7d90ebd7..50c61b57 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-311.pyc index 4b0e870f..db37aca6 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-311.pyc index 39a93978..21e9b994 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/__init__.cpython-311.pyc index a28730e7..29a9b4f3 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/candidate.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/candidate.cpython-311.pyc index a62cc0f6..677963ef 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/candidate.cpython-311.pyc and 
b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/candidate.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-311.pyc index a4f14c8c..4946d846 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/format_control.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/format_control.cpython-311.pyc index 882c4a5f..afc59141 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/format_control.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/format_control.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/index.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/index.cpython-311.pyc index 603b62c7..09a3a74b 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/index.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/index.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-311.pyc index 55cebdff..1aaa6ce3 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/link.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/link.cpython-311.pyc index c47e117b..47f647e4 100644 Binary files 
a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/link.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/link.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/scheme.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/scheme.cpython-311.pyc index de21b35c..0c9775ba 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/scheme.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/scheme.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-311.pyc index 7421fdb2..5a8c966f 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-311.pyc index 89a9cb31..c47fe5c8 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/target_python.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/target_python.cpython-311.pyc index 202dc8e7..5b3fe1bd 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/target_python.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/target_python.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/wheel.cpython-311.pyc 
b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/wheel.cpython-311.pyc index ec57a089..5aa3598c 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/models/__pycache__/wheel.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/models/__pycache__/wheel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/network/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/network/__pycache__/__init__.cpython-311.pyc index 395151bb..b1583dfe 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/network/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/network/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/network/__pycache__/auth.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/network/__pycache__/auth.cpython-311.pyc index e6252eb9..8f06efe2 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/network/__pycache__/auth.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/network/__pycache__/auth.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/network/__pycache__/cache.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/network/__pycache__/cache.cpython-311.pyc index b327d033..e590e35c 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/network/__pycache__/cache.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/network/__pycache__/cache.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/network/__pycache__/download.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/network/__pycache__/download.cpython-311.pyc index de9f7071..83c8b788 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/network/__pycache__/download.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/network/__pycache__/download.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-311.pyc 
b/.venv/Lib/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-311.pyc index f3a5775c..cde33003 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/network/__pycache__/session.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/network/__pycache__/session.cpython-311.pyc index e65fd40e..3ea35ffc 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/network/__pycache__/session.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/network/__pycache__/session.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/network/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/network/__pycache__/utils.cpython-311.pyc index ae91c482..aa1af303 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/network/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/network/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-311.pyc index 9df41beb..1bd53e3e 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/operations/__pycache__/check.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/operations/__pycache__/check.cpython-311.pyc index 472039ca..56ae1505 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/operations/__pycache__/check.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/operations/__pycache__/check.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-311.pyc index 4c7705ba..96f41122 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-311.pyc index 447d39fd..245f2cee 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-311.pyc index a90efb19..69a6d983 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-311.pyc index 81634695..357a1b2e 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-311.pyc index fcb08175..94497737 
100644 Binary files a/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-311.pyc index 609f4eed..94baa9ef 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-311.pyc index 9724ca61..535d9816 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-311.pyc index 9e27f61e..f47df798 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-311.pyc index 6ad0839e..b8a0d788 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-311.pyc and 
b/.venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-311.pyc index 44bd62ca..a5dd89ea 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-311.pyc index 18c0eaad..a1a39b31 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-311.pyc index a429d31b..a6e397cf 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/req/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/req/__pycache__/__init__.cpython-311.pyc index 9330ad0d..4c6f2ea0 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/req/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/req/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/req/__pycache__/constructors.cpython-311.pyc 
b/.venv/Lib/site-packages/pip/_internal/req/__pycache__/constructors.cpython-311.pyc index 520bec89..71f31a7a 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/req/__pycache__/constructors.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/req/__pycache__/constructors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/req/__pycache__/req_file.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/req/__pycache__/req_file.cpython-311.pyc index c5a32dfb..e3de73ca 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/req/__pycache__/req_file.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/req/__pycache__/req_file.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/req/__pycache__/req_install.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/req/__pycache__/req_install.cpython-311.pyc index a95eec7c..1652f19c 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/req/__pycache__/req_install.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/req/__pycache__/req_install.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/req/__pycache__/req_set.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/req/__pycache__/req_set.cpython-311.pyc index dccc3e64..fc128ffb 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/req/__pycache__/req_set.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/req/__pycache__/req_set.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-311.pyc index 25424a22..57f96846 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-311.pyc index 9f54fa4d..15adfefb 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/resolution/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/resolution/__pycache__/base.cpython-311.pyc index da9e4f17..8e99300c 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/resolution/__pycache__/base.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/resolution/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-311.pyc index ffcd45f7..7bdfd7a8 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-311.pyc index cf815af0..328e0d25 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-311.pyc index 5d683f6c..6a513ff6 100644 Binary files 
a/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-311.pyc index ee253b08..18f332d5 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-311.pyc index 0513a450..699ecc0e 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-311.pyc index 68d1b812..444d688e 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-311.pyc index 397e2c6b..b99a13bc 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-311.pyc and 
b/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-311.pyc index 75eb0b3a..50a9ca61 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-311.pyc index 6e8c36f3..e466a270 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-311.pyc index f7ad5943..55dd9fc7 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/_jaraco_text.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/_jaraco_text.cpython-311.pyc index 0f6b4d85..c513badc 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/_jaraco_text.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/_jaraco_text.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/_log.cpython-311.pyc 
b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/_log.cpython-311.pyc index af9c6a21..16f9156e 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/_log.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/_log.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-311.pyc index 6fdea519..68f582c9 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/compat.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/compat.cpython-311.pyc index e2a82db4..bb6f463b 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/compat.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-311.pyc index e9e927eb..f04b9c9f 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-311.pyc index b045ee0a..9d17a1c6 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-311.pyc index c2ca9e5a..7059a47c 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-311.pyc index 6a6477ad..408b5a97 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-311.pyc index 27c2aab8..bb9c8b72 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-311.pyc index 1cda2b31..dd1085f0 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-311.pyc index 09b6442f..e6079ec5 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-311.pyc and 
b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-311.pyc index d102ae78..21386445 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-311.pyc index ad56e3dc..5f061771 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-311.pyc index 2a501dc0..b7c348b5 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/logging.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/logging.cpython-311.pyc index 87950875..a20496f2 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/logging.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/logging.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/misc.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/misc.cpython-311.pyc index 37f29eb7..821d0081 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/misc.cpython-311.pyc and 
b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/misc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/models.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/models.cpython-311.pyc index ca4584ed..748ce9ff 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/models.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/models.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-311.pyc index 8e99c20e..43cb0f19 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-311.pyc index 2d650327..51cdf3d2 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-311.pyc index b7fb21e2..42e40ff9 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-311.pyc index f433a77e..92a34e01 100644 Binary files 
a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-311.pyc index b90d3911..ab036168 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/urls.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/urls.cpython-311.pyc index 9aa5be8d..e0c65ffe 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/urls.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/urls.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-311.pyc index c0209031..ca93582b 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-311.pyc index 50e312d1..92ec32aa 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-311.pyc index 4af5ac66..59b47976 100644 Binary files 
a/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-311.pyc index 8899cd8d..4328ee5c 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/git.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/git.cpython-311.pyc index f88185eb..9090f91b 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/git.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/git.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-311.pyc index 60fdf233..54682020 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-311.pyc index c9341756..3ab04cf3 100644 Binary files a/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-311.pyc b/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-311.pyc index 373fbf58..33f9e6d1 100644 Binary files 
a/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/__pycache__/__init__.cpython-311.pyc index 69e8493e..79130341 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-311.pyc index 059f3551..0b2c5230 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-311.pyc index d5b2e83b..9c7db455 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-311.pyc index e4e4e599..183e17fc 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-311.pyc index 
3cf3ab43..aaa618a0 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-311.pyc index f7e5b952..655d647b 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-311.pyc index 77583f55..f058ae9d 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-311.pyc index 77a0e2ba..97208429 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-311.pyc index 9d927500..71e96487 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-311.pyc index bcede261..1e679806 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-311.pyc index 36e62749..9eff7fd0 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-311.pyc index c378ca39..cc95f995 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-311.pyc index a3e40e22..2f8ba02b 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-311.pyc index 7786e7cc..31551413 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-311.pyc index 3e15c2ca..d0eea809 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-311.pyc index 3fb3d417..b08e6898 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-311.pyc index 14dd884a..4352de0a 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-311.pyc index e750cfd1..7e8ae219 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-311.pyc index b7ed1be1..6a6871f1 100644 Binary files 
a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-311.pyc index 02d22de1..a8185998 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachinedict.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachinedict.cpython-311.pyc index 48e43ce2..b7709f02 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachinedict.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachinedict.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-311.pyc index 0934a3f1..8b84c6ea 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-311.pyc index ddcf1cdd..bdb9b111 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-311.pyc 
b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-311.pyc index de56db01..8652664c 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-311.pyc index 2d1a315f..dccef5c5 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-311.pyc index 16a2fddc..bfa7994e 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-311.pyc index 75d9342e..5cbe0ab5 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-311.pyc index 0145e8f4..00ae4f25 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-311.pyc index c9cd116b..1b56dd2a 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-311.pyc index 53f2532c..cd2edda5 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-311.pyc index 4c228129..a5f1786c 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-311.pyc index cbda01ad..89a9d22b 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-311.pyc index 1a4a0578..8cb9066d 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-311.pyc and 
b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-311.pyc index e157d7c6..e93b8d10 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/johabfreq.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/johabfreq.cpython-311.pyc index a771f3ec..7c7d3ae6 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/johabfreq.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/johabfreq.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/johabprober.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/johabprober.cpython-311.pyc index a9baded3..ae16651e 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/johabprober.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/johabprober.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-311.pyc index 9ade6677..d74eceab 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-311.pyc index 81e41225..3f5619d7 100644 Binary files 
a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-311.pyc index 050d4c27..2132e162 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-311.pyc index c66bebf7..c4e6f4ed 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-311.pyc index 9b3905b6..212b0065 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-311.pyc index 4facb9a4..2c757463 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-311.pyc 
b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-311.pyc index 84edc0d7..f6be6449 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-311.pyc index e88f2ece..cf923c5b 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/macromanprober.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/macromanprober.cpython-311.pyc index f2686fbd..b790d217 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/macromanprober.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/macromanprober.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-311.pyc index 317bad59..621390a4 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-311.pyc index 40145f90..48988b11 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-311.pyc and 
b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-311.pyc index bb1e7710..1d50ab0a 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/resultdict.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/resultdict.cpython-311.pyc index 325501d2..33a8224a 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/resultdict.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/resultdict.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-311.pyc index 64a9fcfa..81a6bfce 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-311.pyc index fc8a9538..0d42271e 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-311.pyc index 5c10b5ad..8beb3bc3 100644 Binary files 
a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-311.pyc index 2d04c5ec..922fa49e 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/utf1632prober.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/utf1632prober.cpython-311.pyc index e2d11ab2..c49be8c8 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/utf1632prober.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/utf1632prober.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-311.pyc index 447e3aa8..ce6d6dbd 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-311.pyc index 14437975..b39414b1 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-311.pyc index 91f0e0f2..a79c98c6 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-311.pyc index fad02d3a..42adf912 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-311.pyc index 776c38d6..fb5cc2de 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-311.pyc index cf483e35..1b1cf2b5 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-311.pyc index b9e92109..d439e4e9 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-311.pyc index c05d4c3a..3ebc3eb1 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/idna/__pycache__/core.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/idna/__pycache__/core.cpython-311.pyc index e29858b4..6f6bfad9 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/idna/__pycache__/core.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/idna/__pycache__/core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-311.pyc index 4a5e2d78..4ac90d8f 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-311.pyc index bb6c2fd1..a73f200f 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-311.pyc index ba96648a..8cfaf1c2 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-311.pyc index cb736050..44412e70 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-311.pyc index e630e00c..83ed2c87 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-311.pyc index f3bc2e53..92b3dfad 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-311.pyc index 964c6737..5330e1dd 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-311.pyc index f001b657..8e08b147 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-311.pyc index de05b104..6c374c18 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-311.pyc index c7dc494e..0f87785a 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-311.pyc index abbc3866..bfa0177d 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-311.pyc index 923a2654..6675d6ec 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-311.pyc index d09e347a..cc554cff 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-311.pyc index 45a5dc9d..7ced6896 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-311.pyc index 6b0f7cbd..53b181b3 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-311.pyc index 0bee20e4..cb845f9b 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-311.pyc index ce47bb28..1bd0d171 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-311.pyc index 210c17f5..be718ee5 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-311.pyc and 
b/.venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-311.pyc index be5da6e6..999406d6 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/platformdirs/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/platformdirs/__pycache__/__init__.cpython-311.pyc index 7c78419d..7e0cafbe 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/platformdirs/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/platformdirs/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/platformdirs/__pycache__/api.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/platformdirs/__pycache__/api.cpython-311.pyc index ebabbdc6..9124c5f2 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/platformdirs/__pycache__/api.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/platformdirs/__pycache__/api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-311.pyc index e090a128..e2fdefab 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/platformdirs/__pycache__/windows.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/platformdirs/__pycache__/windows.cpython-311.pyc index 84b99f2e..69741aa0 100644 Binary files 
a/.venv/Lib/site-packages/pip/_vendor/platformdirs/__pycache__/windows.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/platformdirs/__pycache__/windows.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/__init__.cpython-311.pyc index 1ceffd59..9cdc2efa 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/filter.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/filter.cpython-311.pyc index b862dbbb..d5c235e7 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/filter.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/filter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/lexer.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/lexer.cpython-311.pyc index 9c103677..87c92b5f 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/lexer.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/lexer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/modeline.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/modeline.cpython-311.pyc index 3b7c960c..2f8b50b9 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/modeline.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/modeline.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/plugin.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/plugin.cpython-311.pyc index c032cff7..74ef1b10 100644 Binary files 
a/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/plugin.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/plugin.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/regexopt.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/regexopt.cpython-311.pyc index 2657a337..932e544c 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/regexopt.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/regexopt.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/style.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/style.cpython-311.pyc index 1ae979ca..ec3365d2 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/style.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/style.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/token.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/token.cpython-311.pyc index 91e3b2d4..39623fab 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/token.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/token.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/util.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/util.cpython-311.pyc index e3f6ad98..0c91bb63 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/util.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pygments/__pycache__/util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pygments/filters/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pygments/filters/__pycache__/__init__.cpython-311.pyc index 09023f77..8727b046 100644 Binary files 
a/.venv/Lib/site-packages/pip/_vendor/pygments/filters/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pygments/filters/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-311.pyc index 3ef2bf91..047252e0 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-311.pyc index 6aaa35ab..9c663e43 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pygments/styles/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pygments/styles/__pycache__/__init__.cpython-311.pyc index e8492b92..560ad42a 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pygments/styles/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pygments/styles/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/__init__.cpython-311.pyc index 268ced73..546d2ab9 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/actions.cpython-311.pyc 
b/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/actions.cpython-311.pyc index 48330d84..cf6e5f82 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/actions.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/actions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/common.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/common.cpython-311.pyc index d9db85bd..b848ca97 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/common.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/core.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/core.cpython-311.pyc index 7b728155..81723aa5 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/core.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/exceptions.cpython-311.pyc index 24770b6c..2444bb80 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/helpers.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/helpers.cpython-311.pyc index 25372752..c6e90259 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/helpers.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/helpers.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/results.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/results.cpython-311.pyc index abe041ec..7bec3f41 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/results.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/results.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/testing.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/testing.cpython-311.pyc index 5ab96daa..ddfb455a 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/testing.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/testing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/unicode.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/unicode.cpython-311.pyc index 180d6491..a17fd331 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/unicode.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/unicode.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/util.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/util.cpython-311.pyc index 8959cf04..86b1f7dd 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/util.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pyparsing/__pycache__/util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-311.pyc index fa027740..fdbb320d 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-311.pyc index 780002ed..6b327b64 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-311.pyc index 46000aa8..11d75b1e 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-311.pyc index 77b132d7..cfd3e2a0 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-311.pyc index 3f90558b..891670d8 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-311.pyc 
b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-311.pyc index b079d58f..d885f5d9 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-311.pyc index a4270a5a..1bd58242 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/api.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/api.cpython-311.pyc index de61f5ba..98fb2d07 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/api.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-311.pyc index dff3af49..b53752d9 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-311.pyc index 0f0017e0..7df16c04 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-311.pyc 
b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-311.pyc index ca283e58..fc8f2e18 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-311.pyc index e8d2fe8f..09658348 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-311.pyc index b774abe3..f989e89d 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-311.pyc index 73e85888..1c2140ea 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/models.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/models.cpython-311.pyc index 60ad99bf..6d3246b8 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/models.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/models.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-311.pyc 
b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-311.pyc index ba360c1c..800e991e 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-311.pyc index dc47ad01..1a6563e0 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-311.pyc index 3ab0c78c..013c8fc2 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-311.pyc index c3ff0ef7..4a0a3275 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-311.pyc index b89f9591..f900610c 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-311.pyc index 311928b7..235b2635 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-311.pyc index 67f971af..cef02ded 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-311.pyc index f11d4d71..dfef30d1 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/resolvers.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/resolvers.cpython-311.pyc index 8f11ff9e..bcc2b6a6 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/resolvers.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/resolvers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-311.pyc index 27888267..cee0e45c 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-311.pyc and 
b/.venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-311.pyc index 267d130e..c7d6a28d 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-311.pyc index 343941e9..a8b37766 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/__init__.cpython-311.pyc index f31fd0a4..cafa778c 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_cell_widths.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_cell_widths.cpython-311.pyc index 39771bee..e2570b99 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_cell_widths.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_cell_widths.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-311.pyc index 811f0969..95645990 100644 
Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_emoji_replace.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_emoji_replace.cpython-311.pyc index fffd6f8d..d6a772f7 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_emoji_replace.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_emoji_replace.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_export_format.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_export_format.cpython-311.pyc index f26d388a..6798fc8b 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_export_format.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_export_format.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_extension.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_extension.cpython-311.pyc index bdadd151..319bef45 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_extension.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_extension.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_fileno.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_fileno.cpython-311.pyc index 3bc93e6f..350eb5a9 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_fileno.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_fileno.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_log_render.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_log_render.cpython-311.pyc index 94e89ba3..4d461155 
100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_log_render.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_log_render.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_loop.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_loop.cpython-311.pyc index 26ad628b..122d4b32 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_loop.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_loop.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_null_file.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_null_file.cpython-311.pyc index ff3c2b1b..301c6ec4 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_null_file.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_null_file.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_palettes.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_palettes.cpython-311.pyc index cf500d38..d671ccf8 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_palettes.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_palettes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_pick.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_pick.cpython-311.pyc index 4db4dc9b..9f5feed7 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_pick.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_pick.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_ratio.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_ratio.cpython-311.pyc index 29666f51..d4fa6f92 100644 Binary files 
a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_ratio.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_ratio.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_spinners.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_spinners.cpython-311.pyc index b0264b5e..30ee13c1 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_spinners.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_spinners.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_win32_console.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_win32_console.cpython-311.pyc index faee4357..2a4d1583 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_win32_console.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_win32_console.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_windows.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_windows.cpython-311.pyc index ed044684..54c79220 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_windows.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_windows.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_wrap.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_wrap.cpython-311.pyc index bbb4257b..39cc73cf 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_wrap.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/_wrap.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/abc.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/abc.cpython-311.pyc index 571224e7..eeb4e2ec 100644 Binary files 
a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/abc.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/abc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/align.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/align.cpython-311.pyc index e5036d91..646da4a1 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/align.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/align.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/ansi.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/ansi.cpython-311.pyc index ecd7050d..b1d2c336 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/ansi.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/ansi.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/box.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/box.cpython-311.pyc index 5ef5225e..d03e8f5f 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/box.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/box.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/cells.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/cells.cpython-311.pyc index 32deac8c..d2ea022f 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/cells.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/cells.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/color.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/color.cpython-311.pyc index 70f7a752..3377b82f 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/color.cpython-311.pyc and 
b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/color.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/color_triplet.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/color_triplet.cpython-311.pyc index aab6919f..922c52af 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/color_triplet.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/color_triplet.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/columns.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/columns.cpython-311.pyc index 7b52e0f0..112fab38 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/columns.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/columns.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/console.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/console.cpython-311.pyc index 9236ba8d..3d25238b 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/console.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/console.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/constrain.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/constrain.cpython-311.pyc index af1f13e0..b9e19b4e 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/constrain.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/constrain.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/containers.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/containers.cpython-311.pyc index 0b4636de..96815279 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/containers.cpython-311.pyc and 
b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/containers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/control.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/control.cpython-311.pyc index 8d87ca7a..8c876970 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/control.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/control.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/default_styles.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/default_styles.cpython-311.pyc index aa1abaa0..df45f6c0 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/default_styles.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/default_styles.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/emoji.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/emoji.cpython-311.pyc index 6022407f..658905dc 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/emoji.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/emoji.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/errors.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/errors.cpython-311.pyc index b4aa0a32..e6048eb0 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/errors.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/errors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/file_proxy.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/file_proxy.cpython-311.pyc index 3164f419..a3889513 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/file_proxy.cpython-311.pyc and 
b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/file_proxy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/filesize.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/filesize.cpython-311.pyc index b2fa26b2..809c8574 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/filesize.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/filesize.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/highlighter.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/highlighter.cpython-311.pyc index fd90414f..3c869b41 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/highlighter.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/highlighter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/jupyter.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/jupyter.cpython-311.pyc index 42b65766..f52a7473 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/jupyter.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/jupyter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/live.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/live.cpython-311.pyc index 40126967..b49e2432 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/live.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/live.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/live_render.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/live_render.cpython-311.pyc index c58d621b..fd0d6db7 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/live_render.cpython-311.pyc and 
b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/live_render.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/logging.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/logging.cpython-311.pyc index 8f68977a..336e71f5 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/logging.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/logging.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/markup.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/markup.cpython-311.pyc index dae04d35..7fffb969 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/markup.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/markup.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/measure.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/measure.cpython-311.pyc index 74ce5196..0d14bcea 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/measure.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/measure.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/padding.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/padding.cpython-311.pyc index b40165d2..e520335d 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/padding.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/padding.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/pager.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/pager.cpython-311.pyc index b0d5bbf4..34ed230c 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/pager.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/pager.cpython-311.pyc differ diff 
--git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/palette.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/palette.cpython-311.pyc index b893cd92..d960c9c8 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/palette.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/palette.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/panel.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/panel.cpython-311.pyc index 0ece5e74..8394f6a4 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/panel.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/panel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/pretty.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/pretty.cpython-311.pyc index 64d7172a..9edf0ad4 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/pretty.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/pretty.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/progress.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/progress.cpython-311.pyc index 1bbca557..c93a4ac1 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/progress.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/progress.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/progress_bar.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/progress_bar.cpython-311.pyc index 664e0e00..66d58615 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/progress_bar.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/progress_bar.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/protocol.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/protocol.cpython-311.pyc index c978beea..42a93898 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/protocol.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/protocol.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/region.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/region.cpython-311.pyc index e88d88c1..9f1de3cc 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/region.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/region.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/repr.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/repr.cpython-311.pyc index 743b8881..085633fe 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/repr.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/repr.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/scope.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/scope.cpython-311.pyc index 8a585d90..7bdbd361 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/scope.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/scope.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/screen.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/screen.cpython-311.pyc index c6be5c12..d4590bd7 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/screen.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/screen.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/segment.cpython-311.pyc 
b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/segment.cpython-311.pyc index 355798cf..ebe76475 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/segment.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/segment.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/spinner.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/spinner.cpython-311.pyc index 2feb0a90..586a22f9 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/spinner.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/spinner.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/style.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/style.cpython-311.pyc index ba94bc99..f7a72c35 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/style.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/style.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/styled.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/styled.cpython-311.pyc index 9e2ccd49..58aba5b6 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/styled.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/styled.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/syntax.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/syntax.cpython-311.pyc index 048b1ffd..c5d2abb1 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/syntax.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/syntax.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/table.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/table.cpython-311.pyc index 347dd1b9..d5051e43 
100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/table.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/table.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/terminal_theme.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/terminal_theme.cpython-311.pyc index 8a71f590..7c86bae6 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/terminal_theme.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/terminal_theme.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/text.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/text.cpython-311.pyc index fb728df6..a741d2d2 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/text.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/text.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/theme.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/theme.cpython-311.pyc index 2ec260f4..19693d80 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/theme.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/theme.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/themes.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/themes.cpython-311.pyc index 3af14906..530d0b41 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/themes.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/themes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/traceback.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/traceback.cpython-311.pyc index 9896e41b..1db45edf 100644 Binary files 
a/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/traceback.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/rich/__pycache__/traceback.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/__init__.cpython-311.pyc index 66c26157..1606b01a 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/_asyncio.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/_asyncio.cpython-311.pyc index f00d9158..9502b294 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/_asyncio.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/_asyncio.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/_utils.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/_utils.cpython-311.pyc index cfb0863a..89eb46b9 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/_utils.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/after.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/after.cpython-311.pyc index e1c3c982..e4c528d1 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/after.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/after.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/before.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/before.cpython-311.pyc index 009870b3..cbf89615 100644 Binary files 
a/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/before.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/before.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/before_sleep.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/before_sleep.cpython-311.pyc index 74e17e5e..8b1f4c39 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/before_sleep.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/before_sleep.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/nap.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/nap.cpython-311.pyc index f9e827e2..70af64de 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/nap.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/nap.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/retry.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/retry.cpython-311.pyc index e93f9b1c..19bcd27d 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/retry.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/retry.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/stop.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/stop.cpython-311.pyc index d6e28070..515867c0 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/stop.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/stop.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/wait.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/wait.cpython-311.pyc index 4f51a6b9..433a3990 100644 Binary files 
a/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/wait.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/wait.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/tomli/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/tomli/__pycache__/__init__.cpython-311.pyc index b65483b0..dc94be2b 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/tomli/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/tomli/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/tomli/__pycache__/_parser.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/tomli/__pycache__/_parser.cpython-311.pyc index d56c5250..470218a2 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/tomli/__pycache__/_parser.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/tomli/__pycache__/_parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/tomli/__pycache__/_re.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/tomli/__pycache__/_re.cpython-311.pyc index c8ff3118..1e97d6b8 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/tomli/__pycache__/_re.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/tomli/__pycache__/_re.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/tomli/__pycache__/_types.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/tomli/__pycache__/_types.cpython-311.pyc index 4b7b10ce..399360a7 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/tomli/__pycache__/_types.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/tomli/__pycache__/_types.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-311.pyc index df2d74f3..8f2f1f52 100644 Binary files 
a/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-311.pyc index cc19cec5..e8695cbb 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/_version.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/_version.cpython-311.pyc index 867a4c2a..4cf2dc04 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/_version.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/_version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-311.pyc index 11c119cb..a232607a 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-311.pyc index bde60989..3144a664 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-311.pyc 
b/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-311.pyc index e4aecad9..3bcd7925 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-311.pyc index 8d4b3715..65d8816e 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-311.pyc index 7ae4a50a..9b6bd840 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-311.pyc index ef71e844..b5d9204f 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-311.pyc index 34b104ff..c680bfee 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-311.pyc index b6028584..ed94d131 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-311.pyc index 01ae2eea..c86ec15f 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-311.pyc index 51e84f38..238b28b5 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-311.pyc index 856025c5..1ebc6046 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-311.pyc index 4725fddb..d2d3af5a 100644 Binary files 
a/.venv/Lib/site-packages/pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/packages/__pycache__/six.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/packages/__pycache__/six.cpython-311.pyc index ab8d2be5..c8d55ae6 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/packages/__pycache__/six.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/packages/__pycache__/six.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/__init__.cpython-311.pyc index 44340f8f..d7dfa678 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/connection.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/connection.cpython-311.pyc index e7c4e60b..ad15a2e8 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/connection.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/connection.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/proxy.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/proxy.cpython-311.pyc index c76f4082..44db5b49 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/proxy.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/proxy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/queue.cpython-311.pyc 
b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/queue.cpython-311.pyc index f759afcb..43d30755 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/queue.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/queue.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/request.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/request.cpython-311.pyc index cc29c099..12a43f92 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/request.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/request.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/response.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/response.cpython-311.pyc index f7d3aa89..7c7decf8 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/response.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/response.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/retry.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/retry.cpython-311.pyc index 0cc6b169..03dfa366 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/retry.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/retry.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-311.pyc index acec9698..eae4a983 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_match_hostname.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_match_hostname.cpython-311.pyc index fbc91704..016dea6b 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_match_hostname.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_match_hostname.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-311.pyc index 69f5c4c6..6889456a 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/timeout.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/timeout.cpython-311.pyc index 280961bb..f4571caf 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/timeout.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/timeout.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/url.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/url.cpython-311.pyc index a8aada5b..593a9128 100644 Binary files a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/url.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/url.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/wait.cpython-311.pyc b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/wait.cpython-311.pyc index 4822c97e..5b5cfa5c 100644 Binary files 
a/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/wait.cpython-311.pyc and b/.venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/wait.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/__pycache__/__init__.cpython-311.pyc index 41857e7c..be284c42 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/__pycache__/__init__.cpython-311.pyc index 2b2fd0ca..43a598ad 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/backports/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/backports/__pycache__/__init__.cpython-311.pyc index bdc09cc8..cf7f82ac 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/backports/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/backports/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/backports/__pycache__/tarfile.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/backports/__pycache__/tarfile.cpython-311.pyc index 386a4ae5..55e31675 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/backports/__pycache__/tarfile.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/backports/__pycache__/tarfile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/jaraco/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/pkg_resources/_vendor/jaraco/__pycache__/__init__.cpython-311.pyc index 6520d093..a22dc3bc 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/jaraco/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/jaraco/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/jaraco/__pycache__/context.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/jaraco/__pycache__/context.cpython-311.pyc index ace2b514..eb8badb2 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/jaraco/__pycache__/context.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/jaraco/__pycache__/context.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/jaraco/functools/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/jaraco/functools/__pycache__/__init__.cpython-311.pyc index 4990aeee..6f2cadc0 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/jaraco/functools/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/jaraco/functools/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/jaraco/text/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/jaraco/text/__pycache__/__init__.cpython-311.pyc index e9c15c65..9972dc29 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/jaraco/text/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/jaraco/text/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/more_itertools/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/more_itertools/__pycache__/__init__.cpython-311.pyc index c090e41c..836665f0 100644 Binary files 
a/.venv/Lib/site-packages/pkg_resources/_vendor/more_itertools/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/more_itertools/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/more_itertools/__pycache__/more.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/more_itertools/__pycache__/more.cpython-311.pyc index 65de6d67..97e11aad 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/more_itertools/__pycache__/more.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/more_itertools/__pycache__/more.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/more_itertools/__pycache__/recipes.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/more_itertools/__pycache__/recipes.cpython-311.pyc index a8d9f359..f9fe590d 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/more_itertools/__pycache__/recipes.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/more_itertools/__pycache__/recipes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-311.pyc index f9b9ace1..5d285238 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_elffile.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_elffile.cpython-311.pyc index 2818928d..e5c3db83 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_elffile.cpython-311.pyc and 
b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_elffile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_manylinux.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_manylinux.cpython-311.pyc index b1c5b811..5e8cd548 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_manylinux.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_manylinux.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_musllinux.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_musllinux.cpython-311.pyc index 99799428..0bd365f9 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_musllinux.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_musllinux.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_parser.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_parser.cpython-311.pyc index a50a5c83..2ee2be50 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_parser.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-311.pyc index cdacad08..32f255f2 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_tokenizer.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_tokenizer.cpython-311.pyc index ddf269a1..99d87e3d 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_tokenizer.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_tokenizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/markers.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/markers.cpython-311.pyc index 5646f5ef..d7ceef2a 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/markers.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/markers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-311.pyc index 3e4cc499..c19d207a 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-311.pyc index d14fb926..1fca9496 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/tags.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/tags.cpython-311.pyc 
index 81ef1686..e8b8b431 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/tags.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/tags.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/utils.cpython-311.pyc index c59f8abc..3ee69bc0 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/version.cpython-311.pyc index 7288721c..8d6ca262 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/version.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/__pycache__/__init__.cpython-311.pyc index f5d0667a..68763293 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/__pycache__/api.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/__pycache__/api.cpython-311.pyc index 28a52cf0..e40223c0 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/__pycache__/api.cpython-311.pyc and 
b/.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/__pycache__/api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/__pycache__/version.cpython-311.pyc index 9fe507c4..5d1cecdf 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/__pycache__/version.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/__pycache__/windows.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/__pycache__/windows.cpython-311.pyc index 6895cabe..3cac04a4 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/__pycache__/windows.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/__pycache__/windows.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pkg_resources/extern/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pkg_resources/extern/__pycache__/__init__.cpython-311.pyc index 0e6c3eb4..254ecaf7 100644 Binary files a/.venv/Lib/site-packages/pkg_resources/extern/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pkg_resources/extern/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/preshed/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/preshed/__pycache__/__init__.cpython-311.pyc index 46a40a44..f226d0b7 100644 Binary files a/.venv/Lib/site-packages/preshed/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/preshed/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/preshed/__pycache__/about.cpython-311.pyc b/.venv/Lib/site-packages/preshed/__pycache__/about.cpython-311.pyc index 36c85a0e..c111e009 100644 Binary files 
a/.venv/Lib/site-packages/preshed/__pycache__/about.cpython-311.pyc and b/.venv/Lib/site-packages/preshed/__pycache__/about.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/proglog-0.1.10.dist-info/INSTALLER b/.venv/Lib/site-packages/proglog-0.1.10.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/Lib/site-packages/proglog-0.1.10.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/Lib/site-packages/proglog-0.1.10.dist-info/LICENSE b/.venv/Lib/site-packages/proglog-0.1.10.dist-info/LICENSE new file mode 100644 index 00000000..ace49c07 --- /dev/null +++ b/.venv/Lib/site-packages/proglog-0.1.10.dist-info/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Edinburgh Genome Foundry + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/.venv/Lib/site-packages/proglog-0.1.10.dist-info/METADATA b/.venv/Lib/site-packages/proglog-0.1.10.dist-info/METADATA new file mode 100644 index 00000000..69edf2dc --- /dev/null +++ b/.venv/Lib/site-packages/proglog-0.1.10.dist-info/METADATA @@ -0,0 +1,34 @@ +Metadata-Version: 2.1 +Name: proglog +Version: 0.1.10 +Summary: Log and progress bar manager for console, notebooks, web... +Author: Zulko +License: MIT +Keywords: logger log progress bar +Platform: UNKNOWN +License-File: LICENSE +Requires-Dist: tqdm + +Proglog +======= + +Proglog is a progress logging system for Python. It allows to build complex +libraries while giving the user control on the management of logs, callbacks and progress bars. + + +Infos +----- + +**PIP installation:** + +.. code:: bash + + pip install proglog + +**Github Page** + +``_ + +**License:** MIT, Copyright Edinburgh Genome Foundry + + diff --git a/.venv/Lib/site-packages/proglog-0.1.10.dist-info/RECORD b/.venv/Lib/site-packages/proglog-0.1.10.dist-info/RECORD new file mode 100644 index 00000000..f7c58649 --- /dev/null +++ b/.venv/Lib/site-packages/proglog-0.1.10.dist-info/RECORD @@ -0,0 +1,12 @@ +proglog-0.1.10.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +proglog-0.1.10.dist-info/LICENSE,sha256=akVjFR4jDJtNakzf3avomKdfddMyA9WHJ5aVV08efxY,1081 +proglog-0.1.10.dist-info/METADATA,sha256=7HqXAeqQ_WotmBFY28nag6MAHWRdXYCY0x1Xmv49hKI,639 +proglog-0.1.10.dist-info/RECORD,, +proglog-0.1.10.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +proglog-0.1.10.dist-info/top_level.txt,sha256=KeCzTcuXoZxru4GWsCpIwgVBfHdDrI0ujnDKu61sFXE,8 +proglog/__init__.py,sha256=m68k-TnqoChy0FjkHWqSfPuEiQgl69Cfs-N4Kp7k-tU,302 +proglog/__pycache__/__init__.cpython-311.pyc,, +proglog/__pycache__/proglog.cpython-311.pyc,, +proglog/__pycache__/version.cpython-311.pyc,, +proglog/proglog.py,sha256=qWQSpWM-pR_pyh5vrrlVBU094bkatfj9prbz5PJpK7U,13250 
+proglog/version.py,sha256=z0zCHFTcKSR0tJ6h5qrpNmRVP21QIPP8N0p7quCnnm0,23 diff --git a/.venv/Lib/site-packages/proglog-0.1.10.dist-info/WHEEL b/.venv/Lib/site-packages/proglog-0.1.10.dist-info/WHEEL new file mode 100644 index 00000000..becc9a66 --- /dev/null +++ b/.venv/Lib/site-packages/proglog-0.1.10.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/Lib/site-packages/proglog-0.1.10.dist-info/top_level.txt b/.venv/Lib/site-packages/proglog-0.1.10.dist-info/top_level.txt new file mode 100644 index 00000000..efd532dc --- /dev/null +++ b/.venv/Lib/site-packages/proglog-0.1.10.dist-info/top_level.txt @@ -0,0 +1 @@ +proglog diff --git a/.venv/Lib/site-packages/proglog/__init__.py b/.venv/Lib/site-packages/proglog/__init__.py new file mode 100644 index 00000000..f15abb41 --- /dev/null +++ b/.venv/Lib/site-packages/proglog/__init__.py @@ -0,0 +1,9 @@ +""" geneblocks/__init__.py """ + +# __all__ = [] + +from .proglog import (ProgressLogger, ProgressBarLogger, TqdmProgressBarLogger, + notebook, RqWorkerProgressLogger, RqWorkerBarLogger, + MuteProgressBarLogger, default_bar_logger) + +from .version import __version__ diff --git a/.venv/Lib/site-packages/proglog/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/proglog/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..f5f9d988 Binary files /dev/null and b/.venv/Lib/site-packages/proglog/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/proglog/__pycache__/proglog.cpython-311.pyc b/.venv/Lib/site-packages/proglog/__pycache__/proglog.cpython-311.pyc new file mode 100644 index 00000000..712b8bcf Binary files /dev/null and b/.venv/Lib/site-packages/proglog/__pycache__/proglog.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/proglog/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/proglog/__pycache__/version.cpython-311.pyc new file mode 
100644 index 00000000..02572967 Binary files /dev/null and b/.venv/Lib/site-packages/proglog/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/proglog/proglog.py b/.venv/Lib/site-packages/proglog/proglog.py new file mode 100644 index 00000000..a3d9e6ad --- /dev/null +++ b/.venv/Lib/site-packages/proglog/proglog.py @@ -0,0 +1,391 @@ +"""Implements the generic progress logger class, and the ProgressBar class. +""" + +from tqdm import tqdm, tqdm_notebook +from collections import OrderedDict +import time + +SETTINGS = { + 'notebook': False +} + +def notebook(turn='on'): + SETTINGS['notebook'] = True if (turn == 'on') else False + +def troncate_string(s, max_length=25): + return s if (len(s) < max_length) else (s[:max_length] + "...") + +class ProgressLogger: + """Generic class for progress loggers. + + A progress logger contains a "state" dictionnary. + + Parameters + ---------- + + init_state + Dictionnary representing the initial state. + """ + + def __init__(self, init_state=None): + + self.state = {} + self.stored = {} + self.logs = [] + self.log_indent = 0 + if init_state is not None: + self.state.update(init_state) + + def log(self, message): + self.logs.append((' ' * self.log_indent) + message) + + def dump_logs(self, filepath=None): + if filepath is not None: + with open(filepath, 'a') as f: + f.write("\n".join(self.logs)) + else: + return "\n".join(self.logs) + + def callback(self, **kw): + """Execute something after the state has been updated by the given + state elements. + + This default callback does nothing, overwrite it by subclassing + """ + pass + + def store(self, **kw): + """Store objects in the logger and trigger ``self.store_callback``. + + This works exactly like ``logger()``, but the later is meant for simple + data objects (text, numbers) that will be sent over the network or + written to a file. 
The ``store`` method expects rather large objects + which are not necessarily serializable, and will be used eg to draw + plots on the fly. + """ + self.stored.update(kw) + self.store_callback(**kw) + + def store_callback(self, **kw): + """Execute something after the store has been updated by the given + state elements. + + This default callback does nothing, overwrite it by subclassing + """ + pass + + def iter(self, **kw): + """Iterate through a list while updating the state. + + Examples + -------- + + >>> for username in logger.iter(user=['tom', 'tim', 'lea']: + >>> # At every loop, logger.state['user'] is updated + >>> print (username) + + """ + for field, iterable in kw.items(): + for it in iterable: + self(**{field: it}) + yield it + + + + + def __call__(self, **kw): + self.state.update(kw) + self.callback(**kw) + +class ProgressBarLogger(ProgressLogger): + """Generic class for progress loggers. + + A progress logger contains a "state" dictionnary + + Parameters + ---------- + + init_state + Initial state of the logger + + bars + Either None (will be initialized with no bar) or a list/tuple of bar + names (``['main', 'sub']``) which will be initialized with index -1 and + no total, or a dictionary (possibly ordered) of bars, of the form + ``{bar_1: {title: 'bar1', index: 2, total:23}, bar_2: {...}}`` + + ignored_bars + Either None (newly met bars will be added) or a list of blacklisted bar + names, or ``'all_others'`` to signify that all bar names not already in + ``self.bars`` will be ignored. 
+ """ + + bar_indent = 2 + + def __init__(self, init_state=None, bars=None, ignored_bars=None, + logged_bars='all', min_time_interval=0, ignore_bars_under=0): + ProgressLogger.__init__(self, init_state) + if bars is None: + bars = OrderedDict() + elif isinstance(bars, (list, tuple)): + bars = OrderedDict([ + (b, dict(title=b, index=-1, total=None, message=None, + indent=0)) + for b in bars + ]) + if isinstance(ignored_bars, (list, tuple)): + ignored_bars = set(ignored_bars) + self.ignored_bars = ignored_bars + self.logged_bars = logged_bars + self.state['bars'] = bars + self.min_time_interval = min_time_interval + self.ignore_bars_under = ignore_bars_under + + @property + def bars(self): + """Return ``self.state['bars'].``""" + return self.state['bars'] + + def bar_is_ignored(self, bar): + if self.ignored_bars is None: + return False + elif self.ignored_bars == 'all_others': + return (bar not in self.bars) + else: + return bar in self.ignored_bars + + def bar_is_logged(self, bar): + if (not self.logged_bars): + return False + elif self.logged_bars == 'all': + return True + else: + return bar in self.logged_bars + + def iterable_is_too_short(self, iterable): + length = len(iterable) if hasattr(iterable, '__len__') else None + return (length is not None) and (length < self.ignore_bars_under) + + def iter_bar(self, bar_prefix='', **kw): + """Iterate through a list while updating a state bar. 
+ + Examples + -------- + >>> for username in logger.iter_bar(user=['tom', 'tim', 'lea']): + >>> # At every loop, logger.state['bars']['user'] is updated + >>> # to {index: i, total: 3, title:'user'} + >>> print (username) + + """ + if 'bar_message' in kw: + bar_message = kw.pop('bar_message') + else: + bar_message = None + bar, iterable = kw.popitem() + + if self.bar_is_ignored(bar) or self.iterable_is_too_short(iterable): + return iterable + bar = bar_prefix + bar + if hasattr(iterable, '__len__'): + self(**{bar + '__total': len(iterable)}) + + def new_iterable(): + last_time = time.time() + i = 0 # necessary in case the iterator is empty + for i, it in enumerate(iterable): + now_time = time.time() + if (i == 0) or (now_time - last_time > self.min_time_interval): + if bar_message is not None: + self(**{bar + '__message': bar_message(it)}) + self(**{bar + '__index': i}) + last_time = now_time + yield it + + if self.bars[bar]['index'] != i: + self(**{bar + '__index': i}) + self(**{bar + '__index': i + 1}) + + return new_iterable() + + def bars_callback(self, bar, attr, value, old_value=None): + """Execute a custom action after the progress bars are updated. + + Parameters + ---------- + bar + Name/ID of the bar to be modified. + + attr + Attribute of the bar attribute to be modified + + value + New value of the attribute + + old_value + Previous value of this bar's attribute. + + This default callback does nothing, overwrite it by subclassing. 
+ """ + pass + + def __call__(self, **kw): + + items = sorted(kw.items(), key=lambda kv: not kv[0].endswith('total')) + + for key, value in items: + if '__' in key: + bar, attr = key.split('__') + if self.bar_is_ignored(bar): + continue + kw.pop(key) + if bar not in self.bars: + self.bars[bar] = dict(title=bar, index=-1, + total=None, message=None) + old_value = self.bars[bar][attr] + + if self.bar_is_logged(bar): + new_bar = (attr == 'index') and (value < old_value) + if (attr == 'total') or (new_bar): + self.bars[bar]['indent'] = self.log_indent + else: + self.log_indent = self.bars[bar]['indent'] + self.log("[%s] %s: %s" % (bar, attr, value)) + self.log_indent += self.bar_indent + self.bars[bar][attr] = value + self.bars_callback(bar, attr, value, old_value) + self.state.update(kw) + self.callback(**kw) + +class TqdmProgressBarLogger(ProgressBarLogger): + """Tqdm-powered progress bar for console or Notebooks. + + Parameters + ---------- + init_state + Initial state of the logger + + bars + Either None (will be initialized with no bar) or a list/tuple of bar + names (``['main', 'sub']``) which will be initialized with index -1 and + no total, or a dictionary (possibly ordered) of bars, of the form + ``{bar_1: {title: 'bar1', index: 2, total:23}, bar_2: {...}}`` + + ignored_bars + Either None (newly met bars will be added) or a list of blacklisted bar + names, or ``'all_others'`` to signify that all bar names not already in + ``self.bars`` will be ignored. + + + leave_bars + + notebook + True will make the bars look nice (HTML) in the jupyter notebook. It is + advised to leave to 'default' as the default can be globally set from + inside a notebook with ``import proglog; proglog.notebook_mode()``. 
+ + print_messages + If True, every ``logger(message='something')`` will print a message in + the console / notebook + """ + + def __init__(self, init_state=None, bars=None, leave_bars=False, + ignored_bars=None, logged_bars='all', notebook='default', + print_messages=True, min_time_interval=0, + ignore_bars_under=0): + ProgressBarLogger.__init__(self, init_state=init_state, bars=bars, + ignored_bars=ignored_bars, + logged_bars=logged_bars, + ignore_bars_under=ignore_bars_under, + min_time_interval=min_time_interval) + self.leave_bars = leave_bars + self.tqdm_bars = OrderedDict([ + (bar, None) + for bar in self.bars + ]) + if notebook == 'default': + notebook = SETTINGS['notebook'] + self.notebook = notebook + self.print_messages = print_messages + self.tqdm = (tqdm_notebook if self.notebook else tqdm) + + def new_tqdm_bar(self, bar): + """Create a new tqdm bar, possibly replacing an existing one.""" + if (bar in self.tqdm_bars) and (self.tqdm_bars[bar] is not None): + self.close_tqdm_bar(bar) + infos = self.bars[bar] + self.tqdm_bars[bar] = self.tqdm( + total=infos['total'], + desc=infos['title'], + postfix=dict(now=troncate_string(str(infos['message']))), + leave=self.leave_bars + ) + def close_tqdm_bar(self, bar): + """Close and erase the tqdm bar""" + self.tqdm_bars[bar].close() + if not self.notebook: + self.tqdm_bars[bar] = None + + def bars_callback(self, bar, attr, value, old_value): + if (bar not in self.tqdm_bars) or (self.tqdm_bars[bar] is None): + self.new_tqdm_bar(bar) + if attr == 'index': + if value >= old_value: + total = self.bars[bar]['total'] + if total and (value >= total): + self.close_tqdm_bar(bar) + else: + self.tqdm_bars[bar].update(value - old_value) + else: + self.new_tqdm_bar(bar) + self.tqdm_bars[bar].update(value + 1) + elif attr == 'message': + self.tqdm_bars[bar].set_postfix(now=troncate_string(str(value))) + self.tqdm_bars[bar].update(0) + + def callback(self, **kw): + if self.print_messages and ('message' in kw) and kw['message']: + 
if self.notebook: + print(kw['message']) + else: + self.tqdm.write(kw['message']) + +class RqWorkerProgressLogger: + def __init__(self, job): + self.job = job + if 'progress_data' not in self.job.meta: + self.job.meta['progress_data'] = {} + self.job.save() + + def callback(self, **kw): + self.job.meta['progress_data'] = self.state + self.job.save() + +class RqWorkerBarLogger(RqWorkerProgressLogger, ProgressBarLogger): + + def __init__(self, job, init_state=None, bars=None, ignored_bars=(), + logged_bars='all', min_time_interval=0): + RqWorkerProgressLogger.__init__(self, job) + ProgressBarLogger.__init__(self, init_state=init_state, bars=bars, + ignored_bars=ignored_bars, + logged_bars=logged_bars, + min_time_interval=min_time_interval) + +class MuteProgressBarLogger(ProgressBarLogger): + + def bar_is_ignored(self, bar): + return True + +def default_bar_logger(logger, bars=None, ignored_bars=None, logged_bars='all', + min_time_interval=0, ignore_bars_under=0): + if logger == 'bar': + return TqdmProgressBarLogger( + bars=bars, + ignored_bars=ignored_bars, + logged_bars=logged_bars, + min_time_interval=min_time_interval, + ignore_bars_under=ignore_bars_under + ) + elif logger is None: + return MuteProgressBarLogger() + else: + return logger diff --git a/.venv/Lib/site-packages/proglog/version.py b/.venv/Lib/site-packages/proglog/version.py new file mode 100644 index 00000000..569b1212 --- /dev/null +++ b/.venv/Lib/site-packages/proglog/version.py @@ -0,0 +1 @@ +__version__ = "0.1.10" diff --git a/.venv/Lib/site-packages/psutil/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/psutil/__pycache__/__init__.cpython-311.pyc index 3fe77a82..5956ac9e 100644 Binary files a/.venv/Lib/site-packages/psutil/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/psutil/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/psutil/__pycache__/_common.cpython-311.pyc 
b/.venv/Lib/site-packages/psutil/__pycache__/_common.cpython-311.pyc index 7aa37e99..3ed20e72 100644 Binary files a/.venv/Lib/site-packages/psutil/__pycache__/_common.cpython-311.pyc and b/.venv/Lib/site-packages/psutil/__pycache__/_common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/psutil/__pycache__/_compat.cpython-311.pyc b/.venv/Lib/site-packages/psutil/__pycache__/_compat.cpython-311.pyc index ac2a32f6..9ef2ff7e 100644 Binary files a/.venv/Lib/site-packages/psutil/__pycache__/_compat.cpython-311.pyc and b/.venv/Lib/site-packages/psutil/__pycache__/_compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/psutil/__pycache__/_pswindows.cpython-311.pyc b/.venv/Lib/site-packages/psutil/__pycache__/_pswindows.cpython-311.pyc index a37db94f..1830d922 100644 Binary files a/.venv/Lib/site-packages/psutil/__pycache__/_pswindows.cpython-311.pyc and b/.venv/Lib/site-packages/psutil/__pycache__/_pswindows.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pycparser/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pycparser/__pycache__/__init__.cpython-311.pyc index 56794429..ed0776d4 100644 Binary files a/.venv/Lib/site-packages/pycparser/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pycparser/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pycparser/__pycache__/ast_transforms.cpython-311.pyc b/.venv/Lib/site-packages/pycparser/__pycache__/ast_transforms.cpython-311.pyc index bbc9e945..473685bc 100644 Binary files a/.venv/Lib/site-packages/pycparser/__pycache__/ast_transforms.cpython-311.pyc and b/.venv/Lib/site-packages/pycparser/__pycache__/ast_transforms.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pycparser/__pycache__/c_ast.cpython-311.pyc b/.venv/Lib/site-packages/pycparser/__pycache__/c_ast.cpython-311.pyc index 36f09522..a9d6824b 100644 Binary files a/.venv/Lib/site-packages/pycparser/__pycache__/c_ast.cpython-311.pyc and 
b/.venv/Lib/site-packages/pycparser/__pycache__/c_ast.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pycparser/__pycache__/c_lexer.cpython-311.pyc b/.venv/Lib/site-packages/pycparser/__pycache__/c_lexer.cpython-311.pyc index 964666f6..aa05201a 100644 Binary files a/.venv/Lib/site-packages/pycparser/__pycache__/c_lexer.cpython-311.pyc and b/.venv/Lib/site-packages/pycparser/__pycache__/c_lexer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pycparser/__pycache__/c_parser.cpython-311.pyc b/.venv/Lib/site-packages/pycparser/__pycache__/c_parser.cpython-311.pyc index 984cface..788f60df 100644 Binary files a/.venv/Lib/site-packages/pycparser/__pycache__/c_parser.cpython-311.pyc and b/.venv/Lib/site-packages/pycparser/__pycache__/c_parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pycparser/__pycache__/plyparser.cpython-311.pyc b/.venv/Lib/site-packages/pycparser/__pycache__/plyparser.cpython-311.pyc index 6e737abb..83f399c6 100644 Binary files a/.venv/Lib/site-packages/pycparser/__pycache__/plyparser.cpython-311.pyc and b/.venv/Lib/site-packages/pycparser/__pycache__/plyparser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pycparser/ply/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pycparser/ply/__pycache__/__init__.cpython-311.pyc index 93d95a51..eab17acd 100644 Binary files a/.venv/Lib/site-packages/pycparser/ply/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pycparser/ply/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pycparser/ply/__pycache__/lex.cpython-311.pyc b/.venv/Lib/site-packages/pycparser/ply/__pycache__/lex.cpython-311.pyc index 6c1124b6..0de5ed86 100644 Binary files a/.venv/Lib/site-packages/pycparser/ply/__pycache__/lex.cpython-311.pyc and b/.venv/Lib/site-packages/pycparser/ply/__pycache__/lex.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pycparser/ply/__pycache__/yacc.cpython-311.pyc 
b/.venv/Lib/site-packages/pycparser/ply/__pycache__/yacc.cpython-311.pyc index d37dc40a..7c0fa383 100644 Binary files a/.venv/Lib/site-packages/pycparser/ply/__pycache__/yacc.cpython-311.pyc and b/.venv/Lib/site-packages/pycparser/ply/__pycache__/yacc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pycrfsuite/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pycrfsuite/__pycache__/__init__.cpython-311.pyc index 88c60540..4a3f9b0c 100644 Binary files a/.venv/Lib/site-packages/pycrfsuite/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pycrfsuite/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pycrfsuite/__pycache__/_dumpparser.cpython-311.pyc b/.venv/Lib/site-packages/pycrfsuite/__pycache__/_dumpparser.cpython-311.pyc index 1196ccb9..5600a925 100644 Binary files a/.venv/Lib/site-packages/pycrfsuite/__pycache__/_dumpparser.cpython-311.pyc and b/.venv/Lib/site-packages/pycrfsuite/__pycache__/_dumpparser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pycrfsuite/__pycache__/_logparser.cpython-311.pyc b/.venv/Lib/site-packages/pycrfsuite/__pycache__/_logparser.cpython-311.pyc index 0ba69673..4705d13b 100644 Binary files a/.venv/Lib/site-packages/pycrfsuite/__pycache__/_logparser.cpython-311.pyc and b/.venv/Lib/site-packages/pycrfsuite/__pycache__/_logparser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/__pycache__/__init__.cpython-311.pyc index 73708baa..fefc743b 100644 Binary files a/.venv/Lib/site-packages/pydantic/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/__pycache__/_migration.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/__pycache__/_migration.cpython-311.pyc index ed2cb5e7..5f47431e 100644 Binary files 
a/.venv/Lib/site-packages/pydantic/__pycache__/_migration.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/__pycache__/_migration.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/__pycache__/aliases.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/__pycache__/aliases.cpython-311.pyc index 08cb9841..8b252469 100644 Binary files a/.venv/Lib/site-packages/pydantic/__pycache__/aliases.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/__pycache__/aliases.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/__pycache__/annotated_handlers.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/__pycache__/annotated_handlers.cpython-311.pyc index b7676383..eef0316a 100644 Binary files a/.venv/Lib/site-packages/pydantic/__pycache__/annotated_handlers.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/__pycache__/annotated_handlers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/__pycache__/config.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/__pycache__/config.cpython-311.pyc index 659afb6c..cb4684f5 100644 Binary files a/.venv/Lib/site-packages/pydantic/__pycache__/config.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/__pycache__/config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/__pycache__/errors.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/__pycache__/errors.cpython-311.pyc index d237e24c..374a59b7 100644 Binary files a/.venv/Lib/site-packages/pydantic/__pycache__/errors.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/__pycache__/errors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/__pycache__/fields.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/__pycache__/fields.cpython-311.pyc index 7fd32de1..09e8e850 100644 Binary files a/.venv/Lib/site-packages/pydantic/__pycache__/fields.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/__pycache__/fields.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pydantic/__pycache__/generics.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/__pycache__/generics.cpython-311.pyc index 6e25ae26..ff17114c 100644 Binary files a/.venv/Lib/site-packages/pydantic/__pycache__/generics.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/__pycache__/generics.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/__pycache__/json_schema.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/__pycache__/json_schema.cpython-311.pyc index 765059a2..fae2aef7 100644 Binary files a/.venv/Lib/site-packages/pydantic/__pycache__/json_schema.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/__pycache__/json_schema.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/__pycache__/main.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/__pycache__/main.cpython-311.pyc index 4835714b..73bb4303 100644 Binary files a/.venv/Lib/site-packages/pydantic/__pycache__/main.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/__pycache__/main.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/__pycache__/type_adapter.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/__pycache__/type_adapter.cpython-311.pyc index 71b5bd66..e1383b4d 100644 Binary files a/.venv/Lib/site-packages/pydantic/__pycache__/type_adapter.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/__pycache__/type_adapter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/__pycache__/types.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/__pycache__/types.cpython-311.pyc index 3d624176..37c09d3d 100644 Binary files a/.venv/Lib/site-packages/pydantic/__pycache__/types.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/__pycache__/types.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/__pycache__/version.cpython-311.pyc index f3cf3fff..cb807994 100644 Binary files 
a/.venv/Lib/site-packages/pydantic/__pycache__/version.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/__pycache__/warnings.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/__pycache__/warnings.cpython-311.pyc index d7ee421c..0b23e4f1 100644 Binary files a/.venv/Lib/site-packages/pydantic/__pycache__/warnings.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/__pycache__/warnings.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/__init__.cpython-311.pyc index 4fea1917..33cc5f9b 100644 Binary files a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_config.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_config.cpython-311.pyc index b6d837d6..32e316e9 100644 Binary files a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_config.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_core_metadata.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_core_metadata.cpython-311.pyc index 049207dd..68ee0110 100644 Binary files a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_core_metadata.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_core_metadata.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_core_utils.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_core_utils.cpython-311.pyc index 478d5921..860b0452 100644 Binary files 
a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_core_utils.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_core_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_decorators.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_decorators.cpython-311.pyc index 8c5c88d1..d8de6df8 100644 Binary files a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_decorators.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_decorators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_discriminated_union.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_discriminated_union.cpython-311.pyc index 7af7b450..bd732fe9 100644 Binary files a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_discriminated_union.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_discriminated_union.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_docs_extraction.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_docs_extraction.cpython-311.pyc index 833ea543..fefbd4ae 100644 Binary files a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_docs_extraction.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_docs_extraction.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_fields.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_fields.cpython-311.pyc index 9b29ac2a..82dce311 100644 Binary files a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_fields.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_fields.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_forward_ref.cpython-311.pyc 
b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_forward_ref.cpython-311.pyc index 82cbb26a..44056445 100644 Binary files a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_forward_ref.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_forward_ref.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_generate_schema.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_generate_schema.cpython-311.pyc index 41986c9f..b12ffbba 100644 Binary files a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_generate_schema.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_generate_schema.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_generics.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_generics.cpython-311.pyc index 09ef2137..e206ac02 100644 Binary files a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_generics.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_generics.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_internal_dataclass.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_internal_dataclass.cpython-311.pyc index d05a4000..7c68c2f0 100644 Binary files a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_internal_dataclass.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_internal_dataclass.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_known_annotated_metadata.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_known_annotated_metadata.cpython-311.pyc index 63c71321..057e84ba 100644 Binary files a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_known_annotated_metadata.cpython-311.pyc and 
b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_known_annotated_metadata.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_mock_val_ser.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_mock_val_ser.cpython-311.pyc index f1d03dab..4cbcb62c 100644 Binary files a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_mock_val_ser.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_mock_val_ser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_model_construction.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_model_construction.cpython-311.pyc index 66f52663..6b6cdbc0 100644 Binary files a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_model_construction.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_model_construction.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_repr.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_repr.cpython-311.pyc index 42b8efdf..40082b45 100644 Binary files a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_repr.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_repr.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_schema_generation_shared.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_schema_generation_shared.cpython-311.pyc index a185b088..adfcec14 100644 Binary files a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_schema_generation_shared.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_schema_generation_shared.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_signature.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_signature.cpython-311.pyc index 
fe83becd..58eb26ef 100644 Binary files a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_signature.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_signature.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_std_types_schema.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_std_types_schema.cpython-311.pyc index 1ee1f6fd..c4ac5707 100644 Binary files a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_std_types_schema.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_std_types_schema.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_typing_extra.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_typing_extra.cpython-311.pyc index af8b8f59..b3a5bdd2 100644 Binary files a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_typing_extra.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_typing_extra.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_utils.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_utils.cpython-311.pyc index c966a9a6..34112343 100644 Binary files a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_utils.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_validate_call.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_validate_call.cpython-311.pyc index b55185c9..946b7a76 100644 Binary files a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_validate_call.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_validate_call.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_validators.cpython-311.pyc 
b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_validators.cpython-311.pyc index 20316715..54cf8ff1 100644 Binary files a/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_validators.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/_internal/__pycache__/_validators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/plugin/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/plugin/__pycache__/__init__.cpython-311.pyc index ba47beda..2fbec8e8 100644 Binary files a/.venv/Lib/site-packages/pydantic/plugin/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/plugin/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/plugin/__pycache__/_loader.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/plugin/__pycache__/_loader.cpython-311.pyc index c1db60e1..1b769869 100644 Binary files a/.venv/Lib/site-packages/pydantic/plugin/__pycache__/_loader.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/plugin/__pycache__/_loader.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/plugin/__pycache__/_schema_validator.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/plugin/__pycache__/_schema_validator.cpython-311.pyc index 913cefbb..01810efb 100644 Binary files a/.venv/Lib/site-packages/pydantic/plugin/__pycache__/_schema_validator.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/plugin/__pycache__/_schema_validator.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/v1/__pycache__/__init__.cpython-311.pyc index d6f163da..7a117278 100644 Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/v1/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/annotated_types.cpython-311.pyc 
b/.venv/Lib/site-packages/pydantic/v1/__pycache__/annotated_types.cpython-311.pyc index b1782646..f7a59150 100644 Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/annotated_types.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/v1/__pycache__/annotated_types.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/class_validators.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/v1/__pycache__/class_validators.cpython-311.pyc index 781b76af..e86bc793 100644 Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/class_validators.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/v1/__pycache__/class_validators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/color.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/v1/__pycache__/color.cpython-311.pyc index 225bfe11..17db5b05 100644 Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/color.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/v1/__pycache__/color.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/config.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/v1/__pycache__/config.cpython-311.pyc index 19123fac..5bcea245 100644 Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/config.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/v1/__pycache__/config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/dataclasses.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/v1/__pycache__/dataclasses.cpython-311.pyc index c2d9d5e1..44ccb171 100644 Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/dataclasses.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/v1/__pycache__/dataclasses.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/datetime_parse.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/v1/__pycache__/datetime_parse.cpython-311.pyc index 50a5fa68..eea699df 100644 
Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/datetime_parse.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/v1/__pycache__/datetime_parse.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/decorator.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/v1/__pycache__/decorator.cpython-311.pyc index 1b3ab79d..aecf6b67 100644 Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/decorator.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/v1/__pycache__/decorator.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/env_settings.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/v1/__pycache__/env_settings.cpython-311.pyc index a9971eab..55a80b5d 100644 Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/env_settings.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/v1/__pycache__/env_settings.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/error_wrappers.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/v1/__pycache__/error_wrappers.cpython-311.pyc index e45e89a0..6ec4f87d 100644 Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/error_wrappers.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/v1/__pycache__/error_wrappers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/errors.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/v1/__pycache__/errors.cpython-311.pyc index 0d05f933..030b40ac 100644 Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/errors.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/v1/__pycache__/errors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/fields.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/v1/__pycache__/fields.cpython-311.pyc index 69b5623b..c82a485c 100644 Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/fields.cpython-311.pyc and 
b/.venv/Lib/site-packages/pydantic/v1/__pycache__/fields.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/json.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/v1/__pycache__/json.cpython-311.pyc index c8f2e0e7..9a20afa5 100644 Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/json.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/v1/__pycache__/json.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/main.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/v1/__pycache__/main.cpython-311.pyc index 109ddfb1..e35ec387 100644 Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/main.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/v1/__pycache__/main.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/networks.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/v1/__pycache__/networks.cpython-311.pyc index 34e9415e..0ddce47d 100644 Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/networks.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/v1/__pycache__/networks.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/parse.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/v1/__pycache__/parse.cpython-311.pyc index 73b4881f..bf1de9e2 100644 Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/parse.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/v1/__pycache__/parse.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/schema.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/v1/__pycache__/schema.cpython-311.pyc index 8022077c..b0f544cb 100644 Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/schema.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/v1/__pycache__/schema.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/tools.cpython-311.pyc 
b/.venv/Lib/site-packages/pydantic/v1/__pycache__/tools.cpython-311.pyc index b1099316..c89b7163 100644 Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/tools.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/v1/__pycache__/tools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/types.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/v1/__pycache__/types.cpython-311.pyc index 54b8de1d..11e86515 100644 Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/types.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/v1/__pycache__/types.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/typing.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/v1/__pycache__/typing.cpython-311.pyc index d4f1164d..cbbc6809 100644 Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/typing.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/v1/__pycache__/typing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/v1/__pycache__/utils.cpython-311.pyc index 55ef9c43..83c922e5 100644 Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/v1/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/validators.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/v1/__pycache__/validators.cpython-311.pyc index 22621171..1419200f 100644 Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/validators.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic/v1/__pycache__/validators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic/v1/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/pydantic/v1/__pycache__/version.cpython-311.pyc index acc7d947..a3ff24ca 100644 Binary files a/.venv/Lib/site-packages/pydantic/v1/__pycache__/version.cpython-311.pyc and 
b/.venv/Lib/site-packages/pydantic/v1/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic_core/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pydantic_core/__pycache__/__init__.cpython-311.pyc index 0bfe6b39..7388c22b 100644 Binary files a/.venv/Lib/site-packages/pydantic_core/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic_core/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pydantic_core/__pycache__/core_schema.cpython-311.pyc b/.venv/Lib/site-packages/pydantic_core/__pycache__/core_schema.cpython-311.pyc index 702d8c7b..53a8a79d 100644 Binary files a/.venv/Lib/site-packages/pydantic_core/__pycache__/core_schema.cpython-311.pyc and b/.venv/Lib/site-packages/pydantic_core/__pycache__/core_schema.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pyparsing/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pyparsing/__pycache__/__init__.cpython-311.pyc index b6010f84..5cfe79d0 100644 Binary files a/.venv/Lib/site-packages/pyparsing/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pyparsing/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pyparsing/__pycache__/actions.cpython-311.pyc b/.venv/Lib/site-packages/pyparsing/__pycache__/actions.cpython-311.pyc index 1922d210..90ccfaaf 100644 Binary files a/.venv/Lib/site-packages/pyparsing/__pycache__/actions.cpython-311.pyc and b/.venv/Lib/site-packages/pyparsing/__pycache__/actions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pyparsing/__pycache__/common.cpython-311.pyc b/.venv/Lib/site-packages/pyparsing/__pycache__/common.cpython-311.pyc index e78c7aba..950e3c4f 100644 Binary files a/.venv/Lib/site-packages/pyparsing/__pycache__/common.cpython-311.pyc and b/.venv/Lib/site-packages/pyparsing/__pycache__/common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pyparsing/__pycache__/core.cpython-311.pyc 
b/.venv/Lib/site-packages/pyparsing/__pycache__/core.cpython-311.pyc index 09adcbe2..10f6e5a4 100644 Binary files a/.venv/Lib/site-packages/pyparsing/__pycache__/core.cpython-311.pyc and b/.venv/Lib/site-packages/pyparsing/__pycache__/core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pyparsing/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/pyparsing/__pycache__/exceptions.cpython-311.pyc index 4a75ec44..e3f12eb6 100644 Binary files a/.venv/Lib/site-packages/pyparsing/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/pyparsing/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pyparsing/__pycache__/helpers.cpython-311.pyc b/.venv/Lib/site-packages/pyparsing/__pycache__/helpers.cpython-311.pyc index 6c3ec68c..310d2c53 100644 Binary files a/.venv/Lib/site-packages/pyparsing/__pycache__/helpers.cpython-311.pyc and b/.venv/Lib/site-packages/pyparsing/__pycache__/helpers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pyparsing/__pycache__/results.cpython-311.pyc b/.venv/Lib/site-packages/pyparsing/__pycache__/results.cpython-311.pyc index f876aaf4..66a7d4cb 100644 Binary files a/.venv/Lib/site-packages/pyparsing/__pycache__/results.cpython-311.pyc and b/.venv/Lib/site-packages/pyparsing/__pycache__/results.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pyparsing/__pycache__/testing.cpython-311.pyc b/.venv/Lib/site-packages/pyparsing/__pycache__/testing.cpython-311.pyc index 72619b54..c278e8a1 100644 Binary files a/.venv/Lib/site-packages/pyparsing/__pycache__/testing.cpython-311.pyc and b/.venv/Lib/site-packages/pyparsing/__pycache__/testing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pyparsing/__pycache__/unicode.cpython-311.pyc b/.venv/Lib/site-packages/pyparsing/__pycache__/unicode.cpython-311.pyc index 57755a6d..be0ebe14 100644 Binary files a/.venv/Lib/site-packages/pyparsing/__pycache__/unicode.cpython-311.pyc and 
b/.venv/Lib/site-packages/pyparsing/__pycache__/unicode.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pyparsing/__pycache__/util.cpython-311.pyc b/.venv/Lib/site-packages/pyparsing/__pycache__/util.cpython-311.pyc index c6f85276..675e5221 100644 Binary files a/.venv/Lib/site-packages/pyparsing/__pycache__/util.cpython-311.pyc and b/.venv/Lib/site-packages/pyparsing/__pycache__/util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/__pycache__/__init__.cpython-311.pyc index 7d5d50ea..5543c4ff 100644 Binary files a/.venv/Lib/site-packages/pypinyin/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/__pycache__/compat.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/__pycache__/compat.cpython-311.pyc index 715c59af..251520f4 100644 Binary files a/.venv/Lib/site-packages/pypinyin/__pycache__/compat.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/__pycache__/compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/__pycache__/constants.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/__pycache__/constants.cpython-311.pyc index 20113a4e..9239a98b 100644 Binary files a/.venv/Lib/site-packages/pypinyin/__pycache__/constants.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/__pycache__/constants.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/__pycache__/converter.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/__pycache__/converter.cpython-311.pyc index 4d2a7627..13700e72 100644 Binary files a/.venv/Lib/site-packages/pypinyin/__pycache__/converter.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/__pycache__/converter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/__pycache__/core.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/__pycache__/core.cpython-311.pyc index 
99618764..2f152bcc 100644 Binary files a/.venv/Lib/site-packages/pypinyin/__pycache__/core.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/__pycache__/core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/__pycache__/phonetic_symbol.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/__pycache__/phonetic_symbol.cpython-311.pyc index a019e0c7..d40cda95 100644 Binary files a/.venv/Lib/site-packages/pypinyin/__pycache__/phonetic_symbol.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/__pycache__/phonetic_symbol.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/__pycache__/phrases_dict.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/__pycache__/phrases_dict.cpython-311.pyc index 3ba6b68b..3d71b4f0 100644 Binary files a/.venv/Lib/site-packages/pypinyin/__pycache__/phrases_dict.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/__pycache__/phrases_dict.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/__pycache__/pinyin_dict.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/__pycache__/pinyin_dict.cpython-311.pyc index fe814132..dfdddf13 100644 Binary files a/.venv/Lib/site-packages/pypinyin/__pycache__/pinyin_dict.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/__pycache__/pinyin_dict.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/__pycache__/standard.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/__pycache__/standard.cpython-311.pyc index de0556c1..18b3b1e0 100644 Binary files a/.venv/Lib/site-packages/pypinyin/__pycache__/standard.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/__pycache__/standard.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/__pycache__/utils.cpython-311.pyc index fabfd655..be6a7244 100644 Binary files a/.venv/Lib/site-packages/pypinyin/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/__pycache__/utils.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/__init__.cpython-311.pyc index 14217c9d..fb26d51a 100644 Binary files a/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/_tone_rule.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/_tone_rule.cpython-311.pyc index 12ab8875..e7112de3 100644 Binary files a/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/_tone_rule.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/_tone_rule.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/neutral_tone.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/neutral_tone.cpython-311.pyc index c45b9a2b..14a0c2eb 100644 Binary files a/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/neutral_tone.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/neutral_tone.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/tone_convert.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/tone_convert.cpython-311.pyc index 68751626..ada93683 100644 Binary files a/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/tone_convert.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/tone_convert.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/tone_sandhi.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/tone_sandhi.cpython-311.pyc index 29a1c503..3ff8b593 100644 Binary files a/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/tone_sandhi.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/tone_sandhi.cpython-311.pyc differ diff 
--git a/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/uv.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/uv.cpython-311.pyc index b629d11c..7953969b 100644 Binary files a/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/uv.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/contrib/__pycache__/uv.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/seg/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/seg/__pycache__/__init__.cpython-311.pyc index 155c6cab..4f8586ce 100644 Binary files a/.venv/Lib/site-packages/pypinyin/seg/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/seg/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/seg/__pycache__/mmseg.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/seg/__pycache__/mmseg.cpython-311.pyc index 35ffbf5c..ea0ffb5f 100644 Binary files a/.venv/Lib/site-packages/pypinyin/seg/__pycache__/mmseg.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/seg/__pycache__/mmseg.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/seg/__pycache__/simpleseg.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/seg/__pycache__/simpleseg.cpython-311.pyc index 589da9df..e8d0fb29 100644 Binary files a/.venv/Lib/site-packages/pypinyin/seg/__pycache__/simpleseg.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/seg/__pycache__/simpleseg.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/style/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/style/__pycache__/__init__.cpython-311.pyc index b7970af8..6a7e1463 100644 Binary files a/.venv/Lib/site-packages/pypinyin/style/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/style/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/style/__pycache__/_constants.cpython-311.pyc 
b/.venv/Lib/site-packages/pypinyin/style/__pycache__/_constants.cpython-311.pyc index 9d7947f4..537dc3ee 100644 Binary files a/.venv/Lib/site-packages/pypinyin/style/__pycache__/_constants.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/style/__pycache__/_constants.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/style/__pycache__/_tone_convert.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/style/__pycache__/_tone_convert.cpython-311.pyc index cb87e4ae..57dcf683 100644 Binary files a/.venv/Lib/site-packages/pypinyin/style/__pycache__/_tone_convert.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/style/__pycache__/_tone_convert.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/style/__pycache__/_tone_rule.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/style/__pycache__/_tone_rule.cpython-311.pyc index b1771fef..d21c754c 100644 Binary files a/.venv/Lib/site-packages/pypinyin/style/__pycache__/_tone_rule.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/style/__pycache__/_tone_rule.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/style/__pycache__/_utils.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/style/__pycache__/_utils.cpython-311.pyc index 73e5e059..d682d0d9 100644 Binary files a/.venv/Lib/site-packages/pypinyin/style/__pycache__/_utils.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/style/__pycache__/_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/style/__pycache__/bopomofo.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/style/__pycache__/bopomofo.cpython-311.pyc index 70e8ee7b..fb5e6cd3 100644 Binary files a/.venv/Lib/site-packages/pypinyin/style/__pycache__/bopomofo.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/style/__pycache__/bopomofo.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/style/__pycache__/cyrillic.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/style/__pycache__/cyrillic.cpython-311.pyc 
index 054beed2..9d84d9a5 100644 Binary files a/.venv/Lib/site-packages/pypinyin/style/__pycache__/cyrillic.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/style/__pycache__/cyrillic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/style/__pycache__/finals.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/style/__pycache__/finals.cpython-311.pyc index e000ac47..cd604636 100644 Binary files a/.venv/Lib/site-packages/pypinyin/style/__pycache__/finals.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/style/__pycache__/finals.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/style/__pycache__/initials.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/style/__pycache__/initials.cpython-311.pyc index d35c5373..d42a1a77 100644 Binary files a/.venv/Lib/site-packages/pypinyin/style/__pycache__/initials.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/style/__pycache__/initials.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/style/__pycache__/others.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/style/__pycache__/others.cpython-311.pyc index ae5fc3d0..81e963a0 100644 Binary files a/.venv/Lib/site-packages/pypinyin/style/__pycache__/others.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/style/__pycache__/others.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/style/__pycache__/tone.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/style/__pycache__/tone.cpython-311.pyc index fd69ef1b..98c55632 100644 Binary files a/.venv/Lib/site-packages/pypinyin/style/__pycache__/tone.cpython-311.pyc and b/.venv/Lib/site-packages/pypinyin/style/__pycache__/tone.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pypinyin/style/__pycache__/wadegiles.cpython-311.pyc b/.venv/Lib/site-packages/pypinyin/style/__pycache__/wadegiles.cpython-311.pyc index 00b671b6..e6b7c0a2 100644 Binary files a/.venv/Lib/site-packages/pypinyin/style/__pycache__/wadegiles.cpython-311.pyc and 
b/.venv/Lib/site-packages/pypinyin/style/__pycache__/wadegiles.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/__pycache__/__init__.cpython-311.pyc index 250745ac..b199c04b 100644 Binary files a/.venv/Lib/site-packages/pysbd/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/__pycache__/abbreviation_replacer.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/__pycache__/abbreviation_replacer.cpython-311.pyc index 1ff5147c..b34b734c 100644 Binary files a/.venv/Lib/site-packages/pysbd/__pycache__/abbreviation_replacer.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/__pycache__/abbreviation_replacer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/__pycache__/about.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/__pycache__/about.cpython-311.pyc index d7b48a2a..76bdd3e6 100644 Binary files a/.venv/Lib/site-packages/pysbd/__pycache__/about.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/__pycache__/about.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/__pycache__/between_punctuation.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/__pycache__/between_punctuation.cpython-311.pyc index 33936e6a..ec70e9cb 100644 Binary files a/.venv/Lib/site-packages/pysbd/__pycache__/between_punctuation.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/__pycache__/between_punctuation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/__pycache__/cleaner.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/__pycache__/cleaner.cpython-311.pyc index 65cd3f94..a7732310 100644 Binary files a/.venv/Lib/site-packages/pysbd/__pycache__/cleaner.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/__pycache__/cleaner.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/__pycache__/exclamation_words.cpython-311.pyc 
b/.venv/Lib/site-packages/pysbd/__pycache__/exclamation_words.cpython-311.pyc index 0bf2f885..565200ca 100644 Binary files a/.venv/Lib/site-packages/pysbd/__pycache__/exclamation_words.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/__pycache__/exclamation_words.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/__pycache__/languages.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/__pycache__/languages.cpython-311.pyc index 60d87f54..1711d440 100644 Binary files a/.venv/Lib/site-packages/pysbd/__pycache__/languages.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/__pycache__/languages.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/__pycache__/lists_item_replacer.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/__pycache__/lists_item_replacer.cpython-311.pyc index fccdac03..73409b59 100644 Binary files a/.venv/Lib/site-packages/pysbd/__pycache__/lists_item_replacer.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/__pycache__/lists_item_replacer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/__pycache__/processor.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/__pycache__/processor.cpython-311.pyc index 13aba041..4c0aa9d7 100644 Binary files a/.venv/Lib/site-packages/pysbd/__pycache__/processor.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/__pycache__/processor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/__pycache__/punctuation_replacer.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/__pycache__/punctuation_replacer.cpython-311.pyc index 555cbf53..514fc073 100644 Binary files a/.venv/Lib/site-packages/pysbd/__pycache__/punctuation_replacer.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/__pycache__/punctuation_replacer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/__pycache__/segmenter.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/__pycache__/segmenter.cpython-311.pyc index a8deefa1..26b77900 100644 Binary files 
a/.venv/Lib/site-packages/pysbd/__pycache__/segmenter.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/__pycache__/segmenter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/__pycache__/utils.cpython-311.pyc index 54e66107..e068c20b 100644 Binary files a/.venv/Lib/site-packages/pysbd/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/clean/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/clean/__pycache__/__init__.cpython-311.pyc index 514839dc..a7d95c14 100644 Binary files a/.venv/Lib/site-packages/pysbd/clean/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/clean/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/clean/__pycache__/rules.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/clean/__pycache__/rules.cpython-311.pyc index ee071431..7db48088 100644 Binary files a/.venv/Lib/site-packages/pysbd/clean/__pycache__/rules.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/clean/__pycache__/rules.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/__init__.cpython-311.pyc index f9bcfe21..2aeb8fe7 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/__pycache__/amharic.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/amharic.cpython-311.pyc index 1ae278a3..4481fdc3 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/__pycache__/amharic.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/amharic.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pysbd/lang/__pycache__/arabic.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/arabic.cpython-311.pyc index ca132782..2e54efeb 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/__pycache__/arabic.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/arabic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/__pycache__/armenian.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/armenian.cpython-311.pyc index ff485afd..48176423 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/__pycache__/armenian.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/armenian.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/__pycache__/bulgarian.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/bulgarian.cpython-311.pyc index 521b16c6..3ee5a1f1 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/__pycache__/bulgarian.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/bulgarian.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/__pycache__/burmese.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/burmese.cpython-311.pyc index 533f10f5..4b4562ef 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/__pycache__/burmese.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/burmese.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/__pycache__/chinese.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/chinese.cpython-311.pyc index 358c4b1d..c44faff2 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/__pycache__/chinese.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/chinese.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/__pycache__/danish.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/danish.cpython-311.pyc index a12484b6..4fb40a3d 100644 Binary files 
a/.venv/Lib/site-packages/pysbd/lang/__pycache__/danish.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/danish.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/__pycache__/deutsch.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/deutsch.cpython-311.pyc index 6c4ad9c4..500c5784 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/__pycache__/deutsch.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/deutsch.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/__pycache__/dutch.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/dutch.cpython-311.pyc index f743e3ac..aa6ec66f 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/__pycache__/dutch.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/dutch.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/__pycache__/english.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/english.cpython-311.pyc index 587c04ab..5ba76a12 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/__pycache__/english.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/english.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/__pycache__/french.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/french.cpython-311.pyc index f15510af..b1dfbef7 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/__pycache__/french.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/french.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/__pycache__/greek.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/greek.cpython-311.pyc index 9402a422..22f50552 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/__pycache__/greek.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/greek.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pysbd/lang/__pycache__/hindi.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/hindi.cpython-311.pyc index 136e2962..ed0a13b1 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/__pycache__/hindi.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/hindi.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/__pycache__/italian.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/italian.cpython-311.pyc index 2ed11f6f..9544f17e 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/__pycache__/italian.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/italian.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/__pycache__/japanese.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/japanese.cpython-311.pyc index e3aa06c6..5937b915 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/__pycache__/japanese.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/japanese.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/__pycache__/kazakh.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/kazakh.cpython-311.pyc index 9f90bb23..5a3120a0 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/__pycache__/kazakh.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/kazakh.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/__pycache__/marathi.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/marathi.cpython-311.pyc index b58113df..6c3be96c 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/__pycache__/marathi.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/marathi.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/__pycache__/persian.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/persian.cpython-311.pyc index 1ab585c2..2e8ec78e 100644 Binary files 
a/.venv/Lib/site-packages/pysbd/lang/__pycache__/persian.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/persian.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/__pycache__/polish.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/polish.cpython-311.pyc index 94adae03..1abff85e 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/__pycache__/polish.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/polish.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/__pycache__/russian.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/russian.cpython-311.pyc index e679f158..2367c6cb 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/__pycache__/russian.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/russian.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/__pycache__/slovak.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/slovak.cpython-311.pyc index 5d59354f..48592f7a 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/__pycache__/slovak.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/slovak.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/__pycache__/spanish.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/spanish.cpython-311.pyc index bd5d98a3..1c6bab2b 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/__pycache__/spanish.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/spanish.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/__pycache__/urdu.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/__pycache__/urdu.cpython-311.pyc index 62b2a6c6..ac7eda25 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/__pycache__/urdu.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/__pycache__/urdu.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/pysbd/lang/common/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/common/__pycache__/__init__.cpython-311.pyc index 893de04d..3b29ee14 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/common/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/common/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/common/__pycache__/common.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/common/__pycache__/common.cpython-311.pyc index 0df2e7aa..2a568aea 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/common/__pycache__/common.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/common/__pycache__/common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pysbd/lang/common/__pycache__/standard.cpython-311.pyc b/.venv/Lib/site-packages/pysbd/lang/common/__pycache__/standard.cpython-311.pyc index 11ad4eb2..4e53d388 100644 Binary files a/.venv/Lib/site-packages/pysbd/lang/common/__pycache__/standard.cpython-311.pyc and b/.venv/Lib/site-packages/pysbd/lang/common/__pycache__/standard.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pytz/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/pytz/__pycache__/__init__.cpython-311.pyc index c31bab9e..92a9c53c 100644 Binary files a/.venv/Lib/site-packages/pytz/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/pytz/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pytz/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/pytz/__pycache__/exceptions.cpython-311.pyc index a0318461..25e7e302 100644 Binary files a/.venv/Lib/site-packages/pytz/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/pytz/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pytz/__pycache__/lazy.cpython-311.pyc b/.venv/Lib/site-packages/pytz/__pycache__/lazy.cpython-311.pyc index 
67a86d37..27f0373c 100644 Binary files a/.venv/Lib/site-packages/pytz/__pycache__/lazy.cpython-311.pyc and b/.venv/Lib/site-packages/pytz/__pycache__/lazy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pytz/__pycache__/tzfile.cpython-311.pyc b/.venv/Lib/site-packages/pytz/__pycache__/tzfile.cpython-311.pyc index 12072612..ddf13934 100644 Binary files a/.venv/Lib/site-packages/pytz/__pycache__/tzfile.cpython-311.pyc and b/.venv/Lib/site-packages/pytz/__pycache__/tzfile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/pytz/__pycache__/tzinfo.cpython-311.pyc b/.venv/Lib/site-packages/pytz/__pycache__/tzinfo.cpython-311.pyc index 34dad297..48f57c9e 100644 Binary files a/.venv/Lib/site-packages/pytz/__pycache__/tzinfo.cpython-311.pyc and b/.venv/Lib/site-packages/pytz/__pycache__/tzinfo.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/regex/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/regex/__pycache__/__init__.cpython-311.pyc index ec460dba..33f82686 100644 Binary files a/.venv/Lib/site-packages/regex/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/regex/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/regex/__pycache__/_regex_core.cpython-311.pyc b/.venv/Lib/site-packages/regex/__pycache__/_regex_core.cpython-311.pyc index 531d4724..072e889b 100644 Binary files a/.venv/Lib/site-packages/regex/__pycache__/_regex_core.cpython-311.pyc and b/.venv/Lib/site-packages/regex/__pycache__/_regex_core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/regex/__pycache__/regex.cpython-311.pyc b/.venv/Lib/site-packages/regex/__pycache__/regex.cpython-311.pyc index 401ac38a..0c871f51 100644 Binary files a/.venv/Lib/site-packages/regex/__pycache__/regex.cpython-311.pyc and b/.venv/Lib/site-packages/regex/__pycache__/regex.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/requests/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/requests/__pycache__/__init__.cpython-311.pyc index cf640710..4cff0704 100644 Binary files a/.venv/Lib/site-packages/requests/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/requests/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/requests/__pycache__/__version__.cpython-311.pyc b/.venv/Lib/site-packages/requests/__pycache__/__version__.cpython-311.pyc index f13e3aa5..643138d7 100644 Binary files a/.venv/Lib/site-packages/requests/__pycache__/__version__.cpython-311.pyc and b/.venv/Lib/site-packages/requests/__pycache__/__version__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/requests/__pycache__/_internal_utils.cpython-311.pyc b/.venv/Lib/site-packages/requests/__pycache__/_internal_utils.cpython-311.pyc index 24909afb..f04b0cb7 100644 Binary files a/.venv/Lib/site-packages/requests/__pycache__/_internal_utils.cpython-311.pyc and b/.venv/Lib/site-packages/requests/__pycache__/_internal_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/requests/__pycache__/adapters.cpython-311.pyc b/.venv/Lib/site-packages/requests/__pycache__/adapters.cpython-311.pyc index 2bb3036d..574e96bc 100644 Binary files a/.venv/Lib/site-packages/requests/__pycache__/adapters.cpython-311.pyc and b/.venv/Lib/site-packages/requests/__pycache__/adapters.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/requests/__pycache__/api.cpython-311.pyc b/.venv/Lib/site-packages/requests/__pycache__/api.cpython-311.pyc index 2896f066..c2d40494 100644 Binary files a/.venv/Lib/site-packages/requests/__pycache__/api.cpython-311.pyc and b/.venv/Lib/site-packages/requests/__pycache__/api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/requests/__pycache__/auth.cpython-311.pyc b/.venv/Lib/site-packages/requests/__pycache__/auth.cpython-311.pyc index 3944550b..390725c9 100644 Binary files a/.venv/Lib/site-packages/requests/__pycache__/auth.cpython-311.pyc and 
b/.venv/Lib/site-packages/requests/__pycache__/auth.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/requests/__pycache__/certs.cpython-311.pyc b/.venv/Lib/site-packages/requests/__pycache__/certs.cpython-311.pyc index 14fc5d44..7209a772 100644 Binary files a/.venv/Lib/site-packages/requests/__pycache__/certs.cpython-311.pyc and b/.venv/Lib/site-packages/requests/__pycache__/certs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/requests/__pycache__/compat.cpython-311.pyc b/.venv/Lib/site-packages/requests/__pycache__/compat.cpython-311.pyc index a761ef68..8eb678d2 100644 Binary files a/.venv/Lib/site-packages/requests/__pycache__/compat.cpython-311.pyc and b/.venv/Lib/site-packages/requests/__pycache__/compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/requests/__pycache__/cookies.cpython-311.pyc b/.venv/Lib/site-packages/requests/__pycache__/cookies.cpython-311.pyc index 82f8bf70..1ef1713b 100644 Binary files a/.venv/Lib/site-packages/requests/__pycache__/cookies.cpython-311.pyc and b/.venv/Lib/site-packages/requests/__pycache__/cookies.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/requests/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/requests/__pycache__/exceptions.cpython-311.pyc index 932a6296..c9549f15 100644 Binary files a/.venv/Lib/site-packages/requests/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/requests/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/requests/__pycache__/hooks.cpython-311.pyc b/.venv/Lib/site-packages/requests/__pycache__/hooks.cpython-311.pyc index 9c216501..03c445f8 100644 Binary files a/.venv/Lib/site-packages/requests/__pycache__/hooks.cpython-311.pyc and b/.venv/Lib/site-packages/requests/__pycache__/hooks.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/requests/__pycache__/models.cpython-311.pyc b/.venv/Lib/site-packages/requests/__pycache__/models.cpython-311.pyc index 49589d38..79b67e15 
100644 Binary files a/.venv/Lib/site-packages/requests/__pycache__/models.cpython-311.pyc and b/.venv/Lib/site-packages/requests/__pycache__/models.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/requests/__pycache__/packages.cpython-311.pyc b/.venv/Lib/site-packages/requests/__pycache__/packages.cpython-311.pyc index 98759939..39d24698 100644 Binary files a/.venv/Lib/site-packages/requests/__pycache__/packages.cpython-311.pyc and b/.venv/Lib/site-packages/requests/__pycache__/packages.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/requests/__pycache__/sessions.cpython-311.pyc b/.venv/Lib/site-packages/requests/__pycache__/sessions.cpython-311.pyc index 68bae73d..95d5120b 100644 Binary files a/.venv/Lib/site-packages/requests/__pycache__/sessions.cpython-311.pyc and b/.venv/Lib/site-packages/requests/__pycache__/sessions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/requests/__pycache__/status_codes.cpython-311.pyc b/.venv/Lib/site-packages/requests/__pycache__/status_codes.cpython-311.pyc index a2df849a..ef12811d 100644 Binary files a/.venv/Lib/site-packages/requests/__pycache__/status_codes.cpython-311.pyc and b/.venv/Lib/site-packages/requests/__pycache__/status_codes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/requests/__pycache__/structures.cpython-311.pyc b/.venv/Lib/site-packages/requests/__pycache__/structures.cpython-311.pyc index 77ffa205..6c3e293b 100644 Binary files a/.venv/Lib/site-packages/requests/__pycache__/structures.cpython-311.pyc and b/.venv/Lib/site-packages/requests/__pycache__/structures.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/requests/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/requests/__pycache__/utils.cpython-311.pyc index f2218c11..aa02e56d 100644 Binary files a/.venv/Lib/site-packages/requests/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/requests/__pycache__/utils.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/safetensors/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/safetensors/__pycache__/__init__.cpython-311.pyc index c90ad169..0fc277ca 100644 Binary files a/.venv/Lib/site-packages/safetensors/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/safetensors/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/safetensors/__pycache__/torch.cpython-311.pyc b/.venv/Lib/site-packages/safetensors/__pycache__/torch.cpython-311.pyc index 6fafaabf..d412b6b4 100644 Binary files a/.venv/Lib/site-packages/safetensors/__pycache__/torch.cpython-311.pyc and b/.venv/Lib/site-packages/safetensors/__pycache__/torch.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/__pycache__/__config__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/__pycache__/__config__.cpython-311.pyc index 06f7cd2b..771b1d4a 100644 Binary files a/.venv/Lib/site-packages/scipy/__pycache__/__config__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/__pycache__/__config__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/__pycache__/__init__.cpython-311.pyc index d83dd114..9f7a9c1c 100644 Binary files a/.venv/Lib/site-packages/scipy/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/__pycache__/_distributor_init.cpython-311.pyc b/.venv/Lib/site-packages/scipy/__pycache__/_distributor_init.cpython-311.pyc index 23eeaa1c..2e0a6f1f 100644 Binary files a/.venv/Lib/site-packages/scipy/__pycache__/_distributor_init.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/__pycache__/_distributor_init.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/scipy/__pycache__/version.cpython-311.pyc index 4b40fea9..e4d4334e 100644 Binary files 
a/.venv/Lib/site-packages/scipy/__pycache__/version.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/__pycache__/__init__.cpython-311.pyc index c11f8c6e..a3dedc94 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/__pycache__/_array_api.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/__pycache__/_array_api.cpython-311.pyc index 6a823180..a1c9449c 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/__pycache__/_array_api.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/__pycache__/_array_api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/__pycache__/_bunch.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/__pycache__/_bunch.cpython-311.pyc index 08298d90..6301f742 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/__pycache__/_bunch.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/__pycache__/_bunch.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/__pycache__/_ccallback.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/__pycache__/_ccallback.cpython-311.pyc index 8da5fb2d..30915e66 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/__pycache__/_ccallback.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/__pycache__/_ccallback.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/__pycache__/_docscrape.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/__pycache__/_docscrape.cpython-311.pyc index 93ce6f23..55b27fde 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/__pycache__/_docscrape.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/__pycache__/_docscrape.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/scipy/_lib/__pycache__/_finite_differences.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/__pycache__/_finite_differences.cpython-311.pyc index 04c5b9eb..cbbb1f2b 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/__pycache__/_finite_differences.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/__pycache__/_finite_differences.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/__pycache__/_pep440.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/__pycache__/_pep440.cpython-311.pyc index 52223989..ea6eabaa 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/__pycache__/_pep440.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/__pycache__/_pep440.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/__pycache__/_testutils.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/__pycache__/_testutils.cpython-311.pyc index 9648ef8a..830cb998 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/__pycache__/_testutils.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/__pycache__/_testutils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/__pycache__/_threadsafety.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/__pycache__/_threadsafety.cpython-311.pyc index e625155c..03516cb0 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/__pycache__/_threadsafety.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/__pycache__/_threadsafety.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/__pycache__/_util.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/__pycache__/_util.cpython-311.pyc index 64453735..5f0c3807 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/__pycache__/_util.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/__pycache__/_util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/__pycache__/decorator.cpython-311.pyc 
b/.venv/Lib/site-packages/scipy/_lib/__pycache__/decorator.cpython-311.pyc index 73ce4e4f..e578109d 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/__pycache__/decorator.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/__pycache__/decorator.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/__pycache__/deprecation.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/__pycache__/deprecation.cpython-311.pyc index da5c877e..f1be2735 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/__pycache__/deprecation.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/__pycache__/deprecation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/__pycache__/doccer.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/__pycache__/doccer.cpython-311.pyc index a912d1b9..ba46f321 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/__pycache__/doccer.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/__pycache__/doccer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/__pycache__/uarray.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/__pycache__/uarray.cpython-311.pyc index b950d560..39f1bd80 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/__pycache__/uarray.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/__pycache__/uarray.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/_uarray/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/_uarray/__pycache__/__init__.cpython-311.pyc index 978918a9..98c721c4 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/_uarray/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/_uarray/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/_uarray/__pycache__/_backend.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/_uarray/__pycache__/_backend.cpython-311.pyc index 4a393a47..ae5df12f 100644 Binary files 
a/.venv/Lib/site-packages/scipy/_lib/_uarray/__pycache__/_backend.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/_uarray/__pycache__/_backend.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/array_api_compat/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/array_api_compat/__pycache__/__init__.cpython-311.pyc index 46d4c600..925f5fb9 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/array_api_compat/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/array_api_compat/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/array_api_compat/__pycache__/_internal.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/array_api_compat/__pycache__/_internal.cpython-311.pyc index 7c37f107..e5965d19 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/array_api_compat/__pycache__/_internal.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/array_api_compat/__pycache__/_internal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/array_api_compat/common/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/array_api_compat/common/__pycache__/__init__.cpython-311.pyc index 37461884..f6463d20 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/array_api_compat/common/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/array_api_compat/common/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/array_api_compat/common/__pycache__/_aliases.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/array_api_compat/common/__pycache__/_aliases.cpython-311.pyc index 1c67a162..adb13207 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/array_api_compat/common/__pycache__/_aliases.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/array_api_compat/common/__pycache__/_aliases.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/scipy/_lib/array_api_compat/common/__pycache__/_helpers.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/array_api_compat/common/__pycache__/_helpers.cpython-311.pyc index caf30366..38035784 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/array_api_compat/common/__pycache__/_helpers.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/array_api_compat/common/__pycache__/_helpers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/array_api_compat/common/__pycache__/_linalg.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/array_api_compat/common/__pycache__/_linalg.cpython-311.pyc index 1f1aa84d..a34e6580 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/array_api_compat/common/__pycache__/_linalg.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/array_api_compat/common/__pycache__/_linalg.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/array_api_compat/numpy/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/array_api_compat/numpy/__pycache__/__init__.cpython-311.pyc index 56047349..88c3c2ed 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/array_api_compat/numpy/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/array_api_compat/numpy/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/array_api_compat/numpy/__pycache__/_aliases.cpython-311.pyc b/.venv/Lib/site-packages/scipy/_lib/array_api_compat/numpy/__pycache__/_aliases.cpython-311.pyc index 598410b9..eb0cad41 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/array_api_compat/numpy/__pycache__/_aliases.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/array_api_compat/numpy/__pycache__/_aliases.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/_lib/array_api_compat/numpy/__pycache__/linalg.cpython-311.pyc 
b/.venv/Lib/site-packages/scipy/_lib/array_api_compat/numpy/__pycache__/linalg.cpython-311.pyc index eed2859b..172050f4 100644 Binary files a/.venv/Lib/site-packages/scipy/_lib/array_api_compat/numpy/__pycache__/linalg.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/_lib/array_api_compat/numpy/__pycache__/linalg.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/constants/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/constants/__pycache__/__init__.cpython-311.pyc index 826a0f82..f4f4202e 100644 Binary files a/.venv/Lib/site-packages/scipy/constants/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/constants/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/constants/__pycache__/_codata.cpython-311.pyc b/.venv/Lib/site-packages/scipy/constants/__pycache__/_codata.cpython-311.pyc index 56ec6d87..6bfa8ec9 100644 Binary files a/.venv/Lib/site-packages/scipy/constants/__pycache__/_codata.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/constants/__pycache__/_codata.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/constants/__pycache__/_constants.cpython-311.pyc b/.venv/Lib/site-packages/scipy/constants/__pycache__/_constants.cpython-311.pyc index a348b5d2..21744272 100644 Binary files a/.venv/Lib/site-packages/scipy/constants/__pycache__/_constants.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/constants/__pycache__/_constants.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/constants/__pycache__/codata.cpython-311.pyc b/.venv/Lib/site-packages/scipy/constants/__pycache__/codata.cpython-311.pyc index 36f6362f..7f2414d4 100644 Binary files a/.venv/Lib/site-packages/scipy/constants/__pycache__/codata.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/constants/__pycache__/codata.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/constants/__pycache__/constants.cpython-311.pyc 
b/.venv/Lib/site-packages/scipy/constants/__pycache__/constants.cpython-311.pyc index bb6c5683..6c87e4ca 100644 Binary files a/.venv/Lib/site-packages/scipy/constants/__pycache__/constants.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/constants/__pycache__/constants.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/fft/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/fft/__pycache__/__init__.cpython-311.pyc index 605f5448..1275b29b 100644 Binary files a/.venv/Lib/site-packages/scipy/fft/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/fft/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/fft/__pycache__/_backend.cpython-311.pyc b/.venv/Lib/site-packages/scipy/fft/__pycache__/_backend.cpython-311.pyc index f14b82d0..56a245eb 100644 Binary files a/.venv/Lib/site-packages/scipy/fft/__pycache__/_backend.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/fft/__pycache__/_backend.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/fft/__pycache__/_basic.cpython-311.pyc b/.venv/Lib/site-packages/scipy/fft/__pycache__/_basic.cpython-311.pyc index b86ac6fd..748e6f0f 100644 Binary files a/.venv/Lib/site-packages/scipy/fft/__pycache__/_basic.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/fft/__pycache__/_basic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/fft/__pycache__/_basic_backend.cpython-311.pyc b/.venv/Lib/site-packages/scipy/fft/__pycache__/_basic_backend.cpython-311.pyc index b3078f3c..d1798df5 100644 Binary files a/.venv/Lib/site-packages/scipy/fft/__pycache__/_basic_backend.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/fft/__pycache__/_basic_backend.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/fft/__pycache__/_fftlog.cpython-311.pyc b/.venv/Lib/site-packages/scipy/fft/__pycache__/_fftlog.cpython-311.pyc index 021272b4..8acd4b3e 100644 Binary files 
a/.venv/Lib/site-packages/scipy/fft/__pycache__/_fftlog.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/fft/__pycache__/_fftlog.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/fft/__pycache__/_fftlog_backend.cpython-311.pyc b/.venv/Lib/site-packages/scipy/fft/__pycache__/_fftlog_backend.cpython-311.pyc index 4b0d46ce..9d1f33ff 100644 Binary files a/.venv/Lib/site-packages/scipy/fft/__pycache__/_fftlog_backend.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/fft/__pycache__/_fftlog_backend.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/fft/__pycache__/_helper.cpython-311.pyc b/.venv/Lib/site-packages/scipy/fft/__pycache__/_helper.cpython-311.pyc index 1acdc51f..534943de 100644 Binary files a/.venv/Lib/site-packages/scipy/fft/__pycache__/_helper.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/fft/__pycache__/_helper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/fft/__pycache__/_realtransforms.cpython-311.pyc b/.venv/Lib/site-packages/scipy/fft/__pycache__/_realtransforms.cpython-311.pyc index 415d4c6d..4664acb2 100644 Binary files a/.venv/Lib/site-packages/scipy/fft/__pycache__/_realtransforms.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/fft/__pycache__/_realtransforms.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/fft/__pycache__/_realtransforms_backend.cpython-311.pyc b/.venv/Lib/site-packages/scipy/fft/__pycache__/_realtransforms_backend.cpython-311.pyc index b50c0e5f..37df5696 100644 Binary files a/.venv/Lib/site-packages/scipy/fft/__pycache__/_realtransforms_backend.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/fft/__pycache__/_realtransforms_backend.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/fft/_pocketfft/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/fft/_pocketfft/__pycache__/__init__.cpython-311.pyc index 0b03abce..3bf644d2 100644 Binary files 
a/.venv/Lib/site-packages/scipy/fft/_pocketfft/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/fft/_pocketfft/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/fft/_pocketfft/__pycache__/basic.cpython-311.pyc b/.venv/Lib/site-packages/scipy/fft/_pocketfft/__pycache__/basic.cpython-311.pyc index 2fde844d..75998128 100644 Binary files a/.venv/Lib/site-packages/scipy/fft/_pocketfft/__pycache__/basic.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/fft/_pocketfft/__pycache__/basic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/fft/_pocketfft/__pycache__/helper.cpython-311.pyc b/.venv/Lib/site-packages/scipy/fft/_pocketfft/__pycache__/helper.cpython-311.pyc index f28b3c27..0a955b8a 100644 Binary files a/.venv/Lib/site-packages/scipy/fft/_pocketfft/__pycache__/helper.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/fft/_pocketfft/__pycache__/helper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/fft/_pocketfft/__pycache__/realtransforms.cpython-311.pyc b/.venv/Lib/site-packages/scipy/fft/_pocketfft/__pycache__/realtransforms.cpython-311.pyc index 7736041d..38d8612a 100644 Binary files a/.venv/Lib/site-packages/scipy/fft/_pocketfft/__pycache__/realtransforms.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/fft/_pocketfft/__pycache__/realtransforms.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/integrate/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/integrate/__pycache__/__init__.cpython-311.pyc index 45e9d0eb..776b5749 100644 Binary files a/.venv/Lib/site-packages/scipy/integrate/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/integrate/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/integrate/__pycache__/_bvp.cpython-311.pyc b/.venv/Lib/site-packages/scipy/integrate/__pycache__/_bvp.cpython-311.pyc index 32e96060..20bb4ba4 100644 Binary files 
a/.venv/Lib/site-packages/scipy/integrate/__pycache__/_bvp.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/integrate/__pycache__/_bvp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/integrate/__pycache__/_ode.cpython-311.pyc b/.venv/Lib/site-packages/scipy/integrate/__pycache__/_ode.cpython-311.pyc index 0febfba0..b9ab613e 100644 Binary files a/.venv/Lib/site-packages/scipy/integrate/__pycache__/_ode.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/integrate/__pycache__/_ode.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/integrate/__pycache__/_odepack_py.cpython-311.pyc b/.venv/Lib/site-packages/scipy/integrate/__pycache__/_odepack_py.cpython-311.pyc index 1f6c1ddb..3b044c52 100644 Binary files a/.venv/Lib/site-packages/scipy/integrate/__pycache__/_odepack_py.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/integrate/__pycache__/_odepack_py.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/integrate/__pycache__/_quad_vec.cpython-311.pyc b/.venv/Lib/site-packages/scipy/integrate/__pycache__/_quad_vec.cpython-311.pyc index ce304f36..c9bcf4f3 100644 Binary files a/.venv/Lib/site-packages/scipy/integrate/__pycache__/_quad_vec.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/integrate/__pycache__/_quad_vec.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/integrate/__pycache__/_quadpack_py.cpython-311.pyc b/.venv/Lib/site-packages/scipy/integrate/__pycache__/_quadpack_py.cpython-311.pyc index 093f6dab..4ae70408 100644 Binary files a/.venv/Lib/site-packages/scipy/integrate/__pycache__/_quadpack_py.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/integrate/__pycache__/_quadpack_py.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/integrate/__pycache__/_quadrature.cpython-311.pyc b/.venv/Lib/site-packages/scipy/integrate/__pycache__/_quadrature.cpython-311.pyc index 204dc64b..3c1a7fbd 100644 Binary files 
a/.venv/Lib/site-packages/scipy/integrate/__pycache__/_quadrature.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/integrate/__pycache__/_quadrature.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/integrate/__pycache__/dop.cpython-311.pyc b/.venv/Lib/site-packages/scipy/integrate/__pycache__/dop.cpython-311.pyc index 65dd058b..4c56a8c5 100644 Binary files a/.venv/Lib/site-packages/scipy/integrate/__pycache__/dop.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/integrate/__pycache__/dop.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/integrate/__pycache__/lsoda.cpython-311.pyc b/.venv/Lib/site-packages/scipy/integrate/__pycache__/lsoda.cpython-311.pyc index ff811634..04b741d6 100644 Binary files a/.venv/Lib/site-packages/scipy/integrate/__pycache__/lsoda.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/integrate/__pycache__/lsoda.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/integrate/__pycache__/odepack.cpython-311.pyc b/.venv/Lib/site-packages/scipy/integrate/__pycache__/odepack.cpython-311.pyc index cc7aa1c7..c3f71341 100644 Binary files a/.venv/Lib/site-packages/scipy/integrate/__pycache__/odepack.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/integrate/__pycache__/odepack.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/integrate/__pycache__/quadpack.cpython-311.pyc b/.venv/Lib/site-packages/scipy/integrate/__pycache__/quadpack.cpython-311.pyc index c1f69939..6a23f5bc 100644 Binary files a/.venv/Lib/site-packages/scipy/integrate/__pycache__/quadpack.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/integrate/__pycache__/quadpack.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/integrate/__pycache__/vode.cpython-311.pyc b/.venv/Lib/site-packages/scipy/integrate/__pycache__/vode.cpython-311.pyc index 16b3448d..c78c5fb9 100644 Binary files a/.venv/Lib/site-packages/scipy/integrate/__pycache__/vode.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/integrate/__pycache__/vode.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/__init__.cpython-311.pyc index 7a528838..feff49f6 100644 Binary files a/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/base.cpython-311.pyc index ad396bda..1e3b9986 100644 Binary files a/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/base.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/bdf.cpython-311.pyc b/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/bdf.cpython-311.pyc index d1b7d8f1..dd6ed9b9 100644 Binary files a/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/bdf.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/bdf.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/common.cpython-311.pyc b/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/common.cpython-311.pyc index fae4bb5b..e9ab55c3 100644 Binary files a/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/common.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/dop853_coefficients.cpython-311.pyc b/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/dop853_coefficients.cpython-311.pyc index 79a4b86d..c5e7da4a 100644 Binary files a/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/dop853_coefficients.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/dop853_coefficients.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/ivp.cpython-311.pyc b/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/ivp.cpython-311.pyc index 948ee156..ee3d091e 100644 Binary files a/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/ivp.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/ivp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/lsoda.cpython-311.pyc b/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/lsoda.cpython-311.pyc index 222eb216..1d5026cf 100644 Binary files a/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/lsoda.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/lsoda.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/radau.cpython-311.pyc b/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/radau.cpython-311.pyc index 0b6bb49c..2fad4dec 100644 Binary files a/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/radau.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/radau.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/rk.cpython-311.pyc b/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/rk.cpython-311.pyc index ca1a8abe..4401a865 100644 Binary files a/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/rk.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/integrate/_ivp/__pycache__/rk.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/__init__.cpython-311.pyc index b2d2d98d..024ea2a0 100644 Binary files a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_bsplines.cpython-311.pyc b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_bsplines.cpython-311.pyc index 27d99a30..1d168c32 100644 Binary files a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_bsplines.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_bsplines.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_cubic.cpython-311.pyc b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_cubic.cpython-311.pyc index 82a7de3c..3212ded5 100644 Binary files a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_cubic.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_cubic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_fitpack2.cpython-311.pyc b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_fitpack2.cpython-311.pyc index 1f0db54c..016c77f3 100644 Binary files a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_fitpack2.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_fitpack2.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_fitpack_impl.cpython-311.pyc b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_fitpack_impl.cpython-311.pyc index 7a967714..443db657 100644 Binary files a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_fitpack_impl.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_fitpack_impl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_fitpack_py.cpython-311.pyc b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_fitpack_py.cpython-311.pyc index d69105e3..14537429 100644 Binary files a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_fitpack_py.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_fitpack_py.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_interpolate.cpython-311.pyc b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_interpolate.cpython-311.pyc index b00261dc..6cb625af 100644 Binary files a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_interpolate.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_interpolate.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_ndbspline.cpython-311.pyc b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_ndbspline.cpython-311.pyc index e16300e9..f8ddbdfe 100644 Binary files a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_ndbspline.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_ndbspline.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_ndgriddata.cpython-311.pyc b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_ndgriddata.cpython-311.pyc index ec72b6c6..8ee52a59 100644 Binary files a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_ndgriddata.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_ndgriddata.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_pade.cpython-311.pyc b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_pade.cpython-311.pyc index bc5934af..877b9762 100644 Binary files a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_pade.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_pade.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_polyint.cpython-311.pyc b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_polyint.cpython-311.pyc index 53171293..2d6c6725 100644 Binary files a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_polyint.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_polyint.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_rbf.cpython-311.pyc b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_rbf.cpython-311.pyc index c4fcb415..5bbaaa53 100644 Binary files a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_rbf.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_rbf.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_rbfinterp.cpython-311.pyc b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_rbfinterp.cpython-311.pyc index 6ddebe49..74d20356 100644 Binary files a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_rbfinterp.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_rbfinterp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_rgi.cpython-311.pyc b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_rgi.cpython-311.pyc index c8967063..ea9e8a6a 100644 Binary files a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_rgi.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/_rgi.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/fitpack.cpython-311.pyc b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/fitpack.cpython-311.pyc index a2754fc1..54fba131 100644 Binary files a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/fitpack.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/fitpack.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/fitpack2.cpython-311.pyc b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/fitpack2.cpython-311.pyc index 4e0b66ba..6cbe3241 100644 Binary files a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/fitpack2.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/fitpack2.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/interpolate.cpython-311.pyc b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/interpolate.cpython-311.pyc index 918b59be..20239afc 100644 Binary files a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/interpolate.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/interpolate.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/ndgriddata.cpython-311.pyc b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/ndgriddata.cpython-311.pyc index a25d4205..716b22d2 100644 Binary files a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/ndgriddata.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/ndgriddata.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/polyint.cpython-311.pyc b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/polyint.cpython-311.pyc index c6bed52c..47bf474e 100644 Binary files a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/polyint.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/polyint.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/rbf.cpython-311.pyc b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/rbf.cpython-311.pyc index dd61bec5..7a287951 100644 Binary files a/.venv/Lib/site-packages/scipy/interpolate/__pycache__/rbf.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/interpolate/__pycache__/rbf.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/__pycache__/__init__.cpython-311.pyc index 2b5f3476..8bf8c99f 100644 Binary files a/.venv/Lib/site-packages/scipy/io/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/__pycache__/__init__.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/scipy/io/__pycache__/_fortran.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/__pycache__/_fortran.cpython-311.pyc index ca08e1bc..006261bd 100644 Binary files a/.venv/Lib/site-packages/scipy/io/__pycache__/_fortran.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/__pycache__/_fortran.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/__pycache__/_idl.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/__pycache__/_idl.cpython-311.pyc index 2f37d015..74f8b39e 100644 Binary files a/.venv/Lib/site-packages/scipy/io/__pycache__/_idl.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/__pycache__/_idl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/__pycache__/_mmio.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/__pycache__/_mmio.cpython-311.pyc index d7757978..2b103b01 100644 Binary files a/.venv/Lib/site-packages/scipy/io/__pycache__/_mmio.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/__pycache__/_mmio.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/__pycache__/_netcdf.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/__pycache__/_netcdf.cpython-311.pyc index 5c5df026..bb781fdc 100644 Binary files a/.venv/Lib/site-packages/scipy/io/__pycache__/_netcdf.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/__pycache__/_netcdf.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/__pycache__/harwell_boeing.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/__pycache__/harwell_boeing.cpython-311.pyc index d637b046..11bf74c0 100644 Binary files a/.venv/Lib/site-packages/scipy/io/__pycache__/harwell_boeing.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/__pycache__/harwell_boeing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/__pycache__/idl.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/__pycache__/idl.cpython-311.pyc index 2ff2507d..5005c6af 100644 Binary files 
a/.venv/Lib/site-packages/scipy/io/__pycache__/idl.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/__pycache__/idl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/__pycache__/mmio.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/__pycache__/mmio.cpython-311.pyc index 4b457f54..7efcbe27 100644 Binary files a/.venv/Lib/site-packages/scipy/io/__pycache__/mmio.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/__pycache__/mmio.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/__pycache__/netcdf.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/__pycache__/netcdf.cpython-311.pyc index 1faf7558..d7886a4c 100644 Binary files a/.venv/Lib/site-packages/scipy/io/__pycache__/netcdf.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/__pycache__/netcdf.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/__pycache__/wavfile.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/__pycache__/wavfile.cpython-311.pyc index adb1d5c5..21e88c5b 100644 Binary files a/.venv/Lib/site-packages/scipy/io/__pycache__/wavfile.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/__pycache__/wavfile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/_fast_matrix_market/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/_fast_matrix_market/__pycache__/__init__.cpython-311.pyc index f4bf3393..852bc346 100644 Binary files a/.venv/Lib/site-packages/scipy/io/_fast_matrix_market/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/_fast_matrix_market/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/_harwell_boeing/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/_harwell_boeing/__pycache__/__init__.cpython-311.pyc index 6879e3d4..0f2dbb1b 100644 Binary files a/.venv/Lib/site-packages/scipy/io/_harwell_boeing/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/io/_harwell_boeing/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/_harwell_boeing/__pycache__/_fortran_format_parser.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/_harwell_boeing/__pycache__/_fortran_format_parser.cpython-311.pyc index 41960e18..92db2337 100644 Binary files a/.venv/Lib/site-packages/scipy/io/_harwell_boeing/__pycache__/_fortran_format_parser.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/_harwell_boeing/__pycache__/_fortran_format_parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/_harwell_boeing/__pycache__/hb.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/_harwell_boeing/__pycache__/hb.cpython-311.pyc index c46ecdb0..608a8063 100644 Binary files a/.venv/Lib/site-packages/scipy/io/_harwell_boeing/__pycache__/hb.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/_harwell_boeing/__pycache__/hb.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/arff/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/arff/__pycache__/__init__.cpython-311.pyc index 1fb1209d..ccab7e3a 100644 Binary files a/.venv/Lib/site-packages/scipy/io/arff/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/arff/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/arff/__pycache__/_arffread.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/arff/__pycache__/_arffread.cpython-311.pyc index 97a4cb40..0186510e 100644 Binary files a/.venv/Lib/site-packages/scipy/io/arff/__pycache__/_arffread.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/arff/__pycache__/_arffread.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/arff/__pycache__/arffread.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/arff/__pycache__/arffread.cpython-311.pyc index f35bf8ed..a3c33d52 100644 Binary files 
a/.venv/Lib/site-packages/scipy/io/arff/__pycache__/arffread.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/arff/__pycache__/arffread.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/__init__.cpython-311.pyc index 7d0e3dbd..a035955f 100644 Binary files a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_byteordercodes.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_byteordercodes.cpython-311.pyc index 3f9a96af..8a0aec71 100644 Binary files a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_byteordercodes.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_byteordercodes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_mio.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_mio.cpython-311.pyc index 25a41c76..17d6fea4 100644 Binary files a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_mio.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_mio.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_mio4.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_mio4.cpython-311.pyc index ae10f9e4..f59f499b 100644 Binary files a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_mio4.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_mio4.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_mio5.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_mio5.cpython-311.pyc index 97871ff2..35891b75 100644 Binary files a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_mio5.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_mio5.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_mio5_params.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_mio5_params.cpython-311.pyc index c25ddbae..48a20a76 100644 Binary files a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_mio5_params.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_mio5_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_miobase.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_miobase.cpython-311.pyc index c763f435..3b552688 100644 Binary files a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_miobase.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/_miobase.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/byteordercodes.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/byteordercodes.cpython-311.pyc index 1a956bbb..56453577 100644 Binary files a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/byteordercodes.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/byteordercodes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio.cpython-311.pyc index 973315f1..c98aa29a 100644 Binary files a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio4.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio4.cpython-311.pyc index 523ed483..4765b0fb 100644 Binary files a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio4.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio4.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio5.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio5.cpython-311.pyc index 08fa96b4..e858bbfb 100644 Binary files a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio5.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio5.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio5_params.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio5_params.cpython-311.pyc index d5c23b93..357678d2 100644 Binary files a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio5_params.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio5_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio5_utils.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio5_utils.cpython-311.pyc index 8ae7acbd..0d05cc46 100644 Binary files a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio5_utils.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio5_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio_utils.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio_utils.cpython-311.pyc index 0d8523cf..0c92b5ee 100644 Binary files a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio_utils.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/mio_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/miobase.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/miobase.cpython-311.pyc index 0fa52855..7269702b 100644 Binary files a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/miobase.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/miobase.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/streams.cpython-311.pyc b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/streams.cpython-311.pyc index 4e73648b..e126b861 100644 Binary files a/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/streams.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/io/matlab/__pycache__/streams.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-2ch-32bit-float-be.wav b/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-2ch-32bit-float-be.wav index 056333e7..20b9264e 100644 Binary files a/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-2ch-32bit-float-be.wav and b/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-2ch-32bit-float-be.wav differ diff --git a/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-2ch-32bit-float-le.wav b/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-2ch-32bit-float-le.wav index 57e6f178..aa79a3ae 100644 Binary files a/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-2ch-32bit-float-le.wav and b/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-2ch-32bit-float-le.wav differ diff --git a/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-be-1ch-4bytes.wav b/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-be-1ch-4bytes.wav index 1825dfcf..6fa1d82c 100644 Binary files a/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-be-1ch-4bytes.wav and b/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-be-1ch-4bytes.wav differ diff --git a/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-le-1ch-4bytes-early-eof-no-data.wav b/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-le-1ch-4bytes-early-eof-no-data.wav index bb86f2f3..8c227b75 100644 Binary files a/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-le-1ch-4bytes-early-eof-no-data.wav and b/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-le-1ch-4bytes-early-eof-no-data.wav differ diff --git 
a/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-le-1ch-4bytes-early-eof.wav b/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-le-1ch-4bytes-early-eof.wav index d1b7065c..87af8be1 100644 Binary files a/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-le-1ch-4bytes-early-eof.wav and b/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-le-1ch-4bytes-early-eof.wav differ diff --git a/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-le-1ch-4bytes-incomplete-chunk.wav b/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-le-1ch-4bytes-incomplete-chunk.wav index 7271fdd2..f8d332e9 100644 Binary files a/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-le-1ch-4bytes-incomplete-chunk.wav and b/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-le-1ch-4bytes-incomplete-chunk.wav differ diff --git a/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-le-1ch-4bytes.wav b/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-le-1ch-4bytes.wav index 8aae8e2c..69713001 100644 Binary files a/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-le-1ch-4bytes.wav and b/.venv/Lib/site-packages/scipy/io/tests/data/test-44100Hz-le-1ch-4bytes.wav differ diff --git a/.venv/Lib/site-packages/scipy/io/tests/data/test-48000Hz-2ch-64bit-float-le-wavex.wav b/.venv/Lib/site-packages/scipy/io/tests/data/test-48000Hz-2ch-64bit-float-le-wavex.wav index 31221b2a..e8ae4914 100644 Binary files a/.venv/Lib/site-packages/scipy/io/tests/data/test-48000Hz-2ch-64bit-float-le-wavex.wav and b/.venv/Lib/site-packages/scipy/io/tests/data/test-48000Hz-2ch-64bit-float-le-wavex.wav differ diff --git a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-be-3ch-5S-24bit.wav b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-be-3ch-5S-24bit.wav index db596cc5..71934ac8 100644 Binary files a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-be-3ch-5S-24bit.wav and 
b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-be-3ch-5S-24bit.wav differ diff --git a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-1ch-10S-20bit-extra.wav b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-1ch-10S-20bit-extra.wav index 13f131e3..4dc4ebed 100644 Binary files a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-1ch-10S-20bit-extra.wav and b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-1ch-10S-20bit-extra.wav differ diff --git a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-1ch-1byte-ulaw.wav b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-1ch-1byte-ulaw.wav index c4fed626..b3c27ca6 100644 Binary files a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-1ch-1byte-ulaw.wav and b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-1ch-1byte-ulaw.wav differ diff --git a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-2ch-1byteu.wav b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-2ch-1byteu.wav index 70900819..f733f90d 100644 Binary files a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-2ch-1byteu.wav and b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-2ch-1byteu.wav differ diff --git a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-24bit-inconsistent.wav b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-24bit-inconsistent.wav index 8e79d54d..fdd1849f 100644 Binary files a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-24bit-inconsistent.wav and b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-24bit-inconsistent.wav differ diff --git a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-24bit.wav b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-24bit.wav index 9c4312bc..23be5c7b 100644 Binary files a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-24bit.wav and 
b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-24bit.wav differ diff --git a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-36bit.wav b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-36bit.wav index 5c28ed81..3152c566 100644 Binary files a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-36bit.wav and b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-36bit.wav differ diff --git a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-45bit.wav b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-45bit.wav index 2d4eea22..2e9dc760 100644 Binary files a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-45bit.wav and b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-45bit.wav differ diff --git a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-53bit.wav b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-53bit.wav index 68437dad..99ec1413 100644 Binary files a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-53bit.wav and b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-53bit.wav differ diff --git a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-64bit.wav b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-64bit.wav index ef478def..09ea5152 100644 Binary files a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-64bit.wav and b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-3ch-5S-64bit.wav differ diff --git a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-4ch-9S-12bit.wav b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-4ch-9S-12bit.wav index 9c93e132..d6f534ab 100644 Binary files a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-4ch-9S-12bit.wav and b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-4ch-9S-12bit.wav differ diff --git 
a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-5ch-9S-5bit.wav b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-5ch-9S-5bit.wav index b95bcdf3..d2fa9b3a 100644 Binary files a/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-5ch-9S-5bit.wav and b/.venv/Lib/site-packages/scipy/io/tests/data/test-8000Hz-le-5ch-9S-5bit.wav differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/__init__.cpython-311.pyc index f920c806..ec68d210 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_basic.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_basic.cpython-311.pyc index 6bd290f7..e0ae092a 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_basic.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_basic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp.cpython-311.pyc index cc12070a..d74ef3c2 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_cholesky.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_cholesky.cpython-311.pyc index 1e4bab15..b0f7021f 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_cholesky.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_cholesky.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_cossin.cpython-311.pyc 
b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_cossin.cpython-311.pyc index 41400931..a36d3c6a 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_cossin.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_cossin.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_ldl.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_ldl.cpython-311.pyc index 51867381..a203ebc7 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_ldl.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_ldl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_lu.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_lu.cpython-311.pyc index 5eae0d8f..5ecba192 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_lu.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_lu.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_polar.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_polar.cpython-311.pyc index 1df86d2e..73fad184 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_polar.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_polar.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_qr.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_qr.cpython-311.pyc index 3722caf5..d02ecb52 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_qr.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_qr.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_qz.cpython-311.pyc 
b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_qz.cpython-311.pyc index bdc713cc..1a714883 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_qz.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_qz.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_schur.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_schur.cpython-311.pyc index 241b3a38..c5f033a7 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_schur.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_schur.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_svd.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_svd.cpython-311.pyc index d3d90c79..74d90129 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_svd.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_decomp_svd.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_expm_frechet.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_expm_frechet.cpython-311.pyc index 8d348d6b..fb7da134 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_expm_frechet.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_expm_frechet.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_interpolative_backend.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_interpolative_backend.cpython-311.pyc index f053cfcf..fa07e1f5 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_interpolative_backend.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_interpolative_backend.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_matfuncs.cpython-311.pyc 
b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_matfuncs.cpython-311.pyc index 57b30dc1..1ecba644 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_matfuncs.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_matfuncs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_matfuncs_sqrtm.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_matfuncs_sqrtm.cpython-311.pyc index a214b42e..10c541e3 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_matfuncs_sqrtm.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_matfuncs_sqrtm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_misc.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_misc.cpython-311.pyc index 3c175c68..4189b8f1 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_misc.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_misc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_procrustes.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_procrustes.cpython-311.pyc index 38444ac7..0dd556c3 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_procrustes.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_procrustes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_sketches.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_sketches.cpython-311.pyc index 1046a1fb..be02505c 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_sketches.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_sketches.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_solvers.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_solvers.cpython-311.pyc index 599fb311..5eaf5bda 100644 
Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_solvers.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_solvers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_special_matrices.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_special_matrices.cpython-311.pyc index e06f9338..c3e69e7d 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/_special_matrices.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/_special_matrices.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/basic.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/basic.cpython-311.pyc index 003448f2..7bb850ca 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/basic.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/basic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/blas.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/blas.cpython-311.pyc index 75536366..05207508 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/blas.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/blas.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp.cpython-311.pyc index 588e96ad..acc6d7b3 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp_cholesky.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp_cholesky.cpython-311.pyc index fba16a45..d0398af8 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp_cholesky.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp_cholesky.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp_lu.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp_lu.cpython-311.pyc index 4937a4ee..db2f7488 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp_lu.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp_lu.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp_qr.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp_qr.cpython-311.pyc index ca4ab123..699cc3dc 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp_qr.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp_qr.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp_schur.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp_schur.cpython-311.pyc index f774ab39..ecbf2411 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp_schur.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp_schur.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp_svd.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp_svd.cpython-311.pyc index 5ce8dfa7..752dfcb2 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp_svd.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/decomp_svd.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/interpolative.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/interpolative.cpython-311.pyc index 3738270c..45405a25 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/interpolative.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/interpolative.cpython-311.pyc differ 
diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/lapack.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/lapack.cpython-311.pyc index 5bd0d225..0b458350 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/lapack.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/lapack.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/matfuncs.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/matfuncs.cpython-311.pyc index 1cde6824..f5dd6a40 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/matfuncs.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/matfuncs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/misc.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/misc.cpython-311.pyc index c3b5d312..74f3b9cf 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/misc.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/misc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/linalg/__pycache__/special_matrices.cpython-311.pyc b/.venv/Lib/site-packages/scipy/linalg/__pycache__/special_matrices.cpython-311.pyc index 40acfa03..d27a210d 100644 Binary files a/.venv/Lib/site-packages/scipy/linalg/__pycache__/special_matrices.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/linalg/__pycache__/special_matrices.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/__init__.cpython-311.pyc index f410a243..dc4b563b 100644 Binary files a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_filters.cpython-311.pyc 
b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_filters.cpython-311.pyc index c2a0f7fd..36350532 100644 Binary files a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_filters.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_filters.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_fourier.cpython-311.pyc b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_fourier.cpython-311.pyc index 03ff00ad..aa3c8220 100644 Binary files a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_fourier.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_fourier.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_interpolation.cpython-311.pyc b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_interpolation.cpython-311.pyc index 91dfe41f..b1115c6e 100644 Binary files a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_interpolation.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_interpolation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_measurements.cpython-311.pyc b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_measurements.cpython-311.pyc index 671310b2..4c378f4f 100644 Binary files a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_measurements.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_measurements.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_morphology.cpython-311.pyc b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_morphology.cpython-311.pyc index 2baa50e8..ada41a3e 100644 Binary files a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_morphology.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_morphology.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_ni_docstrings.cpython-311.pyc 
b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_ni_docstrings.cpython-311.pyc index b486a86d..fec4c8dc 100644 Binary files a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_ni_docstrings.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_ni_docstrings.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_ni_support.cpython-311.pyc b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_ni_support.cpython-311.pyc index 6cffcb6c..a67501c3 100644 Binary files a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_ni_support.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/_ni_support.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/filters.cpython-311.pyc b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/filters.cpython-311.pyc index 4022788c..6d43f6ec 100644 Binary files a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/filters.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/filters.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/fourier.cpython-311.pyc b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/fourier.cpython-311.pyc index 4512828d..867200c1 100644 Binary files a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/fourier.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/fourier.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/interpolation.cpython-311.pyc b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/interpolation.cpython-311.pyc index 2a07abaa..58af075f 100644 Binary files a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/interpolation.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/interpolation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/measurements.cpython-311.pyc 
b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/measurements.cpython-311.pyc index 330fb3d6..effe1e58 100644 Binary files a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/measurements.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/measurements.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/morphology.cpython-311.pyc b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/morphology.cpython-311.pyc index 799bb9e3..d1a1eb25 100644 Binary files a/.venv/Lib/site-packages/scipy/ndimage/__pycache__/morphology.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/ndimage/__pycache__/morphology.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/__init__.cpython-311.pyc index 6431a562..43588f93 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_basinhopping.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_basinhopping.cpython-311.pyc index b26b30f9..40619bfa 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_basinhopping.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_basinhopping.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_cobyla_py.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_cobyla_py.cpython-311.pyc index 1b0de99f..f13a6e1d 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_cobyla_py.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_cobyla_py.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_constraints.cpython-311.pyc 
b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_constraints.cpython-311.pyc index 38629990..87f4250a 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_constraints.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_constraints.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_dcsrch.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_dcsrch.cpython-311.pyc index 2078d9f0..dc9bc5c2 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_dcsrch.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_dcsrch.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_differentiable_functions.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_differentiable_functions.cpython-311.pyc index 63612a7f..38849772 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_differentiable_functions.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_differentiable_functions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_differentialevolution.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_differentialevolution.cpython-311.pyc index 3a13645c..4a307f5f 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_differentialevolution.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_differentialevolution.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_direct_py.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_direct_py.cpython-311.pyc index 5e00615c..a82e1b47 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_direct_py.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_direct_py.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_dual_annealing.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_dual_annealing.cpython-311.pyc index b3a4ab91..f65c42b7 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_dual_annealing.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_dual_annealing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_hessian_update_strategy.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_hessian_update_strategy.cpython-311.pyc index 2b00b659..cf25d678 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_hessian_update_strategy.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_hessian_update_strategy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_isotonic.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_isotonic.cpython-311.pyc index c6764c44..09f7810b 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_isotonic.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_isotonic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_lbfgsb_py.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_lbfgsb_py.cpython-311.pyc index 4438006c..d71d4eda 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_lbfgsb_py.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_lbfgsb_py.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linesearch.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linesearch.cpython-311.pyc index 889f1158..d3f38969 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linesearch.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linesearch.cpython-311.pyc differ 
diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog.cpython-311.pyc index d4e3e43b..7f3ce192 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_doc.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_doc.cpython-311.pyc index a3817701..4d490aa7 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_doc.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_doc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_highs.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_highs.cpython-311.pyc index 9fb3a7e8..a31d60dd 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_highs.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_highs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_ip.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_ip.cpython-311.pyc index 33f47d5f..f9b13b1c 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_ip.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_ip.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_rs.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_rs.cpython-311.pyc index df344ab9..58af447c 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_rs.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_rs.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_simplex.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_simplex.cpython-311.pyc index 4d97957f..5687b455 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_simplex.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_simplex.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_util.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_util.cpython-311.pyc index 2c210aa2..681537a4 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_util.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_linprog_util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_milp.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_milp.cpython-311.pyc index 41f73c0b..dc2a3d42 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_milp.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_milp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_minimize.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_minimize.cpython-311.pyc index 525b6ee0..876b4b8f 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_minimize.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_minimize.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_minpack_py.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_minpack_py.cpython-311.pyc index f6a880c0..76d292fd 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_minpack_py.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_minpack_py.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_nnls.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_nnls.cpython-311.pyc index 14457ad3..9ff20d9d 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_nnls.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_nnls.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_nonlin.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_nonlin.cpython-311.pyc index 69a00832..60772737 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_nonlin.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_nonlin.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_numdiff.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_numdiff.cpython-311.pyc index 14ee33b8..27bcd522 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_numdiff.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_numdiff.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_optimize.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_optimize.cpython-311.pyc index a945784f..79689097 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_optimize.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_optimize.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_qap.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_qap.cpython-311.pyc index 7a7e1e86..838d1084 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_qap.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_qap.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_remove_redundancy.cpython-311.pyc 
b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_remove_redundancy.cpython-311.pyc index 77001ecd..1fcc53fb 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_remove_redundancy.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_remove_redundancy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_root.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_root.cpython-311.pyc index 09ce998e..0002fd3c 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_root.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_root.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_root_scalar.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_root_scalar.cpython-311.pyc index 7076e2eb..5a05df0e 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_root_scalar.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_root_scalar.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_shgo.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_shgo.cpython-311.pyc index 7d746553..f3103f82 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_shgo.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_shgo.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_slsqp_py.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_slsqp_py.cpython-311.pyc index 352c836f..8bdcc889 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_slsqp_py.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_slsqp_py.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_spectral.cpython-311.pyc 
b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_spectral.cpython-311.pyc index 7bc8ed5c..6cfadfa0 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_spectral.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_spectral.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_tnc.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_tnc.cpython-311.pyc index ea4404ab..87817dca 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_tnc.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_tnc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_trustregion.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_trustregion.cpython-311.pyc index e7323d14..7963a12b 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_trustregion.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_trustregion.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_trustregion_dogleg.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_trustregion_dogleg.cpython-311.pyc index 80c45d3b..00ae64ff 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_trustregion_dogleg.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_trustregion_dogleg.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_trustregion_exact.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_trustregion_exact.cpython-311.pyc index 748d2813..783c4dcf 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_trustregion_exact.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_trustregion_exact.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_trustregion_krylov.cpython-311.pyc 
b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_trustregion_krylov.cpython-311.pyc index 1016acad..cb6dd245 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_trustregion_krylov.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_trustregion_krylov.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_trustregion_ncg.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_trustregion_ncg.cpython-311.pyc index 361d9aad..3d09d7e5 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_trustregion_ncg.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_trustregion_ncg.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_zeros_py.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_zeros_py.cpython-311.pyc index 2bc3c95a..22abf29f 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/_zeros_py.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/_zeros_py.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/cobyla.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/cobyla.cpython-311.pyc index cc9f0a1b..c6ef8af4 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/cobyla.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/cobyla.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/lbfgsb.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/lbfgsb.cpython-311.pyc index 433aa474..3716fb53 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/lbfgsb.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/lbfgsb.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/linesearch.cpython-311.pyc 
b/.venv/Lib/site-packages/scipy/optimize/__pycache__/linesearch.cpython-311.pyc index e72c5cfb..c67be13a 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/linesearch.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/linesearch.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/minpack.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/minpack.cpython-311.pyc index afc75bf4..2a5e276a 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/minpack.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/minpack.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/minpack2.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/minpack2.cpython-311.pyc index a446a510..06a5b50e 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/minpack2.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/minpack2.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/moduleTNC.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/moduleTNC.cpython-311.pyc index 0f4f2134..aeffe59a 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/moduleTNC.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/moduleTNC.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/nonlin.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/nonlin.cpython-311.pyc index be63eb50..626409dc 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/nonlin.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/nonlin.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/optimize.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/optimize.cpython-311.pyc index bc240981..c6b08116 
100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/optimize.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/optimize.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/slsqp.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/slsqp.cpython-311.pyc index 92ed921a..e6777226 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/slsqp.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/slsqp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/tnc.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/tnc.cpython-311.pyc index 78f76de8..f091eef4 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/tnc.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/tnc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/__pycache__/zeros.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/__pycache__/zeros.cpython-311.pyc index 5ff37232..6e2f4cc9 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/__pycache__/zeros.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/__pycache__/zeros.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/_highs/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/_highs/__pycache__/__init__.cpython-311.pyc index 9eeb8d3c..3d7fb87b 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/_highs/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/_highs/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/__init__.cpython-311.pyc index 8703a56b..094f8d9f 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/bvls.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/bvls.cpython-311.pyc index 62c932ae..42c3036c 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/bvls.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/bvls.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/common.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/common.cpython-311.pyc index 6f1e9ad9..e46f8ca8 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/common.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/dogbox.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/dogbox.cpython-311.pyc index db176ff8..d4366227 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/dogbox.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/dogbox.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/least_squares.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/least_squares.cpython-311.pyc index fb61cade..d63be61c 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/least_squares.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/least_squares.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/lsq_linear.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/lsq_linear.cpython-311.pyc index 6ccbdf33..5284829c 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/lsq_linear.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/lsq_linear.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/trf.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/trf.cpython-311.pyc index 5930098c..9ebfb831 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/trf.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/trf.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/trf_linear.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/trf_linear.cpython-311.pyc index b75eca3c..58603a97 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/trf_linear.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/_lsq/__pycache__/trf_linear.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/_shgo_lib/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/_shgo_lib/__pycache__/__init__.cpython-311.pyc index c7a6c705..86a0bbc5 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/_shgo_lib/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/_shgo_lib/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/_shgo_lib/__pycache__/_complex.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/_shgo_lib/__pycache__/_complex.cpython-311.pyc index 09ffb534..921242b6 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/_shgo_lib/__pycache__/_complex.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/_shgo_lib/__pycache__/_complex.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/_shgo_lib/__pycache__/_vertex.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/_shgo_lib/__pycache__/_vertex.cpython-311.pyc index ca99482b..057db7d8 100644 Binary files 
a/.venv/Lib/site-packages/scipy/optimize/_shgo_lib/__pycache__/_vertex.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/_shgo_lib/__pycache__/_vertex.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/_trlib/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/_trlib/__pycache__/__init__.cpython-311.pyc index a65017a8..0d550ce0 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/_trlib/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/_trlib/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/__init__.cpython-311.pyc index acc12bd8..3ab593d1 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/canonical_constraint.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/canonical_constraint.cpython-311.pyc index c90e235d..34dece97 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/canonical_constraint.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/canonical_constraint.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/equality_constrained_sqp.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/equality_constrained_sqp.cpython-311.pyc index 5af068ae..e0f09359 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/equality_constrained_sqp.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/equality_constrained_sqp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/minimize_trustregion_constr.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/minimize_trustregion_constr.cpython-311.pyc index de46332a..277f0fc7 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/minimize_trustregion_constr.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/minimize_trustregion_constr.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/projections.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/projections.cpython-311.pyc index c356f796..dfdf9569 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/projections.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/projections.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/qp_subproblem.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/qp_subproblem.cpython-311.pyc index af11f364..f9c40e18 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/qp_subproblem.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/qp_subproblem.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/report.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/report.cpython-311.pyc index 5c333eb1..061e593a 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/report.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/report.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/tr_interior_point.cpython-311.pyc b/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/tr_interior_point.cpython-311.pyc index 12a8ae64..08f3a983 100644 Binary files a/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/tr_interior_point.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/optimize/_trustregion_constr/__pycache__/tr_interior_point.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/__init__.cpython-311.pyc index df20366e..f2ac0e89 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/_arraytools.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/_arraytools.cpython-311.pyc index 6c71dab3..9e80a792 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/_arraytools.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/_arraytools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/_bsplines.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/_bsplines.cpython-311.pyc index 3b5166a8..e55d10ff 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/_bsplines.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/_bsplines.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/_czt.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/_czt.cpython-311.pyc index b1c36896..907fec1e 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/_czt.cpython-311.pyc 
and b/.venv/Lib/site-packages/scipy/signal/__pycache__/_czt.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/_filter_design.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/_filter_design.cpython-311.pyc index 5921a095..d7404ae0 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/_filter_design.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/_filter_design.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/_fir_filter_design.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/_fir_filter_design.cpython-311.pyc index 8b6c28cb..687ffe1d 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/_fir_filter_design.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/_fir_filter_design.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/_lti_conversion.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/_lti_conversion.cpython-311.pyc index 2961d26e..2e65193d 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/_lti_conversion.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/_lti_conversion.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/_ltisys.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/_ltisys.cpython-311.pyc index 2ca742f5..9823f672 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/_ltisys.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/_ltisys.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/_max_len_seq.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/_max_len_seq.cpython-311.pyc index e792c437..67a1a1b1 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/_max_len_seq.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/signal/__pycache__/_max_len_seq.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/_peak_finding.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/_peak_finding.cpython-311.pyc index 122ae710..73c26fca 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/_peak_finding.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/_peak_finding.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/_savitzky_golay.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/_savitzky_golay.cpython-311.pyc index a256e737..88c6b431 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/_savitzky_golay.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/_savitzky_golay.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/_short_time_fft.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/_short_time_fft.cpython-311.pyc index 76072d3e..2e0fa061 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/_short_time_fft.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/_short_time_fft.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/_signaltools.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/_signaltools.cpython-311.pyc index a53d7377..3514f79f 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/_signaltools.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/_signaltools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/_spectral_py.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/_spectral_py.cpython-311.pyc index dc389d51..2bc36765 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/_spectral_py.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/signal/__pycache__/_spectral_py.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/_upfirdn.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/_upfirdn.cpython-311.pyc index bf571ed4..d0f6c8b3 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/_upfirdn.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/_upfirdn.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/_waveforms.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/_waveforms.cpython-311.pyc index 398be407..5b8129c8 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/_waveforms.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/_waveforms.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/_wavelets.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/_wavelets.cpython-311.pyc index 3815ca0c..bb6ddacc 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/_wavelets.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/_wavelets.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/bsplines.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/bsplines.cpython-311.pyc index f03375c5..2acfe72b 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/bsplines.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/bsplines.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/filter_design.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/filter_design.cpython-311.pyc index b93ab139..8e6bb186 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/filter_design.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/filter_design.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/scipy/signal/__pycache__/fir_filter_design.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/fir_filter_design.cpython-311.pyc index 9e537758..77718aa3 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/fir_filter_design.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/fir_filter_design.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/lti_conversion.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/lti_conversion.cpython-311.pyc index 67795cf5..81bd0ad5 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/lti_conversion.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/lti_conversion.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/ltisys.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/ltisys.cpython-311.pyc index d7fb6707..af688e23 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/ltisys.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/ltisys.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/signaltools.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/signaltools.cpython-311.pyc index fcebc91d..5ecef98e 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/signaltools.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/signaltools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/spectral.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/spectral.cpython-311.pyc index fe094cb2..24aef50b 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/spectral.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/spectral.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/spline.cpython-311.pyc 
b/.venv/Lib/site-packages/scipy/signal/__pycache__/spline.cpython-311.pyc index 74a235eb..38136236 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/spline.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/spline.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/waveforms.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/waveforms.cpython-311.pyc index 009d91fd..571209f8 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/waveforms.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/waveforms.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/__pycache__/wavelets.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/__pycache__/wavelets.cpython-311.pyc index fcff684f..b3deed12 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/__pycache__/wavelets.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/__pycache__/wavelets.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/windows/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/windows/__pycache__/__init__.cpython-311.pyc index 52f18d20..d6149c9d 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/windows/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/windows/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/windows/__pycache__/_windows.cpython-311.pyc b/.venv/Lib/site-packages/scipy/signal/windows/__pycache__/_windows.cpython-311.pyc index 1d247d2e..100533af 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/windows/__pycache__/_windows.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/windows/__pycache__/_windows.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/signal/windows/__pycache__/windows.cpython-311.pyc 
b/.venv/Lib/site-packages/scipy/signal/windows/__pycache__/windows.cpython-311.pyc index 07ad828a..5a2fe0ee 100644 Binary files a/.venv/Lib/site-packages/scipy/signal/windows/__pycache__/windows.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/signal/windows/__pycache__/windows.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/__init__.cpython-311.pyc index 346cdbfe..f6999281 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_base.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_base.cpython-311.pyc index 00c1edea..bf7a35a4 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_base.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_bsr.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_bsr.cpython-311.pyc index 6e8761f9..ffb16006 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_bsr.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_bsr.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_compressed.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_compressed.cpython-311.pyc index 0fe94f82..2c27c679 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_compressed.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_compressed.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_construct.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_construct.cpython-311.pyc index 88384897..2e196c59 100644 Binary files 
a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_construct.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_construct.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_coo.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_coo.cpython-311.pyc index 4ed1b73e..cad4b3a0 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_coo.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_coo.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_csc.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_csc.cpython-311.pyc index 7e9f0bdb..982addef 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_csc.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_csc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_csr.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_csr.cpython-311.pyc index 7ff3c2cf..0d8760e1 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_csr.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_csr.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_data.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_data.cpython-311.pyc index bd228403..4e1f3b24 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_data.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_data.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_dia.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_dia.cpython-311.pyc index 379deea6..d6f17dd8 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_dia.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_dia.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_dok.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_dok.cpython-311.pyc index 7910d77b..c01f21a3 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_dok.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_dok.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_extract.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_extract.cpython-311.pyc index 38f5fbe5..cbcd96a4 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_extract.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_extract.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_index.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_index.cpython-311.pyc index dcb89a71..9e507f35 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_index.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_index.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_lil.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_lil.cpython-311.pyc index 350610a2..c13fe6ce 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_lil.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_lil.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_matrix.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_matrix.cpython-311.pyc index 73194805..26117852 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_matrix.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_matrix.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_matrix_io.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_matrix_io.cpython-311.pyc index cccc03cc..d9307ff6 100644 
Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_matrix_io.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_matrix_io.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_sputils.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_sputils.cpython-311.pyc index 9e3fa406..4763166f 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/_sputils.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/_sputils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/base.cpython-311.pyc index 6204c10e..981f22e0 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/base.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/bsr.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/bsr.cpython-311.pyc index 390ac696..81a501aa 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/bsr.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/bsr.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/compressed.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/compressed.cpython-311.pyc index b8936e87..0f8ca0fd 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/compressed.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/compressed.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/construct.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/construct.cpython-311.pyc index 9f6aad8e..16b02523 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/construct.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/sparse/__pycache__/construct.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/coo.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/coo.cpython-311.pyc index 8f571175..17d49cd6 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/coo.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/coo.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/csc.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/csc.cpython-311.pyc index c9200d09..44dc7637 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/csc.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/csc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/csr.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/csr.cpython-311.pyc index 293dd9d0..02cc13e1 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/csr.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/csr.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/data.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/data.cpython-311.pyc index 93a072ef..08abee0e 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/data.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/data.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/dia.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/dia.cpython-311.pyc index 20337b80..c7dd3b5e 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/dia.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/dia.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/dok.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/dok.cpython-311.pyc 
index ba05fda4..78baab03 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/dok.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/dok.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/extract.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/extract.cpython-311.pyc index a56efcdd..e3100ca7 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/extract.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/extract.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/lil.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/lil.cpython-311.pyc index 5192480e..4479d6f8 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/lil.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/lil.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/sparsetools.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/sparsetools.cpython-311.pyc index 992b456c..6d6703d8 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/sparsetools.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/sparsetools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/__pycache__/sputils.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/__pycache__/sputils.cpython-311.pyc index 6c695848..3c5d802f 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/__pycache__/sputils.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/__pycache__/sputils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/csgraph/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/csgraph/__pycache__/__init__.cpython-311.pyc index c0147671..0460e841 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/csgraph/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/sparse/csgraph/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/csgraph/__pycache__/_laplacian.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/csgraph/__pycache__/_laplacian.cpython-311.pyc index 25bf4571..b7997e30 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/csgraph/__pycache__/_laplacian.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/csgraph/__pycache__/_laplacian.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/csgraph/__pycache__/_validation.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/csgraph/__pycache__/_validation.cpython-311.pyc index b7dbd4d4..725e576c 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/csgraph/__pycache__/_validation.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/csgraph/__pycache__/_validation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/__init__.cpython-311.pyc index ef780a2c..f16c8541 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_expm_multiply.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_expm_multiply.cpython-311.pyc index 04f8a43c..3f8471e9 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_expm_multiply.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_expm_multiply.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_interface.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_interface.cpython-311.pyc index 84dcd46c..e01cbfd0 100644 Binary files 
a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_interface.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_interface.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_matfuncs.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_matfuncs.cpython-311.pyc index 2723e861..83eccd52 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_matfuncs.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_matfuncs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_norm.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_norm.cpython-311.pyc index 0f87dfbe..96ac7197 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_norm.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_norm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_onenormest.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_onenormest.cpython-311.pyc index b0cc48af..645b1549 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_onenormest.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_onenormest.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_special_sparse_arrays.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_special_sparse_arrays.cpython-311.pyc index 9f7799c5..284898b4 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_special_sparse_arrays.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_special_sparse_arrays.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_svdp.cpython-311.pyc 
b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_svdp.cpython-311.pyc index a8841d44..806ada37 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_svdp.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/_svdp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/dsolve.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/dsolve.cpython-311.pyc index e6f1f1da..46a7cf0f 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/dsolve.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/dsolve.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/eigen.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/eigen.cpython-311.pyc index d0899ae1..23f15b39 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/eigen.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/eigen.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/interface.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/interface.cpython-311.pyc index 4c9cc3f6..f43633c3 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/interface.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/interface.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/isolve.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/isolve.cpython-311.pyc index 82189fb2..63de8045 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/isolve.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/isolve.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/matfuncs.cpython-311.pyc 
b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/matfuncs.cpython-311.pyc index b34a8055..472a12d6 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/matfuncs.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/__pycache__/matfuncs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/_dsolve/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/_dsolve/__pycache__/__init__.cpython-311.pyc index f4890a44..20e987cc 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/_dsolve/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/_dsolve/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/_dsolve/__pycache__/_add_newdocs.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/_dsolve/__pycache__/_add_newdocs.cpython-311.pyc index 5e9d62cb..0b6fe03c 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/_dsolve/__pycache__/_add_newdocs.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/_dsolve/__pycache__/_add_newdocs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/_dsolve/__pycache__/linsolve.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/_dsolve/__pycache__/linsolve.cpython-311.pyc index 168d7998..69abdc78 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/_dsolve/__pycache__/linsolve.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/_dsolve/__pycache__/linsolve.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/__pycache__/__init__.cpython-311.pyc index 3dce2052..4bc5728d 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/__pycache__/_svds.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/__pycache__/_svds.cpython-311.pyc index 8f6b8597..f0af4a9d 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/__pycache__/_svds.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/__pycache__/_svds.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/arpack/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/arpack/__pycache__/__init__.cpython-311.pyc index 393e4728..aefedcbf 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/arpack/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/arpack/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/arpack/__pycache__/arpack.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/arpack/__pycache__/arpack.cpython-311.pyc index edd109f6..2e32eda4 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/arpack/__pycache__/arpack.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/arpack/__pycache__/arpack.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/lobpcg/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/lobpcg/__pycache__/__init__.cpython-311.pyc index 19192f3e..da400f82 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/lobpcg/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/lobpcg/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/lobpcg/__pycache__/lobpcg.cpython-311.pyc 
b/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/lobpcg/__pycache__/lobpcg.cpython-311.pyc index e14be837..c7038427 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/lobpcg/__pycache__/lobpcg.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/_eigen/lobpcg/__pycache__/lobpcg.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/__init__.cpython-311.pyc index 223bd00b..5f36c3dc 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/_gcrotmk.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/_gcrotmk.cpython-311.pyc index ab32458f..65a3cd99 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/_gcrotmk.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/_gcrotmk.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/iterative.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/iterative.cpython-311.pyc index 589bf734..4389e784 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/iterative.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/iterative.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/lgmres.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/lgmres.cpython-311.pyc index 058ccc90..1ddeb4c8 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/lgmres.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/lgmres.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/lsmr.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/lsmr.cpython-311.pyc index 27be8519..661061d3 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/lsmr.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/lsmr.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/lsqr.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/lsqr.cpython-311.pyc index f7f492a8..95b3f2bd 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/lsqr.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/lsqr.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/minres.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/minres.cpython-311.pyc index c7255aa5..46f73d70 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/minres.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/minres.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/tfqmr.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/tfqmr.cpython-311.pyc index 9f85ffb5..f1d0b05f 100644 Binary files a/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/tfqmr.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/tfqmr.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/utils.cpython-311.pyc index af79141f..7993912a 100644 Binary files 
a/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/sparse/linalg/_isolve/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/spatial/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/spatial/__pycache__/__init__.cpython-311.pyc index 58b47b82..40c6c57a 100644 Binary files a/.venv/Lib/site-packages/scipy/spatial/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/spatial/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/spatial/__pycache__/_geometric_slerp.cpython-311.pyc b/.venv/Lib/site-packages/scipy/spatial/__pycache__/_geometric_slerp.cpython-311.pyc index e1c9902e..a4dc384a 100644 Binary files a/.venv/Lib/site-packages/scipy/spatial/__pycache__/_geometric_slerp.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/spatial/__pycache__/_geometric_slerp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/spatial/__pycache__/_kdtree.cpython-311.pyc b/.venv/Lib/site-packages/scipy/spatial/__pycache__/_kdtree.cpython-311.pyc index bd9b8b55..4df10ff6 100644 Binary files a/.venv/Lib/site-packages/scipy/spatial/__pycache__/_kdtree.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/spatial/__pycache__/_kdtree.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/spatial/__pycache__/_plotutils.cpython-311.pyc b/.venv/Lib/site-packages/scipy/spatial/__pycache__/_plotutils.cpython-311.pyc index c7f9aeaa..a47d7644 100644 Binary files a/.venv/Lib/site-packages/scipy/spatial/__pycache__/_plotutils.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/spatial/__pycache__/_plotutils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/spatial/__pycache__/_procrustes.cpython-311.pyc b/.venv/Lib/site-packages/scipy/spatial/__pycache__/_procrustes.cpython-311.pyc index ea61b115..b58e3065 100644 Binary files 
a/.venv/Lib/site-packages/scipy/spatial/__pycache__/_procrustes.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/spatial/__pycache__/_procrustes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/spatial/__pycache__/_spherical_voronoi.cpython-311.pyc b/.venv/Lib/site-packages/scipy/spatial/__pycache__/_spherical_voronoi.cpython-311.pyc index 0466c90b..97ebd8eb 100644 Binary files a/.venv/Lib/site-packages/scipy/spatial/__pycache__/_spherical_voronoi.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/spatial/__pycache__/_spherical_voronoi.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/spatial/__pycache__/ckdtree.cpython-311.pyc b/.venv/Lib/site-packages/scipy/spatial/__pycache__/ckdtree.cpython-311.pyc index 9e5c216e..f764423d 100644 Binary files a/.venv/Lib/site-packages/scipy/spatial/__pycache__/ckdtree.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/spatial/__pycache__/ckdtree.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/spatial/__pycache__/distance.cpython-311.pyc b/.venv/Lib/site-packages/scipy/spatial/__pycache__/distance.cpython-311.pyc index 38d34323..3d9262f5 100644 Binary files a/.venv/Lib/site-packages/scipy/spatial/__pycache__/distance.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/spatial/__pycache__/distance.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/spatial/__pycache__/kdtree.cpython-311.pyc b/.venv/Lib/site-packages/scipy/spatial/__pycache__/kdtree.cpython-311.pyc index 0908d208..125d1fba 100644 Binary files a/.venv/Lib/site-packages/scipy/spatial/__pycache__/kdtree.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/spatial/__pycache__/kdtree.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/spatial/__pycache__/qhull.cpython-311.pyc b/.venv/Lib/site-packages/scipy/spatial/__pycache__/qhull.cpython-311.pyc index 4bfdcf89..2669da2c 100644 Binary files a/.venv/Lib/site-packages/scipy/spatial/__pycache__/qhull.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/spatial/__pycache__/qhull.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/spatial/transform/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/spatial/transform/__pycache__/__init__.cpython-311.pyc index 78201db5..58aed84c 100644 Binary files a/.venv/Lib/site-packages/scipy/spatial/transform/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/spatial/transform/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/spatial/transform/__pycache__/_rotation_groups.cpython-311.pyc b/.venv/Lib/site-packages/scipy/spatial/transform/__pycache__/_rotation_groups.cpython-311.pyc index 2a39e973..692ead6e 100644 Binary files a/.venv/Lib/site-packages/scipy/spatial/transform/__pycache__/_rotation_groups.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/spatial/transform/__pycache__/_rotation_groups.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/spatial/transform/__pycache__/_rotation_spline.cpython-311.pyc b/.venv/Lib/site-packages/scipy/spatial/transform/__pycache__/_rotation_spline.cpython-311.pyc index 1512fbf1..59d25773 100644 Binary files a/.venv/Lib/site-packages/scipy/spatial/transform/__pycache__/_rotation_spline.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/spatial/transform/__pycache__/_rotation_spline.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/spatial/transform/__pycache__/rotation.cpython-311.pyc b/.venv/Lib/site-packages/scipy/spatial/transform/__pycache__/rotation.cpython-311.pyc index 35d07389..15f72cdf 100644 Binary files a/.venv/Lib/site-packages/scipy/spatial/transform/__pycache__/rotation.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/spatial/transform/__pycache__/rotation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/special/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/special/__pycache__/__init__.cpython-311.pyc index 46f0ee45..8be2cd8d 
100644 Binary files a/.venv/Lib/site-packages/scipy/special/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/special/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/special/__pycache__/_basic.cpython-311.pyc b/.venv/Lib/site-packages/scipy/special/__pycache__/_basic.cpython-311.pyc index 27feb5ef..b20c9396 100644 Binary files a/.venv/Lib/site-packages/scipy/special/__pycache__/_basic.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/special/__pycache__/_basic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/special/__pycache__/_ellip_harm.cpython-311.pyc b/.venv/Lib/site-packages/scipy/special/__pycache__/_ellip_harm.cpython-311.pyc index f4b4ab1c..a6c2178b 100644 Binary files a/.venv/Lib/site-packages/scipy/special/__pycache__/_ellip_harm.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/special/__pycache__/_ellip_harm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/special/__pycache__/_lambertw.cpython-311.pyc b/.venv/Lib/site-packages/scipy/special/__pycache__/_lambertw.cpython-311.pyc index 8f2bf1f4..9cb97e56 100644 Binary files a/.venv/Lib/site-packages/scipy/special/__pycache__/_lambertw.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/special/__pycache__/_lambertw.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/special/__pycache__/_logsumexp.cpython-311.pyc b/.venv/Lib/site-packages/scipy/special/__pycache__/_logsumexp.cpython-311.pyc index 0572a383..81541d76 100644 Binary files a/.venv/Lib/site-packages/scipy/special/__pycache__/_logsumexp.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/special/__pycache__/_logsumexp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/special/__pycache__/_orthogonal.cpython-311.pyc b/.venv/Lib/site-packages/scipy/special/__pycache__/_orthogonal.cpython-311.pyc index 641b09d4..43fd621a 100644 Binary files a/.venv/Lib/site-packages/scipy/special/__pycache__/_orthogonal.cpython-311.pyc 
and b/.venv/Lib/site-packages/scipy/special/__pycache__/_orthogonal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/special/__pycache__/_sf_error.cpython-311.pyc b/.venv/Lib/site-packages/scipy/special/__pycache__/_sf_error.cpython-311.pyc index d4a1f8a7..754c23db 100644 Binary files a/.venv/Lib/site-packages/scipy/special/__pycache__/_sf_error.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/special/__pycache__/_sf_error.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/special/__pycache__/_spfun_stats.cpython-311.pyc b/.venv/Lib/site-packages/scipy/special/__pycache__/_spfun_stats.cpython-311.pyc index a750945e..3f2ff8d9 100644 Binary files a/.venv/Lib/site-packages/scipy/special/__pycache__/_spfun_stats.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/special/__pycache__/_spfun_stats.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/special/__pycache__/_spherical_bessel.cpython-311.pyc b/.venv/Lib/site-packages/scipy/special/__pycache__/_spherical_bessel.cpython-311.pyc index 878c228f..7f28eb21 100644 Binary files a/.venv/Lib/site-packages/scipy/special/__pycache__/_spherical_bessel.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/special/__pycache__/_spherical_bessel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/special/__pycache__/_support_alternative_backends.cpython-311.pyc b/.venv/Lib/site-packages/scipy/special/__pycache__/_support_alternative_backends.cpython-311.pyc index 87600e7c..081e7928 100644 Binary files a/.venv/Lib/site-packages/scipy/special/__pycache__/_support_alternative_backends.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/special/__pycache__/_support_alternative_backends.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/special/__pycache__/add_newdocs.cpython-311.pyc b/.venv/Lib/site-packages/scipy/special/__pycache__/add_newdocs.cpython-311.pyc index 70518392..e46886d8 100644 Binary files 
a/.venv/Lib/site-packages/scipy/special/__pycache__/add_newdocs.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/special/__pycache__/add_newdocs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/special/__pycache__/basic.cpython-311.pyc b/.venv/Lib/site-packages/scipy/special/__pycache__/basic.cpython-311.pyc index 5b5de40c..fd63bbe8 100644 Binary files a/.venv/Lib/site-packages/scipy/special/__pycache__/basic.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/special/__pycache__/basic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/special/__pycache__/orthogonal.cpython-311.pyc b/.venv/Lib/site-packages/scipy/special/__pycache__/orthogonal.cpython-311.pyc index 0285093c..a4a0dd09 100644 Binary files a/.venv/Lib/site-packages/scipy/special/__pycache__/orthogonal.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/special/__pycache__/orthogonal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/special/__pycache__/sf_error.cpython-311.pyc b/.venv/Lib/site-packages/scipy/special/__pycache__/sf_error.cpython-311.pyc index b1cc2836..3c882d6a 100644 Binary files a/.venv/Lib/site-packages/scipy/special/__pycache__/sf_error.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/special/__pycache__/sf_error.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/special/__pycache__/specfun.cpython-311.pyc b/.venv/Lib/site-packages/scipy/special/__pycache__/specfun.cpython-311.pyc index ee557741..65687692 100644 Binary files a/.venv/Lib/site-packages/scipy/special/__pycache__/specfun.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/special/__pycache__/specfun.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/special/__pycache__/spfun_stats.cpython-311.pyc b/.venv/Lib/site-packages/scipy/special/__pycache__/spfun_stats.cpython-311.pyc index 651463f4..72ba1bbe 100644 Binary files a/.venv/Lib/site-packages/scipy/special/__pycache__/spfun_stats.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/special/__pycache__/spfun_stats.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/__init__.cpython-311.pyc index 358adf62..983ebb8a 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_axis_nan_policy.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_axis_nan_policy.cpython-311.pyc index b71f0fa4..e3c9efd6 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_axis_nan_policy.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_axis_nan_policy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_binned_statistic.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_binned_statistic.cpython-311.pyc index b6dfa55c..103522a8 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_binned_statistic.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_binned_statistic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_binomtest.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_binomtest.cpython-311.pyc index ff2b6636..cd9284b1 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_binomtest.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_binomtest.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_bws_test.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_bws_test.cpython-311.pyc index 58579282..550d66ed 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_bws_test.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_bws_test.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_censored_data.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_censored_data.cpython-311.pyc index 4ac870c5..4717aa33 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_censored_data.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_censored_data.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_common.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_common.cpython-311.pyc index c5a011c2..a452489e 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_common.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_constants.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_constants.cpython-311.pyc index 1d324f46..01d6bb62 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_constants.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_constants.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_continuous_distns.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_continuous_distns.cpython-311.pyc index 2cd41958..8a28a20b 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_continuous_distns.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_continuous_distns.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_covariance.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_covariance.cpython-311.pyc index aaaad526..ba6ebbae 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_covariance.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_covariance.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/scipy/stats/__pycache__/_crosstab.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_crosstab.cpython-311.pyc index 7d9bfb0d..e9580dd9 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_crosstab.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_crosstab.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_discrete_distns.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_discrete_distns.cpython-311.pyc index 44164173..48d87a1f 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_discrete_distns.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_discrete_distns.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_distn_infrastructure.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_distn_infrastructure.cpython-311.pyc index 1ffd86e2..4837a3aa 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_distn_infrastructure.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_distn_infrastructure.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_distr_params.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_distr_params.cpython-311.pyc index c2c44bc8..74b557bf 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_distr_params.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_distr_params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_entropy.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_entropy.cpython-311.pyc index 739ed447..6d80b430 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_entropy.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_entropy.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/scipy/stats/__pycache__/_fit.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_fit.cpython-311.pyc index c99fa4f7..f9fd74c4 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_fit.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_fit.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_hypotests.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_hypotests.cpython-311.pyc index ad9bc9a5..75d67555 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_hypotests.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_hypotests.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_kde.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_kde.cpython-311.pyc index 833333e6..64add821 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_kde.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_kde.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_ksstats.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_ksstats.cpython-311.pyc index ff670d2e..a2350d68 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_ksstats.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_ksstats.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_mannwhitneyu.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_mannwhitneyu.cpython-311.pyc index aedd0f87..dedaa05f 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_mannwhitneyu.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_mannwhitneyu.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_morestats.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_morestats.cpython-311.pyc index 
f255a7a1..8c4d7cc3 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_morestats.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_morestats.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_mstats_basic.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_mstats_basic.cpython-311.pyc index b089ad26..a4134012 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_mstats_basic.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_mstats_basic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_mstats_extras.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_mstats_extras.cpython-311.pyc index 7bc39291..74fef6f1 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_mstats_extras.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_mstats_extras.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_multicomp.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_multicomp.cpython-311.pyc index 407757a8..a613cbc8 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_multicomp.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_multicomp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_multivariate.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_multivariate.cpython-311.pyc index a45f77d8..da9dd69e 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_multivariate.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_multivariate.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_odds_ratio.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_odds_ratio.cpython-311.pyc index 1ff417cf..83189af8 100644 Binary files 
a/.venv/Lib/site-packages/scipy/stats/__pycache__/_odds_ratio.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_odds_ratio.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_page_trend_test.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_page_trend_test.cpython-311.pyc index 722b587b..9d753ee0 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_page_trend_test.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_page_trend_test.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_qmc.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_qmc.cpython-311.pyc index 88a02199..fd96ddbe 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_qmc.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_qmc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_qmvnt.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_qmvnt.cpython-311.pyc index d0d3406d..8473fab3 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_qmvnt.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_qmvnt.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_relative_risk.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_relative_risk.cpython-311.pyc index 29c81371..af1cfa6d 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_relative_risk.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_relative_risk.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_resampling.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_resampling.cpython-311.pyc index cc364d83..9da58938 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_resampling.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/stats/__pycache__/_resampling.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_rvs_sampling.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_rvs_sampling.cpython-311.pyc index 4cf1c4a8..45193b9e 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_rvs_sampling.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_rvs_sampling.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_sampling.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_sampling.cpython-311.pyc index 3ecb52b1..afc86728 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_sampling.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_sampling.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_sensitivity_analysis.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_sensitivity_analysis.cpython-311.pyc index d00e8e5d..6b8e3a27 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_sensitivity_analysis.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_sensitivity_analysis.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_stats_mstats_common.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_stats_mstats_common.cpython-311.pyc index 4e49567d..20f8a116 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_stats_mstats_common.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_stats_mstats_common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_stats_py.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_stats_py.cpython-311.pyc index 61ae5546..6f8d826d 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_stats_py.cpython-311.pyc and 
b/.venv/Lib/site-packages/scipy/stats/__pycache__/_stats_py.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_survival.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_survival.cpython-311.pyc index 06d8de1c..05f7ba99 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_survival.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_survival.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_tukeylambda_stats.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_tukeylambda_stats.cpython-311.pyc index 380af1ca..778d9b2f 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_tukeylambda_stats.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_tukeylambda_stats.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_variation.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_variation.cpython-311.pyc index bf9d465b..b67d671b 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_variation.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_variation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_warnings_errors.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_warnings_errors.cpython-311.pyc index eb9506c0..3befcb9e 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_warnings_errors.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_warnings_errors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/_wilcoxon.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/_wilcoxon.cpython-311.pyc index cb24bc29..294e721e 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/_wilcoxon.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/_wilcoxon.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/biasedurn.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/biasedurn.cpython-311.pyc index bd2b5b05..7ad17ff6 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/biasedurn.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/biasedurn.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/contingency.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/contingency.cpython-311.pyc index 061e1df4..a89a9449 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/contingency.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/contingency.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/distributions.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/distributions.cpython-311.pyc index ac9bc3bf..58bbf2cb 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/distributions.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/distributions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/kde.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/kde.cpython-311.pyc index 53c5cc16..dd95f7e0 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/kde.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/kde.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/morestats.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/morestats.cpython-311.pyc index 40b85a54..1cd761ec 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/morestats.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/morestats.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/mstats.cpython-311.pyc 
b/.venv/Lib/site-packages/scipy/stats/__pycache__/mstats.cpython-311.pyc index d3337e1d..705f6bff 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/mstats.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/mstats.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/mstats_basic.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/mstats_basic.cpython-311.pyc index e43a53b9..941b2f61 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/mstats_basic.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/mstats_basic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/mstats_extras.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/mstats_extras.cpython-311.pyc index a4525e8e..763bc0ec 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/mstats_extras.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/mstats_extras.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/mvn.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/mvn.cpython-311.pyc index 23f4c7f5..6cbf5bf9 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/mvn.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/mvn.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/qmc.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/qmc.cpython-311.pyc index 9068104a..171f5f7c 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/qmc.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/qmc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/sampling.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/sampling.cpython-311.pyc index 503e5152..120ebc2b 100644 Binary files 
a/.venv/Lib/site-packages/scipy/stats/__pycache__/sampling.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/sampling.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/__pycache__/stats.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/__pycache__/stats.cpython-311.pyc index 442a53ae..9ce89e22 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/__pycache__/stats.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/__pycache__/stats.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/_boost/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/_boost/__pycache__/__init__.cpython-311.pyc index 74196bf0..b8bae5b3 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/_boost/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/_boost/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/_levy_stable/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/_levy_stable/__pycache__/__init__.cpython-311.pyc index e79b6d70..c48de915 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/_levy_stable/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/_levy_stable/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/_rcont/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/_rcont/__pycache__/__init__.cpython-311.pyc index 7630b432..654fb452 100644 Binary files a/.venv/Lib/site-packages/scipy/stats/_rcont/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/_rcont/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/scipy/stats/_unuran/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/scipy/stats/_unuran/__pycache__/__init__.cpython-311.pyc index 0b348147..fc7d0dd6 100644 Binary files 
a/.venv/Lib/site-packages/scipy/stats/_unuran/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/scipy/stats/_unuran/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/__init__.cpython-311.pyc index 6f97c48a..83782274 100644 Binary files a/.venv/Lib/site-packages/setuptools/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/__pycache__/_core_metadata.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/_core_metadata.cpython-311.pyc index d5cc82d9..e5f5c967 100644 Binary files a/.venv/Lib/site-packages/setuptools/__pycache__/_core_metadata.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/__pycache__/_core_metadata.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/__pycache__/_entry_points.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/_entry_points.cpython-311.pyc index 591c3991..040e69cd 100644 Binary files a/.venv/Lib/site-packages/setuptools/__pycache__/_entry_points.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/__pycache__/_entry_points.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/__pycache__/_imp.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/_imp.cpython-311.pyc index e667b316..5e4ec39a 100644 Binary files a/.venv/Lib/site-packages/setuptools/__pycache__/_imp.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/__pycache__/_imp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/__pycache__/_importlib.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/_importlib.cpython-311.pyc index 1f94eb93..e2208fc6 100644 Binary files a/.venv/Lib/site-packages/setuptools/__pycache__/_importlib.cpython-311.pyc and 
b/.venv/Lib/site-packages/setuptools/__pycache__/_importlib.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/__pycache__/_itertools.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/_itertools.cpython-311.pyc index 65ac7f75..57b9170f 100644 Binary files a/.venv/Lib/site-packages/setuptools/__pycache__/_itertools.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/__pycache__/_itertools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/__pycache__/_normalization.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/_normalization.cpython-311.pyc index 80e8811d..03eab36e 100644 Binary files a/.venv/Lib/site-packages/setuptools/__pycache__/_normalization.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/__pycache__/_normalization.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/__pycache__/_path.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/_path.cpython-311.pyc index a9058746..e1fdea67 100644 Binary files a/.venv/Lib/site-packages/setuptools/__pycache__/_path.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/__pycache__/_path.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/__pycache__/_reqs.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/_reqs.cpython-311.pyc index 641b74d7..3069d210 100644 Binary files a/.venv/Lib/site-packages/setuptools/__pycache__/_reqs.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/__pycache__/_reqs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/__pycache__/archive_util.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/archive_util.cpython-311.pyc index 7f19f12c..1939763a 100644 Binary files a/.venv/Lib/site-packages/setuptools/__pycache__/archive_util.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/__pycache__/archive_util.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/setuptools/__pycache__/depends.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/depends.cpython-311.pyc index 44fd56cb..51d03763 100644 Binary files a/.venv/Lib/site-packages/setuptools/__pycache__/depends.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/__pycache__/depends.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/__pycache__/discovery.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/discovery.cpython-311.pyc index 721fb3cf..8c9cfdf3 100644 Binary files a/.venv/Lib/site-packages/setuptools/__pycache__/discovery.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/__pycache__/discovery.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/__pycache__/dist.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/dist.cpython-311.pyc index 2d05fa29..cc0cf798 100644 Binary files a/.venv/Lib/site-packages/setuptools/__pycache__/dist.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/__pycache__/dist.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/__pycache__/errors.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/errors.cpython-311.pyc index a9a6d06a..8b254a82 100644 Binary files a/.venv/Lib/site-packages/setuptools/__pycache__/errors.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/__pycache__/errors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/__pycache__/extension.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/extension.cpython-311.pyc index 9a4a2350..0761cf52 100644 Binary files a/.venv/Lib/site-packages/setuptools/__pycache__/extension.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/__pycache__/extension.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/__pycache__/glob.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/glob.cpython-311.pyc index 2bd24fbe..047c1ae1 100644 Binary files 
a/.venv/Lib/site-packages/setuptools/__pycache__/glob.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/__pycache__/glob.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/__pycache__/logging.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/logging.cpython-311.pyc index 8afe6dcf..c24c734f 100644 Binary files a/.venv/Lib/site-packages/setuptools/__pycache__/logging.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/__pycache__/logging.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/__pycache__/monkey.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/monkey.cpython-311.pyc index b99fae78..3c8d830a 100644 Binary files a/.venv/Lib/site-packages/setuptools/__pycache__/monkey.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/__pycache__/monkey.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/__pycache__/msvc.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/msvc.cpython-311.pyc index f7ea00cc..76a81d26 100644 Binary files a/.venv/Lib/site-packages/setuptools/__pycache__/msvc.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/__pycache__/msvc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/__pycache__/namespaces.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/namespaces.cpython-311.pyc index bd9c4305..2adb83b7 100644 Binary files a/.venv/Lib/site-packages/setuptools/__pycache__/namespaces.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/__pycache__/namespaces.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/__pycache__/unicode_utils.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/unicode_utils.cpython-311.pyc index ff87df64..babe3e13 100644 Binary files a/.venv/Lib/site-packages/setuptools/__pycache__/unicode_utils.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/__pycache__/unicode_utils.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/setuptools/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/version.cpython-311.pyc index 1d0343dc..b054344f 100644 Binary files a/.venv/Lib/site-packages/setuptools/__pycache__/version.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/__pycache__/warnings.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/__pycache__/warnings.cpython-311.pyc index 97228b8b..7a82e417 100644 Binary files a/.venv/Lib/site-packages/setuptools/__pycache__/warnings.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/__pycache__/warnings.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/__init__.cpython-311.pyc index 3beadb1f..64f0ee54 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_collections.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_collections.cpython-311.pyc index fabdb757..ef4c4c19 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_collections.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_collections.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_functools.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_functools.cpython-311.pyc index 5df7c60f..220b7d68 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_functools.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_functools.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_itertools.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_itertools.cpython-311.pyc index 699b1f7f..7b5fe9c6 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_itertools.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_itertools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_log.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_log.cpython-311.pyc index b179b960..c390f3a3 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_log.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_log.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_modified.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_modified.cpython-311.pyc index 159a8072..cdc7b708 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_modified.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_modified.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_msvccompiler.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_msvccompiler.cpython-311.pyc index 69f40ca4..431de59d 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_msvccompiler.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/_msvccompiler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/archive_util.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/archive_util.cpython-311.pyc index 90a4d9bf..7c123de2 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/archive_util.cpython-311.pyc and 
b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/archive_util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/ccompiler.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/ccompiler.cpython-311.pyc index 2e6557ee..e0b2d69d 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/ccompiler.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/ccompiler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/cmd.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/cmd.cpython-311.pyc index c80a6738..8cd4c06e 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/cmd.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/cmd.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/config.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/config.cpython-311.pyc index a5d1eeae..ee16eaad 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/config.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/core.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/core.cpython-311.pyc index 4efead1c..d142f17b 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/core.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/debug.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/debug.cpython-311.pyc index 3c4d1b6f..98317533 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/debug.cpython-311.pyc and 
b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/debug.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/dir_util.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/dir_util.cpython-311.pyc index 748c3940..7c631f11 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/dir_util.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/dir_util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/dist.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/dist.cpython-311.pyc index 603e5b0e..41bc9882 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/dist.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/dist.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/errors.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/errors.cpython-311.pyc index f300245c..5e948ccd 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/errors.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/errors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/extension.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/extension.cpython-311.pyc index 2700cc9d..b8d1ffb1 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/extension.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/extension.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/fancy_getopt.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/fancy_getopt.cpython-311.pyc index 41011e24..1dd9e5c8 100644 Binary files 
a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/fancy_getopt.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/fancy_getopt.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/file_util.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/file_util.cpython-311.pyc index c2e6db85..dae548d0 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/file_util.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/file_util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/filelist.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/filelist.cpython-311.pyc index 05dcdbe1..a30879eb 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/filelist.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/filelist.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/log.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/log.cpython-311.pyc index 755b5306..22ac7b42 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/log.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/log.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/py39compat.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/py39compat.cpython-311.pyc index fbe5e4a1..cae1ed2b 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/py39compat.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/py39compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/spawn.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/spawn.cpython-311.pyc index 
a21d5534..bc21aeac 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/spawn.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/spawn.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/sysconfig.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/sysconfig.cpython-311.pyc index 519e540b..206e7efc 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/sysconfig.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/sysconfig.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/text_file.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/text_file.cpython-311.pyc index 6e1939c1..382eca66 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/text_file.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/text_file.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/util.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/util.cpython-311.pyc index 0a5f84c2..6f4ee46f 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/util.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/__pycache__/util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/__init__.cpython-311.pyc index c1301999..67c56fdc 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/_framework_compat.cpython-311.pyc 
b/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/_framework_compat.cpython-311.pyc index 38199158..5d289467 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/_framework_compat.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/_framework_compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist.cpython-311.pyc index f06ce100..d4c008d2 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build.cpython-311.pyc index f71fe11b..b038ecfc 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_ext.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_ext.cpython-311.pyc index 1d30e291..bb4997ce 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_ext.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_ext.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_py.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_py.cpython-311.pyc index 1745a0e5..8c1a7310 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_py.cpython-311.pyc and 
b/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_py.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_scripts.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_scripts.cpython-311.pyc index 1b9fb85f..a191d985 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_scripts.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_scripts.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install.cpython-311.pyc index 53bb3fdb..1c92794c 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_lib.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_lib.cpython-311.pyc index 82a47010..e69303ef 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_lib.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_lib.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_scripts.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_scripts.cpython-311.pyc index 276250c2..56a102d8 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_scripts.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_scripts.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/sdist.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/sdist.cpython-311.pyc index a83675fd..0bb85941 100644 Binary files a/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/sdist.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/sdist.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/__pycache__/__init__.cpython-311.pyc index 9c3f8849..71c75133 100644 Binary files a/.venv/Lib/site-packages/setuptools/_vendor/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/__pycache__/ordered_set.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/__pycache__/ordered_set.cpython-311.pyc index c6e29ef2..5f5e0c44 100644 Binary files a/.venv/Lib/site-packages/setuptools/_vendor/__pycache__/ordered_set.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/__pycache__/ordered_set.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/backports/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/backports/__pycache__/__init__.cpython-311.pyc index b9ee9a77..ae918f24 100644 Binary files a/.venv/Lib/site-packages/setuptools/_vendor/backports/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/backports/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/backports/__pycache__/tarfile.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/backports/__pycache__/tarfile.cpython-311.pyc index fc27fb87..4359a3fb 100644 Binary files a/.venv/Lib/site-packages/setuptools/_vendor/backports/__pycache__/tarfile.cpython-311.pyc and 
b/.venv/Lib/site-packages/setuptools/_vendor/backports/__pycache__/tarfile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/jaraco/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/jaraco/__pycache__/__init__.cpython-311.pyc index efeba57e..bb800897 100644 Binary files a/.venv/Lib/site-packages/setuptools/_vendor/jaraco/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/jaraco/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/jaraco/__pycache__/context.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/jaraco/__pycache__/context.cpython-311.pyc index ec6a462d..5179aaf5 100644 Binary files a/.venv/Lib/site-packages/setuptools/_vendor/jaraco/__pycache__/context.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/jaraco/__pycache__/context.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/jaraco/functools/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/jaraco/functools/__pycache__/__init__.cpython-311.pyc index 38a414f7..d2f226fe 100644 Binary files a/.venv/Lib/site-packages/setuptools/_vendor/jaraco/functools/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/jaraco/functools/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/jaraco/text/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/jaraco/text/__pycache__/__init__.cpython-311.pyc index edbc7509..e394d0e8 100644 Binary files a/.venv/Lib/site-packages/setuptools/_vendor/jaraco/text/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/jaraco/text/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/__init__.cpython-311.pyc index a7fb23c0..11ca71fb 100644 Binary files a/.venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/more.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/more.cpython-311.pyc index 53a7b80d..91e3dc85 100644 Binary files a/.venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/more.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/more.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/recipes.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/recipes.cpython-311.pyc index d46e6acb..821c7849 100644 Binary files a/.venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/recipes.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/recipes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/__init__.cpython-311.pyc index 498c6815..35f74782 100644 Binary files a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_elffile.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_elffile.cpython-311.pyc index 565c6212..8f308a73 100644 Binary files 
a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_elffile.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_elffile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_manylinux.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_manylinux.cpython-311.pyc index 94a805f6..60b5c09d 100644 Binary files a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_manylinux.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_manylinux.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_musllinux.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_musllinux.cpython-311.pyc index 95b9c6c4..b36c3fc3 100644 Binary files a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_musllinux.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_musllinux.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_parser.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_parser.cpython-311.pyc index 00cc8a1c..c8618be7 100644 Binary files a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_parser.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_structures.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_structures.cpython-311.pyc index 19442f3e..b9d17ef0 100644 Binary files a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_structures.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_structures.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_tokenizer.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_tokenizer.cpython-311.pyc index 8bf4b86e..a424059a 100644 Binary files a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_tokenizer.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_tokenizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/markers.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/markers.cpython-311.pyc index 013cb5de..e9b357b8 100644 Binary files a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/markers.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/markers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/requirements.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/requirements.cpython-311.pyc index 7587c795..dbcfba19 100644 Binary files a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/requirements.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/requirements.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-311.pyc index cf224d85..d0479010 100644 Binary files a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/tags.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/tags.cpython-311.pyc index d317cab3..53f2e390 100644 Binary files 
a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/tags.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/tags.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/utils.cpython-311.pyc index 4b275f5c..8da9448b 100644 Binary files a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/version.cpython-311.pyc index c91dff8a..d522207a 100644 Binary files a/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/version.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/command/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/command/__pycache__/__init__.cpython-311.pyc index 9e900b50..4afb6765 100644 Binary files a/.venv/Lib/site-packages/setuptools/command/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/command/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/command/__pycache__/_requirestxt.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/command/__pycache__/_requirestxt.cpython-311.pyc index 8ff1d407..f406e21b 100644 Binary files a/.venv/Lib/site-packages/setuptools/command/__pycache__/_requirestxt.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/command/__pycache__/_requirestxt.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/command/__pycache__/bdist_egg.cpython-311.pyc 
b/.venv/Lib/site-packages/setuptools/command/__pycache__/bdist_egg.cpython-311.pyc index e930abd7..2918ae76 100644 Binary files a/.venv/Lib/site-packages/setuptools/command/__pycache__/bdist_egg.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/command/__pycache__/bdist_egg.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/command/__pycache__/build.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/command/__pycache__/build.cpython-311.pyc index 946d45f3..8428304b 100644 Binary files a/.venv/Lib/site-packages/setuptools/command/__pycache__/build.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/command/__pycache__/build.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/command/__pycache__/build_ext.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/command/__pycache__/build_ext.cpython-311.pyc index 09faaf6e..27fcd8fb 100644 Binary files a/.venv/Lib/site-packages/setuptools/command/__pycache__/build_ext.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/command/__pycache__/build_ext.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/command/__pycache__/build_py.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/command/__pycache__/build_py.cpython-311.pyc index 8a6a1b76..c4db17ef 100644 Binary files a/.venv/Lib/site-packages/setuptools/command/__pycache__/build_py.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/command/__pycache__/build_py.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/command/__pycache__/egg_info.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/command/__pycache__/egg_info.cpython-311.pyc index b4c3e33f..ea6b8513 100644 Binary files a/.venv/Lib/site-packages/setuptools/command/__pycache__/egg_info.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/command/__pycache__/egg_info.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/command/__pycache__/install.cpython-311.pyc 
b/.venv/Lib/site-packages/setuptools/command/__pycache__/install.cpython-311.pyc index 1df57bcb..100e464a 100644 Binary files a/.venv/Lib/site-packages/setuptools/command/__pycache__/install.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/command/__pycache__/install.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/command/__pycache__/install_egg_info.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/command/__pycache__/install_egg_info.cpython-311.pyc index b0ac59a8..2168311a 100644 Binary files a/.venv/Lib/site-packages/setuptools/command/__pycache__/install_egg_info.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/command/__pycache__/install_egg_info.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/command/__pycache__/install_lib.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/command/__pycache__/install_lib.cpython-311.pyc index 0db7dcde..d3ad9ff7 100644 Binary files a/.venv/Lib/site-packages/setuptools/command/__pycache__/install_lib.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/command/__pycache__/install_lib.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/command/__pycache__/install_scripts.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/command/__pycache__/install_scripts.cpython-311.pyc index b8478559..c149088a 100644 Binary files a/.venv/Lib/site-packages/setuptools/command/__pycache__/install_scripts.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/command/__pycache__/install_scripts.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/command/__pycache__/sdist.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/command/__pycache__/sdist.cpython-311.pyc index c0b9d1b8..8d175fce 100644 Binary files a/.venv/Lib/site-packages/setuptools/command/__pycache__/sdist.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/command/__pycache__/sdist.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/setuptools/command/__pycache__/setopt.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/command/__pycache__/setopt.cpython-311.pyc index 8d28667d..e751834a 100644 Binary files a/.venv/Lib/site-packages/setuptools/command/__pycache__/setopt.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/command/__pycache__/setopt.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/command/__pycache__/test.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/command/__pycache__/test.cpython-311.pyc index 5ee3f535..d346aeb2 100644 Binary files a/.venv/Lib/site-packages/setuptools/command/__pycache__/test.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/command/__pycache__/test.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/compat/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/compat/__pycache__/__init__.cpython-311.pyc index 724c1acf..6add0680 100644 Binary files a/.venv/Lib/site-packages/setuptools/compat/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/compat/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/compat/__pycache__/py39.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/compat/__pycache__/py39.cpython-311.pyc index 64920911..a41ec031 100644 Binary files a/.venv/Lib/site-packages/setuptools/compat/__pycache__/py39.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/compat/__pycache__/py39.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/config/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/config/__pycache__/__init__.cpython-311.pyc index 5473a3a3..f566b266 100644 Binary files a/.venv/Lib/site-packages/setuptools/config/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/config/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/setuptools/config/__pycache__/_apply_pyprojecttoml.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/config/__pycache__/_apply_pyprojecttoml.cpython-311.pyc index 0b763f63..a6c9a419 100644 Binary files a/.venv/Lib/site-packages/setuptools/config/__pycache__/_apply_pyprojecttoml.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/config/__pycache__/_apply_pyprojecttoml.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/config/__pycache__/expand.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/config/__pycache__/expand.cpython-311.pyc index 1cd45041..2d2e28d7 100644 Binary files a/.venv/Lib/site-packages/setuptools/config/__pycache__/expand.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/config/__pycache__/expand.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/config/__pycache__/pyprojecttoml.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/config/__pycache__/pyprojecttoml.cpython-311.pyc index 5f132c0d..3a55de7e 100644 Binary files a/.venv/Lib/site-packages/setuptools/config/__pycache__/pyprojecttoml.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/config/__pycache__/pyprojecttoml.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/config/__pycache__/setupcfg.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/config/__pycache__/setupcfg.cpython-311.pyc index 1f1b8b23..24761cac 100644 Binary files a/.venv/Lib/site-packages/setuptools/config/__pycache__/setupcfg.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/config/__pycache__/setupcfg.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/setuptools/extern/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/setuptools/extern/__pycache__/__init__.cpython-311.pyc index 191492ee..d60f0d07 100644 Binary files a/.venv/Lib/site-packages/setuptools/extern/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/setuptools/extern/__pycache__/__init__.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/INSTALLER b/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/LICENSE b/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/LICENSE new file mode 100644 index 00000000..51f34429 --- /dev/null +++ b/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/LICENSE @@ -0,0 +1,3 @@ +This software is made available under the terms of *either* of the +licenses found in LICENSE.APACHE2 or LICENSE.MIT. Contributions to are +made under the terms of *both* these licenses. diff --git a/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/LICENSE.APACHE2 b/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/LICENSE.APACHE2 new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/LICENSE.APACHE2 @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/LICENSE.MIT b/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/LICENSE.MIT new file mode 100644 index 00000000..b8bb9718 --- /dev/null +++ b/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/LICENSE.MIT @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/METADATA b/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/METADATA new file mode 100644 index 00000000..88968aed --- /dev/null +++ b/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/METADATA @@ -0,0 +1,104 @@ +Metadata-Version: 2.1 +Name: sniffio +Version: 1.3.1 +Summary: Sniff out which async library your code is running under +Author-email: "Nathaniel J. Smith" +License: MIT OR Apache-2.0 +Project-URL: Homepage, https://github.com/python-trio/sniffio +Project-URL: Documentation, https://sniffio.readthedocs.io/ +Project-URL: Changelog, https://sniffio.readthedocs.io/en/latest/history.html +Keywords: async,trio,asyncio +Classifier: License :: OSI Approved :: MIT License +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Framework :: Trio +Classifier: Framework :: AsyncIO +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Intended Audience :: Developers +Classifier: Development Status :: 5 - Production/Stable +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE +License-File: LICENSE.APACHE2 +License-File: LICENSE.MIT + +.. image:: https://img.shields.io/badge/chat-join%20now-blue.svg + :target: https://gitter.im/python-trio/general + :alt: Join chatroom + +.. image:: https://img.shields.io/badge/docs-read%20now-blue.svg + :target: https://sniffio.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + +.. image:: https://img.shields.io/pypi/v/sniffio.svg + :target: https://pypi.org/project/sniffio + :alt: Latest PyPi version + +.. 
image:: https://img.shields.io/conda/vn/conda-forge/sniffio.svg + :target: https://anaconda.org/conda-forge/sniffio + :alt: Latest conda-forge version + +.. image:: https://travis-ci.org/python-trio/sniffio.svg?branch=master + :target: https://travis-ci.org/python-trio/sniffio + :alt: Automated test status + +.. image:: https://codecov.io/gh/python-trio/sniffio/branch/master/graph/badge.svg + :target: https://codecov.io/gh/python-trio/sniffio + :alt: Test coverage + +================================================================= +sniffio: Sniff out which async library your code is running under +================================================================= + +You're writing a library. You've decided to be ambitious, and support +multiple async I/O packages, like `Trio +`__, and `asyncio +`__, and ... You've +written a bunch of clever code to handle all the differences. But... +how do you know *which* piece of clever code to run? + +This is a tiny package whose only purpose is to let you detect which +async library your code is running under. + +* Documentation: https://sniffio.readthedocs.io + +* Bug tracker and source code: https://github.com/python-trio/sniffio + +* License: MIT or Apache License 2.0, your choice + +* Contributor guide: https://trio.readthedocs.io/en/latest/contributing.html + +* Code of conduct: Contributors are requested to follow our `code of + conduct + `_ + in all project spaces. + +This library is maintained by the Trio project, as a service to the +async Python community as a whole. + + +Quickstart +---------- + +.. code-block:: python3 + + from sniffio import current_async_library + import trio + import asyncio + + async def print_library(): + library = current_async_library() + print("This is:", library) + + # Prints "This is trio" + trio.run(print_library) + + # Prints "This is asyncio" + asyncio.run(print_library()) + +For more details, including how to add support to new async libraries, +`please peruse our fine manual `__. 
diff --git a/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/RECORD b/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/RECORD new file mode 100644 index 00000000..5347d565 --- /dev/null +++ b/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/RECORD @@ -0,0 +1,19 @@ +sniffio-1.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +sniffio-1.3.1.dist-info/LICENSE,sha256=ZSyHhIjRRWNh4Iw_hgf9e6WYkqFBA9Fczk_5PIW1zIs,185 +sniffio-1.3.1.dist-info/LICENSE.APACHE2,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358 +sniffio-1.3.1.dist-info/LICENSE.MIT,sha256=Pm2uVV65J4f8gtHUg1Vnf0VMf2Wus40_nnK_mj2vA0s,1046 +sniffio-1.3.1.dist-info/METADATA,sha256=CzGLVwmO3sz1heYKiJprantcQIbzqapi7_dqHTzuEtk,3875 +sniffio-1.3.1.dist-info/RECORD,, +sniffio-1.3.1.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92 +sniffio-1.3.1.dist-info/top_level.txt,sha256=v9UJXGs5CyddCVeAqXkQiWOrpp6Wtx6GeRrPt9-jjHg,8 +sniffio/__init__.py,sha256=9WJEJlXu7yluP0YtI5SQ9M9OTQfbNHkadarK1vXGDPM,335 +sniffio/__pycache__/__init__.cpython-311.pyc,, +sniffio/__pycache__/_impl.cpython-311.pyc,, +sniffio/__pycache__/_version.cpython-311.pyc,, +sniffio/_impl.py,sha256=UmUFMZpiuOrcjnuHhuYiYMxeCNWfqu9kBlaPf0xk6X8,2843 +sniffio/_tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +sniffio/_tests/__pycache__/__init__.cpython-311.pyc,, +sniffio/_tests/__pycache__/test_sniffio.cpython-311.pyc,, +sniffio/_tests/test_sniffio.py,sha256=MMJZZJjQrUi95RANNM-a_55BZquA_gv4rHU1pevcTCM,2058 +sniffio/_version.py,sha256=iVes5xwsHeRzQDexBaAhyx_taNt2ucfA7CWAo4QDt6Q,89 +sniffio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/WHEEL b/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/WHEEL new file mode 100644 index 00000000..98c0d20b --- /dev/null +++ b/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: true +Tag: 
py3-none-any + diff --git a/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/top_level.txt b/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/top_level.txt new file mode 100644 index 00000000..01c65024 --- /dev/null +++ b/.venv/Lib/site-packages/sniffio-1.3.1.dist-info/top_level.txt @@ -0,0 +1 @@ +sniffio diff --git a/.venv/Lib/site-packages/sniffio/__init__.py b/.venv/Lib/site-packages/sniffio/__init__.py new file mode 100644 index 00000000..63f2f19e --- /dev/null +++ b/.venv/Lib/site-packages/sniffio/__init__.py @@ -0,0 +1,17 @@ +"""Top-level package for sniffio.""" + +__all__ = [ + "current_async_library", + "AsyncLibraryNotFoundError", + "current_async_library_cvar", + "thread_local", +] + +from ._version import __version__ + +from ._impl import ( + current_async_library, + AsyncLibraryNotFoundError, + current_async_library_cvar, + thread_local, +) diff --git a/.venv/Lib/site-packages/sniffio/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sniffio/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..69652294 Binary files /dev/null and b/.venv/Lib/site-packages/sniffio/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sniffio/__pycache__/_impl.cpython-311.pyc b/.venv/Lib/site-packages/sniffio/__pycache__/_impl.cpython-311.pyc new file mode 100644 index 00000000..621e196d Binary files /dev/null and b/.venv/Lib/site-packages/sniffio/__pycache__/_impl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sniffio/__pycache__/_version.cpython-311.pyc b/.venv/Lib/site-packages/sniffio/__pycache__/_version.cpython-311.pyc new file mode 100644 index 00000000..7513fafe Binary files /dev/null and b/.venv/Lib/site-packages/sniffio/__pycache__/_version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sniffio/_impl.py b/.venv/Lib/site-packages/sniffio/_impl.py new file mode 100644 index 00000000..c1a7bbf2 --- /dev/null +++ b/.venv/Lib/site-packages/sniffio/_impl.py @@ -0,0 +1,95 @@ +from 
contextvars import ContextVar +from typing import Optional +import sys +import threading + +current_async_library_cvar = ContextVar( + "current_async_library_cvar", default=None +) # type: ContextVar[Optional[str]] + + +class _ThreadLocal(threading.local): + # Since threading.local provides no explicit mechanism is for setting + # a default for a value, a custom class with a class attribute is used + # instead. + name = None # type: Optional[str] + + +thread_local = _ThreadLocal() + + +class AsyncLibraryNotFoundError(RuntimeError): + pass + + +def current_async_library() -> str: + """Detect which async library is currently running. + + The following libraries are currently supported: + + ================ =========== ============================ + Library Requires Magic string + ================ =========== ============================ + **Trio** Trio v0.6+ ``"trio"`` + **Curio** - ``"curio"`` + **asyncio** ``"asyncio"`` + **Trio-asyncio** v0.8.2+ ``"trio"`` or ``"asyncio"``, + depending on current mode + ================ =========== ============================ + + Returns: + A string like ``"trio"``. + + Raises: + AsyncLibraryNotFoundError: if called from synchronous context, + or if the current async library was not recognized. + + Examples: + + .. code-block:: python3 + + from sniffio import current_async_library + + async def generic_sleep(seconds): + library = current_async_library() + if library == "trio": + import trio + await trio.sleep(seconds) + elif library == "asyncio": + import asyncio + await asyncio.sleep(seconds) + # ... and so on ... 
+ else: + raise RuntimeError(f"Unsupported library {library!r}") + + """ + value = thread_local.name + if value is not None: + return value + + value = current_async_library_cvar.get() + if value is not None: + return value + + # Need to sniff for asyncio + if "asyncio" in sys.modules: + import asyncio + try: + current_task = asyncio.current_task # type: ignore[attr-defined] + except AttributeError: + current_task = asyncio.Task.current_task # type: ignore[attr-defined] + try: + if current_task() is not None: + return "asyncio" + except RuntimeError: + pass + + # Sniff for curio (for now) + if 'curio' in sys.modules: + from curio.meta import curio_running + if curio_running(): + return 'curio' + + raise AsyncLibraryNotFoundError( + "unknown async library, or not in async context" + ) diff --git a/.venv/Lib/site-packages/sniffio/_tests/__init__.py b/.venv/Lib/site-packages/sniffio/_tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/sniffio/_tests/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sniffio/_tests/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 00000000..5270c961 Binary files /dev/null and b/.venv/Lib/site-packages/sniffio/_tests/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sniffio/_tests/__pycache__/test_sniffio.cpython-311.pyc b/.venv/Lib/site-packages/sniffio/_tests/__pycache__/test_sniffio.cpython-311.pyc new file mode 100644 index 00000000..ac878f49 Binary files /dev/null and b/.venv/Lib/site-packages/sniffio/_tests/__pycache__/test_sniffio.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sniffio/_tests/test_sniffio.py b/.venv/Lib/site-packages/sniffio/_tests/test_sniffio.py new file mode 100644 index 00000000..02945a94 --- /dev/null +++ b/.venv/Lib/site-packages/sniffio/_tests/test_sniffio.py @@ -0,0 +1,84 @@ +import os +import sys + +import pytest + +from .. 
import ( + current_async_library, AsyncLibraryNotFoundError, + current_async_library_cvar, thread_local +) + + +def test_basics_cvar(): + with pytest.raises(AsyncLibraryNotFoundError): + current_async_library() + + token = current_async_library_cvar.set("generic-lib") + try: + assert current_async_library() == "generic-lib" + finally: + current_async_library_cvar.reset(token) + + with pytest.raises(AsyncLibraryNotFoundError): + current_async_library() + + +def test_basics_tlocal(): + with pytest.raises(AsyncLibraryNotFoundError): + current_async_library() + + old_name, thread_local.name = thread_local.name, "generic-lib" + try: + assert current_async_library() == "generic-lib" + finally: + thread_local.name = old_name + + with pytest.raises(AsyncLibraryNotFoundError): + current_async_library() + + +def test_asyncio(): + import asyncio + + with pytest.raises(AsyncLibraryNotFoundError): + current_async_library() + + ran = [] + + async def this_is_asyncio(): + assert current_async_library() == "asyncio" + # Call it a second time to exercise the caching logic + assert current_async_library() == "asyncio" + ran.append(True) + + asyncio.run(this_is_asyncio()) + assert ran == [True] + + with pytest.raises(AsyncLibraryNotFoundError): + current_async_library() + + +@pytest.mark.skipif( + sys.version_info >= (3, 12), + reason= + "curio broken on 3.12 (https://github.com/python-trio/sniffio/pull/42)", +) +def test_curio(): + import curio + + with pytest.raises(AsyncLibraryNotFoundError): + current_async_library() + + ran = [] + + async def this_is_curio(): + assert current_async_library() == "curio" + # Call it a second time to exercise the caching logic + assert current_async_library() == "curio" + ran.append(True) + + curio.run(this_is_curio) + assert ran == [True] + + with pytest.raises(AsyncLibraryNotFoundError): + current_async_library() diff --git a/.venv/Lib/site-packages/sniffio/_version.py b/.venv/Lib/site-packages/sniffio/_version.py new file mode 100644 index 
00000000..0495d105 --- /dev/null +++ b/.venv/Lib/site-packages/sniffio/_version.py @@ -0,0 +1,3 @@ +# This file is imported from __init__.py and exec'd from setup.py + +__version__ = "1.3.1" diff --git a/.venv/Lib/site-packages/sniffio/py.typed b/.venv/Lib/site-packages/sniffio/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/soxr/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/soxr/__pycache__/__init__.cpython-311.pyc index cf45800d..8e2738ac 100644 Binary files a/.venv/Lib/site-packages/soxr/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/soxr/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/soxr/__pycache__/_version.cpython-311.pyc b/.venv/Lib/site-packages/soxr/__pycache__/_version.cpython-311.pyc index 6273479b..07adb458 100644 Binary files a/.venv/Lib/site-packages/soxr/__pycache__/_version.cpython-311.pyc and b/.venv/Lib/site-packages/soxr/__pycache__/_version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/spacy/__pycache__/__init__.cpython-311.pyc index ea429731..dee70ae4 100644 Binary files a/.venv/Lib/site-packages/spacy/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/__pycache__/about.cpython-311.pyc b/.venv/Lib/site-packages/spacy/__pycache__/about.cpython-311.pyc index 5a2aa959..eb9eb715 100644 Binary files a/.venv/Lib/site-packages/spacy/__pycache__/about.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/__pycache__/about.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/__pycache__/compat.cpython-311.pyc b/.venv/Lib/site-packages/spacy/__pycache__/compat.cpython-311.pyc index 6abc2c96..65f96a65 100644 Binary files a/.venv/Lib/site-packages/spacy/__pycache__/compat.cpython-311.pyc and 
b/.venv/Lib/site-packages/spacy/__pycache__/compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/__pycache__/errors.cpython-311.pyc b/.venv/Lib/site-packages/spacy/__pycache__/errors.cpython-311.pyc index 7e1df2e5..77536808 100644 Binary files a/.venv/Lib/site-packages/spacy/__pycache__/errors.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/__pycache__/errors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/__pycache__/git_info.cpython-311.pyc b/.venv/Lib/site-packages/spacy/__pycache__/git_info.cpython-311.pyc index e4c448aa..82ab0cae 100644 Binary files a/.venv/Lib/site-packages/spacy/__pycache__/git_info.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/__pycache__/git_info.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/__pycache__/glossary.cpython-311.pyc b/.venv/Lib/site-packages/spacy/__pycache__/glossary.cpython-311.pyc index a16f7729..a50fb99c 100644 Binary files a/.venv/Lib/site-packages/spacy/__pycache__/glossary.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/__pycache__/glossary.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/__pycache__/language.cpython-311.pyc b/.venv/Lib/site-packages/spacy/__pycache__/language.cpython-311.pyc index 45072131..b97784eb 100644 Binary files a/.venv/Lib/site-packages/spacy/__pycache__/language.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/__pycache__/language.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/__pycache__/lookups.cpython-311.pyc b/.venv/Lib/site-packages/spacy/__pycache__/lookups.cpython-311.pyc index 4a0ae411..9445ff16 100644 Binary files a/.venv/Lib/site-packages/spacy/__pycache__/lookups.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/__pycache__/lookups.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/__pycache__/pipe_analysis.cpython-311.pyc b/.venv/Lib/site-packages/spacy/__pycache__/pipe_analysis.cpython-311.pyc index dbb750db..15c52a04 100644 Binary files 
a/.venv/Lib/site-packages/spacy/__pycache__/pipe_analysis.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/__pycache__/pipe_analysis.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/__pycache__/schemas.cpython-311.pyc b/.venv/Lib/site-packages/spacy/__pycache__/schemas.cpython-311.pyc index b1d518b5..6fbc7446 100644 Binary files a/.venv/Lib/site-packages/spacy/__pycache__/schemas.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/__pycache__/schemas.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/__pycache__/scorer.cpython-311.pyc b/.venv/Lib/site-packages/spacy/__pycache__/scorer.cpython-311.pyc index b5bc7d38..24494e02 100644 Binary files a/.venv/Lib/site-packages/spacy/__pycache__/scorer.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/__pycache__/scorer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/__pycache__/ty.cpython-311.pyc b/.venv/Lib/site-packages/spacy/__pycache__/ty.cpython-311.pyc index c9d8cd8d..2ece0d21 100644 Binary files a/.venv/Lib/site-packages/spacy/__pycache__/ty.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/__pycache__/ty.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/__pycache__/util.cpython-311.pyc b/.venv/Lib/site-packages/spacy/__pycache__/util.cpython-311.pyc index 35909b78..7991ade5 100644 Binary files a/.venv/Lib/site-packages/spacy/__pycache__/util.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/__pycache__/util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/__pycache__/__init__.cpython-311.pyc index 4f65176f..647df9fa 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/__pycache__/_util.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/__pycache__/_util.cpython-311.pyc index 
90ec3ead..78b21794 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/__pycache__/_util.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/__pycache__/_util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/__pycache__/apply.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/__pycache__/apply.cpython-311.pyc index 90c0f146..71b357b7 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/__pycache__/apply.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/__pycache__/apply.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/__pycache__/assemble.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/__pycache__/assemble.cpython-311.pyc index e523cea0..8ba2cbb8 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/__pycache__/assemble.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/__pycache__/assemble.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/__pycache__/benchmark_speed.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/__pycache__/benchmark_speed.cpython-311.pyc index de6cf794..79f0865a 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/__pycache__/benchmark_speed.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/__pycache__/benchmark_speed.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/__pycache__/convert.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/__pycache__/convert.cpython-311.pyc index d63e65df..05ca4ac4 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/__pycache__/convert.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/__pycache__/convert.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/__pycache__/debug_config.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/__pycache__/debug_config.cpython-311.pyc index 34b1b5ad..4215ff15 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/__pycache__/debug_config.cpython-311.pyc and 
b/.venv/Lib/site-packages/spacy/cli/__pycache__/debug_config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/__pycache__/debug_data.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/__pycache__/debug_data.cpython-311.pyc index 2145c413..88b017cf 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/__pycache__/debug_data.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/__pycache__/debug_data.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/__pycache__/debug_diff.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/__pycache__/debug_diff.cpython-311.pyc index 49778e69..58b1b4b6 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/__pycache__/debug_diff.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/__pycache__/debug_diff.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/__pycache__/debug_model.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/__pycache__/debug_model.cpython-311.pyc index 9a334da5..9203d9a5 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/__pycache__/debug_model.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/__pycache__/debug_model.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/__pycache__/download.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/__pycache__/download.cpython-311.pyc index 9c07837d..c259e2eb 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/__pycache__/download.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/__pycache__/download.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/__pycache__/evaluate.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/__pycache__/evaluate.cpython-311.pyc index 810c1901..a2fc4731 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/__pycache__/evaluate.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/__pycache__/evaluate.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/__pycache__/find_function.cpython-311.pyc 
b/.venv/Lib/site-packages/spacy/cli/__pycache__/find_function.cpython-311.pyc index 860ca5ad..b249618a 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/__pycache__/find_function.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/__pycache__/find_function.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/__pycache__/find_threshold.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/__pycache__/find_threshold.cpython-311.pyc index 40930819..6225e15c 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/__pycache__/find_threshold.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/__pycache__/find_threshold.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/__pycache__/info.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/__pycache__/info.cpython-311.pyc index 1d95004b..fb95d5c2 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/__pycache__/info.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/__pycache__/info.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/__pycache__/init_config.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/__pycache__/init_config.cpython-311.pyc index 0b6b81b5..1496b99c 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/__pycache__/init_config.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/__pycache__/init_config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/__pycache__/init_pipeline.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/__pycache__/init_pipeline.cpython-311.pyc index 0c19d239..0a74aeaf 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/__pycache__/init_pipeline.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/__pycache__/init_pipeline.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/__pycache__/package.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/__pycache__/package.cpython-311.pyc index 20e6f274..75a0451d 100644 Binary files 
a/.venv/Lib/site-packages/spacy/cli/__pycache__/package.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/__pycache__/package.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/__pycache__/pretrain.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/__pycache__/pretrain.cpython-311.pyc index a1b933d6..14d4d2e9 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/__pycache__/pretrain.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/__pycache__/pretrain.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/__pycache__/profile.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/__pycache__/profile.cpython-311.pyc index 54086b2d..5594d4a5 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/__pycache__/profile.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/__pycache__/profile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/__pycache__/train.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/__pycache__/train.cpython-311.pyc index b2a14e5f..e43cd6eb 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/__pycache__/train.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/__pycache__/train.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/__pycache__/validate.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/__pycache__/validate.cpython-311.pyc index 55b33c52..1d4c6891 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/__pycache__/validate.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/__pycache__/validate.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/project/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/project/__pycache__/__init__.cpython-311.pyc index fad30408..c0b058cc 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/project/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/project/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/spacy/cli/project/__pycache__/assets.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/project/__pycache__/assets.cpython-311.pyc index d9f6c72f..de716633 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/project/__pycache__/assets.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/project/__pycache__/assets.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/project/__pycache__/clone.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/project/__pycache__/clone.cpython-311.pyc index 6b30e10f..f30b98c7 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/project/__pycache__/clone.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/project/__pycache__/clone.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/project/__pycache__/document.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/project/__pycache__/document.cpython-311.pyc index f7acee90..be87f97e 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/project/__pycache__/document.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/project/__pycache__/document.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/project/__pycache__/dvc.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/project/__pycache__/dvc.cpython-311.pyc index acbfb050..88628e46 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/project/__pycache__/dvc.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/project/__pycache__/dvc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/project/__pycache__/pull.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/project/__pycache__/pull.cpython-311.pyc index 648953e8..4702084e 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/project/__pycache__/pull.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/project/__pycache__/pull.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/project/__pycache__/push.cpython-311.pyc 
b/.venv/Lib/site-packages/spacy/cli/project/__pycache__/push.cpython-311.pyc index 72c15c05..c9cb316d 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/project/__pycache__/push.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/project/__pycache__/push.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/cli/project/__pycache__/run.cpython-311.pyc b/.venv/Lib/site-packages/spacy/cli/project/__pycache__/run.cpython-311.pyc index 7e600e68..656d59de 100644 Binary files a/.venv/Lib/site-packages/spacy/cli/project/__pycache__/run.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/cli/project/__pycache__/run.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/displacy/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/spacy/displacy/__pycache__/__init__.cpython-311.pyc index 81516f87..c3e45ae8 100644 Binary files a/.venv/Lib/site-packages/spacy/displacy/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/displacy/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/displacy/__pycache__/render.cpython-311.pyc b/.venv/Lib/site-packages/spacy/displacy/__pycache__/render.cpython-311.pyc index d8a46120..c21dcf76 100644 Binary files a/.venv/Lib/site-packages/spacy/displacy/__pycache__/render.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/displacy/__pycache__/render.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/displacy/__pycache__/templates.cpython-311.pyc b/.venv/Lib/site-packages/spacy/displacy/__pycache__/templates.cpython-311.pyc index a70be75e..a9f5da30 100644 Binary files a/.venv/Lib/site-packages/spacy/displacy/__pycache__/templates.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/displacy/__pycache__/templates.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/kb/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/spacy/kb/__pycache__/__init__.cpython-311.pyc index 6549c12b..caed1a49 100644 Binary files 
a/.venv/Lib/site-packages/spacy/kb/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/kb/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/__pycache__/__init__.cpython-311.pyc index 257027f2..797ff007 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/__pycache__/char_classes.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/__pycache__/char_classes.cpython-311.pyc index 4be7a2c8..f875cd52 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/__pycache__/char_classes.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/__pycache__/char_classes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/__pycache__/lex_attrs.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/__pycache__/lex_attrs.cpython-311.pyc index 9df373ab..351b1a9c 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/__pycache__/lex_attrs.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/__pycache__/lex_attrs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/__pycache__/norm_exceptions.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/__pycache__/norm_exceptions.cpython-311.pyc index 7449830f..e5bbeac9 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/__pycache__/norm_exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/__pycache__/norm_exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/__pycache__/punctuation.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/__pycache__/punctuation.cpython-311.pyc index b94a8405..69abc0f6 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/__pycache__/punctuation.cpython-311.pyc and 
b/.venv/Lib/site-packages/spacy/lang/__pycache__/punctuation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/__pycache__/tokenizer_exceptions.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/__pycache__/tokenizer_exceptions.cpython-311.pyc index 282265a7..afe6f97a 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/__pycache__/tokenizer_exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/__pycache__/tokenizer_exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/ar/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/ar/__pycache__/__init__.cpython-311.pyc index 8f521e8d..3720eb47 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/ar/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/ar/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/ar/__pycache__/lex_attrs.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/ar/__pycache__/lex_attrs.cpython-311.pyc index f7a17e21..3d4107ae 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/ar/__pycache__/lex_attrs.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/ar/__pycache__/lex_attrs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/ar/__pycache__/punctuation.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/ar/__pycache__/punctuation.cpython-311.pyc index c09a08ac..2d00240d 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/ar/__pycache__/punctuation.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/ar/__pycache__/punctuation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/ar/__pycache__/stop_words.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/ar/__pycache__/stop_words.cpython-311.pyc index 09ab0904..e99a142e 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/ar/__pycache__/stop_words.cpython-311.pyc and 
b/.venv/Lib/site-packages/spacy/lang/ar/__pycache__/stop_words.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/ar/__pycache__/tokenizer_exceptions.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/ar/__pycache__/tokenizer_exceptions.cpython-311.pyc index c45219c7..1006db44 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/ar/__pycache__/tokenizer_exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/ar/__pycache__/tokenizer_exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/en/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/en/__pycache__/__init__.cpython-311.pyc index 6c5e708d..ddfd6342 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/en/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/en/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/en/__pycache__/lemmatizer.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/en/__pycache__/lemmatizer.cpython-311.pyc index 80778c26..237c2b9e 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/en/__pycache__/lemmatizer.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/en/__pycache__/lemmatizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/en/__pycache__/lex_attrs.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/en/__pycache__/lex_attrs.cpython-311.pyc index 9377b257..e5fbc494 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/en/__pycache__/lex_attrs.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/en/__pycache__/lex_attrs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/en/__pycache__/punctuation.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/en/__pycache__/punctuation.cpython-311.pyc index 97ecd1ca..55795b29 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/en/__pycache__/punctuation.cpython-311.pyc and 
b/.venv/Lib/site-packages/spacy/lang/en/__pycache__/punctuation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/en/__pycache__/stop_words.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/en/__pycache__/stop_words.cpython-311.pyc index c1c6fa5a..ca0c2f61 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/en/__pycache__/stop_words.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/en/__pycache__/stop_words.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/en/__pycache__/syntax_iterators.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/en/__pycache__/syntax_iterators.cpython-311.pyc index abe5691c..221d8c35 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/en/__pycache__/syntax_iterators.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/en/__pycache__/syntax_iterators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/en/__pycache__/tokenizer_exceptions.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/en/__pycache__/tokenizer_exceptions.cpython-311.pyc index 5b907b66..ca69cdf8 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/en/__pycache__/tokenizer_exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/en/__pycache__/tokenizer_exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/es/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/es/__pycache__/__init__.cpython-311.pyc index 64f6ecbe..d8a79700 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/es/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/es/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/es/__pycache__/lemmatizer.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/es/__pycache__/lemmatizer.cpython-311.pyc index 33f7fb6e..dd50c0ca 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/es/__pycache__/lemmatizer.cpython-311.pyc and 
b/.venv/Lib/site-packages/spacy/lang/es/__pycache__/lemmatizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/es/__pycache__/lex_attrs.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/es/__pycache__/lex_attrs.cpython-311.pyc index 27eb06fd..cf6a3d9e 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/es/__pycache__/lex_attrs.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/es/__pycache__/lex_attrs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/es/__pycache__/punctuation.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/es/__pycache__/punctuation.cpython-311.pyc index a09ec3e4..52ce6827 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/es/__pycache__/punctuation.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/es/__pycache__/punctuation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/es/__pycache__/stop_words.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/es/__pycache__/stop_words.cpython-311.pyc index 5de8b942..90bbe353 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/es/__pycache__/stop_words.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/es/__pycache__/stop_words.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/es/__pycache__/syntax_iterators.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/es/__pycache__/syntax_iterators.cpython-311.pyc index 5d04b3f6..ffa93284 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/es/__pycache__/syntax_iterators.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/es/__pycache__/syntax_iterators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/es/__pycache__/tokenizer_exceptions.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/es/__pycache__/tokenizer_exceptions.cpython-311.pyc index 3b9656cd..3c389e87 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/es/__pycache__/tokenizer_exceptions.cpython-311.pyc and 
b/.venv/Lib/site-packages/spacy/lang/es/__pycache__/tokenizer_exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/__init__.cpython-311.pyc index 6be95f5e..0bb52ede 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/stop_words.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/stop_words.cpython-311.pyc index 9ab843a5..170c1aa9 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/stop_words.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/stop_words.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/syntax_iterators.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/syntax_iterators.cpython-311.pyc index 3d24f90b..61024f63 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/syntax_iterators.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/syntax_iterators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/tag_bigram_map.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/tag_bigram_map.cpython-311.pyc index 752af63d..fe084ea1 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/tag_bigram_map.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/tag_bigram_map.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/tag_map.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/tag_map.cpython-311.pyc index 9e05a5da..cb470388 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/tag_map.cpython-311.pyc and 
b/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/tag_map.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/tag_orth_map.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/tag_orth_map.cpython-311.pyc index 32db1dcf..9f631455 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/tag_orth_map.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/ja/__pycache__/tag_orth_map.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/xx/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/xx/__pycache__/__init__.cpython-311.pyc index 37c4633b..f6cf34a3 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/xx/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/xx/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/zh/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/zh/__pycache__/__init__.cpython-311.pyc index 79d6486c..d618c3bb 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/zh/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/zh/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/zh/__pycache__/lex_attrs.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/zh/__pycache__/lex_attrs.cpython-311.pyc index 94a8bff7..f45990eb 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/zh/__pycache__/lex_attrs.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/zh/__pycache__/lex_attrs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/lang/zh/__pycache__/stop_words.cpython-311.pyc b/.venv/Lib/site-packages/spacy/lang/zh/__pycache__/stop_words.cpython-311.pyc index 866c21bb..9f90329a 100644 Binary files a/.venv/Lib/site-packages/spacy/lang/zh/__pycache__/stop_words.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/lang/zh/__pycache__/stop_words.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/spacy/matcher/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/spacy/matcher/__pycache__/__init__.cpython-311.pyc index 91b52e22..85fbf477 100644 Binary files a/.venv/Lib/site-packages/spacy/matcher/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/matcher/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/ml/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/spacy/ml/__pycache__/__init__.cpython-311.pyc index 6ec917ab..e92284f3 100644 Binary files a/.venv/Lib/site-packages/spacy/ml/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/ml/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/ml/__pycache__/_character_embed.cpython-311.pyc b/.venv/Lib/site-packages/spacy/ml/__pycache__/_character_embed.cpython-311.pyc index c3df0e06..eb71c744 100644 Binary files a/.venv/Lib/site-packages/spacy/ml/__pycache__/_character_embed.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/ml/__pycache__/_character_embed.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/ml/__pycache__/_precomputable_affine.cpython-311.pyc b/.venv/Lib/site-packages/spacy/ml/__pycache__/_precomputable_affine.cpython-311.pyc index fd433b76..d7235886 100644 Binary files a/.venv/Lib/site-packages/spacy/ml/__pycache__/_precomputable_affine.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/ml/__pycache__/_precomputable_affine.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/ml/__pycache__/callbacks.cpython-311.pyc b/.venv/Lib/site-packages/spacy/ml/__pycache__/callbacks.cpython-311.pyc index 8c509b0d..dd71db28 100644 Binary files a/.venv/Lib/site-packages/spacy/ml/__pycache__/callbacks.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/ml/__pycache__/callbacks.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/ml/__pycache__/extract_ngrams.cpython-311.pyc 
b/.venv/Lib/site-packages/spacy/ml/__pycache__/extract_ngrams.cpython-311.pyc index 54232c63..ce9c7bc1 100644 Binary files a/.venv/Lib/site-packages/spacy/ml/__pycache__/extract_ngrams.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/ml/__pycache__/extract_ngrams.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/ml/__pycache__/extract_spans.cpython-311.pyc b/.venv/Lib/site-packages/spacy/ml/__pycache__/extract_spans.cpython-311.pyc index e1321fe1..295a258e 100644 Binary files a/.venv/Lib/site-packages/spacy/ml/__pycache__/extract_spans.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/ml/__pycache__/extract_spans.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/ml/__pycache__/featureextractor.cpython-311.pyc b/.venv/Lib/site-packages/spacy/ml/__pycache__/featureextractor.cpython-311.pyc index 905fae51..cc528d0c 100644 Binary files a/.venv/Lib/site-packages/spacy/ml/__pycache__/featureextractor.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/ml/__pycache__/featureextractor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/ml/__pycache__/staticvectors.cpython-311.pyc b/.venv/Lib/site-packages/spacy/ml/__pycache__/staticvectors.cpython-311.pyc index 8c3945f1..eca9429d 100644 Binary files a/.venv/Lib/site-packages/spacy/ml/__pycache__/staticvectors.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/ml/__pycache__/staticvectors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/ml/__pycache__/tb_framework.cpython-311.pyc b/.venv/Lib/site-packages/spacy/ml/__pycache__/tb_framework.cpython-311.pyc index db124584..a6d26b6e 100644 Binary files a/.venv/Lib/site-packages/spacy/ml/__pycache__/tb_framework.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/ml/__pycache__/tb_framework.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/ml/models/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/spacy/ml/models/__pycache__/__init__.cpython-311.pyc index e498915e..7c50e798 100644 
Binary files a/.venv/Lib/site-packages/spacy/ml/models/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/ml/models/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/ml/models/__pycache__/entity_linker.cpython-311.pyc b/.venv/Lib/site-packages/spacy/ml/models/__pycache__/entity_linker.cpython-311.pyc index 6cfc3b06..4627bf92 100644 Binary files a/.venv/Lib/site-packages/spacy/ml/models/__pycache__/entity_linker.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/ml/models/__pycache__/entity_linker.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/ml/models/__pycache__/multi_task.cpython-311.pyc b/.venv/Lib/site-packages/spacy/ml/models/__pycache__/multi_task.cpython-311.pyc index be02c018..315bd17b 100644 Binary files a/.venv/Lib/site-packages/spacy/ml/models/__pycache__/multi_task.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/ml/models/__pycache__/multi_task.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/ml/models/__pycache__/parser.cpython-311.pyc b/.venv/Lib/site-packages/spacy/ml/models/__pycache__/parser.cpython-311.pyc index 01c48bd4..35a235b0 100644 Binary files a/.venv/Lib/site-packages/spacy/ml/models/__pycache__/parser.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/ml/models/__pycache__/parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/ml/models/__pycache__/span_finder.cpython-311.pyc b/.venv/Lib/site-packages/spacy/ml/models/__pycache__/span_finder.cpython-311.pyc index 98ab5d7f..03532a09 100644 Binary files a/.venv/Lib/site-packages/spacy/ml/models/__pycache__/span_finder.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/ml/models/__pycache__/span_finder.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/ml/models/__pycache__/spancat.cpython-311.pyc b/.venv/Lib/site-packages/spacy/ml/models/__pycache__/spancat.cpython-311.pyc index bc0a5338..53431412 100644 Binary files 
a/.venv/Lib/site-packages/spacy/ml/models/__pycache__/spancat.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/ml/models/__pycache__/spancat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/ml/models/__pycache__/tagger.cpython-311.pyc b/.venv/Lib/site-packages/spacy/ml/models/__pycache__/tagger.cpython-311.pyc index dc9a6cac..4be9d084 100644 Binary files a/.venv/Lib/site-packages/spacy/ml/models/__pycache__/tagger.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/ml/models/__pycache__/tagger.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/ml/models/__pycache__/textcat.cpython-311.pyc b/.venv/Lib/site-packages/spacy/ml/models/__pycache__/textcat.cpython-311.pyc index 8a02f2e3..ec68f9c8 100644 Binary files a/.venv/Lib/site-packages/spacy/ml/models/__pycache__/textcat.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/ml/models/__pycache__/textcat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/ml/models/__pycache__/tok2vec.cpython-311.pyc b/.venv/Lib/site-packages/spacy/ml/models/__pycache__/tok2vec.cpython-311.pyc index d16a88d5..f0e0c97a 100644 Binary files a/.venv/Lib/site-packages/spacy/ml/models/__pycache__/tok2vec.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/ml/models/__pycache__/tok2vec.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/__init__.cpython-311.pyc index 50ab6c35..cb9e78e3 100644 Binary files a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/attributeruler.cpython-311.pyc b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/attributeruler.cpython-311.pyc index af32df03..4fc53a39 100644 Binary files a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/attributeruler.cpython-311.pyc and 
b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/attributeruler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/edit_tree_lemmatizer.cpython-311.pyc b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/edit_tree_lemmatizer.cpython-311.pyc index 9e6681b9..0b0776c5 100644 Binary files a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/edit_tree_lemmatizer.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/edit_tree_lemmatizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/entity_linker.cpython-311.pyc b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/entity_linker.cpython-311.pyc index c196c80d..f9981ed8 100644 Binary files a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/entity_linker.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/entity_linker.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/entityruler.cpython-311.pyc b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/entityruler.cpython-311.pyc index ec00198f..b518cfb4 100644 Binary files a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/entityruler.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/entityruler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/functions.cpython-311.pyc b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/functions.cpython-311.pyc index 3a65c50b..b66e8e07 100644 Binary files a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/functions.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/functions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/lemmatizer.cpython-311.pyc b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/lemmatizer.cpython-311.pyc index 56e095ee..abf856a5 100644 Binary files a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/lemmatizer.cpython-311.pyc and 
b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/lemmatizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/span_finder.cpython-311.pyc b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/span_finder.cpython-311.pyc index ae506289..e525eb34 100644 Binary files a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/span_finder.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/span_finder.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/span_ruler.cpython-311.pyc b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/span_ruler.cpython-311.pyc index 579ea1ff..0fc4fdc0 100644 Binary files a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/span_ruler.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/span_ruler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/spancat.cpython-311.pyc b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/spancat.cpython-311.pyc index 8f885db7..604b2247 100644 Binary files a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/spancat.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/spancat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/textcat.cpython-311.pyc b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/textcat.cpython-311.pyc index 37df6d6e..f7a67594 100644 Binary files a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/textcat.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/textcat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/textcat_multilabel.cpython-311.pyc b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/textcat_multilabel.cpython-311.pyc index 3de9640d..e3c243db 100644 Binary files a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/textcat_multilabel.cpython-311.pyc and 
b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/textcat_multilabel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/tok2vec.cpython-311.pyc b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/tok2vec.cpython-311.pyc index e2da2eb1..a483705f 100644 Binary files a/.venv/Lib/site-packages/spacy/pipeline/__pycache__/tok2vec.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/pipeline/__pycache__/tok2vec.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/pipeline/_edit_tree_internals/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/spacy/pipeline/_edit_tree_internals/__pycache__/__init__.cpython-311.pyc index 2b90f05d..ac92f0ff 100644 Binary files a/.venv/Lib/site-packages/spacy/pipeline/_edit_tree_internals/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/pipeline/_edit_tree_internals/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/pipeline/_edit_tree_internals/__pycache__/schemas.cpython-311.pyc b/.venv/Lib/site-packages/spacy/pipeline/_edit_tree_internals/__pycache__/schemas.cpython-311.pyc index 32b2cc12..6bdb8fdd 100644 Binary files a/.venv/Lib/site-packages/spacy/pipeline/_edit_tree_internals/__pycache__/schemas.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/pipeline/_edit_tree_internals/__pycache__/schemas.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/pipeline/_parser_internals/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/spacy/pipeline/_parser_internals/__pycache__/__init__.cpython-311.pyc index 8bbd11a5..b191d10f 100644 Binary files a/.venv/Lib/site-packages/spacy/pipeline/_parser_internals/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/pipeline/_parser_internals/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/pipeline/legacy/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/spacy/pipeline/legacy/__pycache__/__init__.cpython-311.pyc index 6dafe272..e16b1dfd 100644 Binary files a/.venv/Lib/site-packages/spacy/pipeline/legacy/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/pipeline/legacy/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/pipeline/legacy/__pycache__/entity_linker.cpython-311.pyc b/.venv/Lib/site-packages/spacy/pipeline/legacy/__pycache__/entity_linker.cpython-311.pyc index 9b13a482..301ff0a7 100644 Binary files a/.venv/Lib/site-packages/spacy/pipeline/legacy/__pycache__/entity_linker.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/pipeline/legacy/__pycache__/entity_linker.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/tokens/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/spacy/tokens/__pycache__/__init__.cpython-311.pyc index 394c142e..a65af11e 100644 Binary files a/.venv/Lib/site-packages/spacy/tokens/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/tokens/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/tokens/__pycache__/_dict_proxies.cpython-311.pyc b/.venv/Lib/site-packages/spacy/tokens/__pycache__/_dict_proxies.cpython-311.pyc index 3b4b4c91..6ecc56d9 100644 Binary files a/.venv/Lib/site-packages/spacy/tokens/__pycache__/_dict_proxies.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/tokens/__pycache__/_dict_proxies.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/tokens/__pycache__/_serialize.cpython-311.pyc b/.venv/Lib/site-packages/spacy/tokens/__pycache__/_serialize.cpython-311.pyc index f8fc22b8..0ee4348a 100644 Binary files a/.venv/Lib/site-packages/spacy/tokens/__pycache__/_serialize.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/tokens/__pycache__/_serialize.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/tokens/__pycache__/underscore.cpython-311.pyc 
b/.venv/Lib/site-packages/spacy/tokens/__pycache__/underscore.cpython-311.pyc index 127d7640..ea389e4e 100644 Binary files a/.venv/Lib/site-packages/spacy/tokens/__pycache__/underscore.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/tokens/__pycache__/underscore.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/training/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/spacy/training/__pycache__/__init__.cpython-311.pyc index 45004b61..5bdf6a17 100644 Binary files a/.venv/Lib/site-packages/spacy/training/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/training/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/training/__pycache__/alignment.cpython-311.pyc b/.venv/Lib/site-packages/spacy/training/__pycache__/alignment.cpython-311.pyc index 7f6b41ca..b5bf0167 100644 Binary files a/.venv/Lib/site-packages/spacy/training/__pycache__/alignment.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/training/__pycache__/alignment.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/training/__pycache__/augment.cpython-311.pyc b/.venv/Lib/site-packages/spacy/training/__pycache__/augment.cpython-311.pyc index 13480101..d03348da 100644 Binary files a/.venv/Lib/site-packages/spacy/training/__pycache__/augment.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/training/__pycache__/augment.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/training/__pycache__/batchers.cpython-311.pyc b/.venv/Lib/site-packages/spacy/training/__pycache__/batchers.cpython-311.pyc index 5a7933d8..dcd5ba18 100644 Binary files a/.venv/Lib/site-packages/spacy/training/__pycache__/batchers.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/training/__pycache__/batchers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/training/__pycache__/callbacks.cpython-311.pyc b/.venv/Lib/site-packages/spacy/training/__pycache__/callbacks.cpython-311.pyc index 7a3f5d26..c3d0cd99 
100644 Binary files a/.venv/Lib/site-packages/spacy/training/__pycache__/callbacks.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/training/__pycache__/callbacks.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/training/__pycache__/corpus.cpython-311.pyc b/.venv/Lib/site-packages/spacy/training/__pycache__/corpus.cpython-311.pyc index 254fb90b..49f756c1 100644 Binary files a/.venv/Lib/site-packages/spacy/training/__pycache__/corpus.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/training/__pycache__/corpus.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/training/__pycache__/initialize.cpython-311.pyc b/.venv/Lib/site-packages/spacy/training/__pycache__/initialize.cpython-311.pyc index f1379402..4158c2d9 100644 Binary files a/.venv/Lib/site-packages/spacy/training/__pycache__/initialize.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/training/__pycache__/initialize.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/training/__pycache__/iob_utils.cpython-311.pyc b/.venv/Lib/site-packages/spacy/training/__pycache__/iob_utils.cpython-311.pyc index eea6c16e..1ef58070 100644 Binary files a/.venv/Lib/site-packages/spacy/training/__pycache__/iob_utils.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/training/__pycache__/iob_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/training/__pycache__/loggers.cpython-311.pyc b/.venv/Lib/site-packages/spacy/training/__pycache__/loggers.cpython-311.pyc index ec482865..df14d6b4 100644 Binary files a/.venv/Lib/site-packages/spacy/training/__pycache__/loggers.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/training/__pycache__/loggers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/training/__pycache__/loop.cpython-311.pyc b/.venv/Lib/site-packages/spacy/training/__pycache__/loop.cpython-311.pyc index 6e1a3d14..b72238ae 100644 Binary files a/.venv/Lib/site-packages/spacy/training/__pycache__/loop.cpython-311.pyc and 
b/.venv/Lib/site-packages/spacy/training/__pycache__/loop.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/training/__pycache__/pretrain.cpython-311.pyc b/.venv/Lib/site-packages/spacy/training/__pycache__/pretrain.cpython-311.pyc index 87cc9b63..a73ff414 100644 Binary files a/.venv/Lib/site-packages/spacy/training/__pycache__/pretrain.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/training/__pycache__/pretrain.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/training/converters/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/spacy/training/converters/__pycache__/__init__.cpython-311.pyc index 2db16418..bfb1d882 100644 Binary files a/.venv/Lib/site-packages/spacy/training/converters/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/training/converters/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/training/converters/__pycache__/conll_ner_to_docs.cpython-311.pyc b/.venv/Lib/site-packages/spacy/training/converters/__pycache__/conll_ner_to_docs.cpython-311.pyc index d45480d9..fa8a1dca 100644 Binary files a/.venv/Lib/site-packages/spacy/training/converters/__pycache__/conll_ner_to_docs.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/training/converters/__pycache__/conll_ner_to_docs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/training/converters/__pycache__/conllu_to_docs.cpython-311.pyc b/.venv/Lib/site-packages/spacy/training/converters/__pycache__/conllu_to_docs.cpython-311.pyc index a0487ff9..3c9d590b 100644 Binary files a/.venv/Lib/site-packages/spacy/training/converters/__pycache__/conllu_to_docs.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/training/converters/__pycache__/conllu_to_docs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/training/converters/__pycache__/iob_to_docs.cpython-311.pyc b/.venv/Lib/site-packages/spacy/training/converters/__pycache__/iob_to_docs.cpython-311.pyc index 
efe18ac1..6c33e7c0 100644 Binary files a/.venv/Lib/site-packages/spacy/training/converters/__pycache__/iob_to_docs.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/training/converters/__pycache__/iob_to_docs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/spacy/training/converters/__pycache__/json_to_docs.cpython-311.pyc b/.venv/Lib/site-packages/spacy/training/converters/__pycache__/json_to_docs.cpython-311.pyc index 5761f45d..dd42424a 100644 Binary files a/.venv/Lib/site-packages/spacy/training/converters/__pycache__/json_to_docs.cpython-311.pyc and b/.venv/Lib/site-packages/spacy/training/converters/__pycache__/json_to_docs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/srsly/__pycache__/__init__.cpython-311.pyc index eb447059..bed4bb76 100644 Binary files a/.venv/Lib/site-packages/srsly/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/__pycache__/_json_api.cpython-311.pyc b/.venv/Lib/site-packages/srsly/__pycache__/_json_api.cpython-311.pyc index eb70b9ba..c3dc04c6 100644 Binary files a/.venv/Lib/site-packages/srsly/__pycache__/_json_api.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/__pycache__/_json_api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/__pycache__/_msgpack_api.cpython-311.pyc b/.venv/Lib/site-packages/srsly/__pycache__/_msgpack_api.cpython-311.pyc index 79f073f5..05044b08 100644 Binary files a/.venv/Lib/site-packages/srsly/__pycache__/_msgpack_api.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/__pycache__/_msgpack_api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/__pycache__/_pickle_api.cpython-311.pyc b/.venv/Lib/site-packages/srsly/__pycache__/_pickle_api.cpython-311.pyc index e5a40505..72c07542 100644 Binary files a/.venv/Lib/site-packages/srsly/__pycache__/_pickle_api.cpython-311.pyc and 
b/.venv/Lib/site-packages/srsly/__pycache__/_pickle_api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/__pycache__/_yaml_api.cpython-311.pyc b/.venv/Lib/site-packages/srsly/__pycache__/_yaml_api.cpython-311.pyc index 58cc0cd0..8eba5d5d 100644 Binary files a/.venv/Lib/site-packages/srsly/__pycache__/_yaml_api.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/__pycache__/_yaml_api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/__pycache__/about.cpython-311.pyc b/.venv/Lib/site-packages/srsly/__pycache__/about.cpython-311.pyc index 9fff19e2..af6a5cd2 100644 Binary files a/.venv/Lib/site-packages/srsly/__pycache__/about.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/__pycache__/about.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/__pycache__/util.cpython-311.pyc b/.venv/Lib/site-packages/srsly/__pycache__/util.cpython-311.pyc index 90fea857..d2d02fd5 100644 Binary files a/.venv/Lib/site-packages/srsly/__pycache__/util.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/__pycache__/util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/cloudpickle/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/srsly/cloudpickle/__pycache__/__init__.cpython-311.pyc index 1ae3662d..abf1bdfd 100644 Binary files a/.venv/Lib/site-packages/srsly/cloudpickle/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/cloudpickle/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/cloudpickle/__pycache__/cloudpickle.cpython-311.pyc b/.venv/Lib/site-packages/srsly/cloudpickle/__pycache__/cloudpickle.cpython-311.pyc index c98d3893..c4da4ac5 100644 Binary files a/.venv/Lib/site-packages/srsly/cloudpickle/__pycache__/cloudpickle.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/cloudpickle/__pycache__/cloudpickle.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/cloudpickle/__pycache__/cloudpickle_fast.cpython-311.pyc 
b/.venv/Lib/site-packages/srsly/cloudpickle/__pycache__/cloudpickle_fast.cpython-311.pyc index 36b274f9..354e4d6c 100644 Binary files a/.venv/Lib/site-packages/srsly/cloudpickle/__pycache__/cloudpickle_fast.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/cloudpickle/__pycache__/cloudpickle_fast.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/cloudpickle/__pycache__/compat.cpython-311.pyc b/.venv/Lib/site-packages/srsly/cloudpickle/__pycache__/compat.cpython-311.pyc index 4f8c21ea..1cb6847e 100644 Binary files a/.venv/Lib/site-packages/srsly/cloudpickle/__pycache__/compat.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/cloudpickle/__pycache__/compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/msgpack/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/srsly/msgpack/__pycache__/__init__.cpython-311.pyc index bc3c799f..fba2e53f 100644 Binary files a/.venv/Lib/site-packages/srsly/msgpack/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/msgpack/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/msgpack/__pycache__/_ext_type.cpython-311.pyc b/.venv/Lib/site-packages/srsly/msgpack/__pycache__/_ext_type.cpython-311.pyc index 41aba90c..1b7956b8 100644 Binary files a/.venv/Lib/site-packages/srsly/msgpack/__pycache__/_ext_type.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/msgpack/__pycache__/_ext_type.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/msgpack/__pycache__/_msgpack_numpy.cpython-311.pyc b/.venv/Lib/site-packages/srsly/msgpack/__pycache__/_msgpack_numpy.cpython-311.pyc index 8cf0c605..2053191d 100644 Binary files a/.venv/Lib/site-packages/srsly/msgpack/__pycache__/_msgpack_numpy.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/msgpack/__pycache__/_msgpack_numpy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/msgpack/__pycache__/_version.cpython-311.pyc 
b/.venv/Lib/site-packages/srsly/msgpack/__pycache__/_version.cpython-311.pyc index b8c7cb51..a55a7a0f 100644 Binary files a/.venv/Lib/site-packages/srsly/msgpack/__pycache__/_version.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/msgpack/__pycache__/_version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/msgpack/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/srsly/msgpack/__pycache__/exceptions.cpython-311.pyc index b3525cba..c933a899 100644 Binary files a/.venv/Lib/site-packages/srsly/msgpack/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/msgpack/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/msgpack/__pycache__/util.cpython-311.pyc b/.venv/Lib/site-packages/srsly/msgpack/__pycache__/util.cpython-311.pyc index 24776667..5a9e8707 100644 Binary files a/.venv/Lib/site-packages/srsly/msgpack/__pycache__/util.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/msgpack/__pycache__/util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/__init__.cpython-311.pyc index c4b58db8..3e0f4cc4 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/anchor.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/anchor.cpython-311.pyc index 0305cf20..6a382f91 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/anchor.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/anchor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/comments.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/comments.cpython-311.pyc index 
7668e331..79832503 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/comments.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/comments.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/compat.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/compat.cpython-311.pyc index abdaad1c..2ea1c373 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/compat.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/composer.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/composer.cpython-311.pyc index b2566934..531dd393 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/composer.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/composer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/constructor.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/constructor.cpython-311.pyc index ded519c8..1c1e0e60 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/constructor.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/constructor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/dumper.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/dumper.cpython-311.pyc index 93314716..56536240 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/dumper.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/dumper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/emitter.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/emitter.cpython-311.pyc index 3eaaceba..6ede0f49 100644 Binary files 
a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/emitter.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/emitter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/error.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/error.cpython-311.pyc index e8c9b5c4..2994bd28 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/error.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/error.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/events.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/events.cpython-311.pyc index 36a06c98..ae4a845c 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/events.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/events.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/loader.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/loader.cpython-311.pyc index 209bbbf3..965ed556 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/loader.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/loader.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/main.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/main.cpython-311.pyc index ec8bfc6d..1af1d64b 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/main.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/main.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/nodes.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/nodes.cpython-311.pyc index 7e3dede5..25f8d200 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/nodes.cpython-311.pyc and 
b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/nodes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/parser.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/parser.cpython-311.pyc index 8f2e9935..6979ea1f 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/parser.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/reader.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/reader.cpython-311.pyc index 0052787a..ca9aae05 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/reader.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/reader.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/representer.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/representer.cpython-311.pyc index 08d87c8e..cf07b132 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/representer.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/representer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/resolver.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/resolver.cpython-311.pyc index acd63c09..c08c93e5 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/resolver.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/resolver.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/scalarbool.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/scalarbool.cpython-311.pyc index 3958a992..aa2ec674 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/scalarbool.cpython-311.pyc and 
b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/scalarbool.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/scalarfloat.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/scalarfloat.cpython-311.pyc index 5270326c..683b5da8 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/scalarfloat.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/scalarfloat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/scalarint.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/scalarint.cpython-311.pyc index bdc2afe9..e5b0f664 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/scalarint.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/scalarint.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/scalarstring.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/scalarstring.cpython-311.pyc index d3696b53..a4ebe2c4 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/scalarstring.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/scalarstring.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/scanner.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/scanner.cpython-311.pyc index 137bb6f3..a2fa6dfa 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/scanner.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/scanner.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/serializer.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/serializer.cpython-311.pyc index 7fef70a8..4633cbd6 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/serializer.cpython-311.pyc and 
b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/serializer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/timestamp.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/timestamp.cpython-311.pyc index 25450848..b56d87c7 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/timestamp.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/timestamp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/tokens.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/tokens.cpython-311.pyc index dd2adf37..7c177256 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/tokens.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/tokens.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/util.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/util.cpython-311.pyc index 1bc24d53..fb4caaf6 100644 Binary files a/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/util.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ruamel_yaml/__pycache__/util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/srsly/ujson/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/srsly/ujson/__pycache__/__init__.cpython-311.pyc index d323c60a..edcde700 100644 Binary files a/.venv/Lib/site-packages/srsly/ujson/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/srsly/ujson/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/__pycache__/__init__.cpython-311.pyc index 23f445d3..20c43f92 100644 Binary files a/.venv/Lib/site-packages/sympy/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/sympy/__pycache__/release.cpython-311.pyc b/.venv/Lib/site-packages/sympy/__pycache__/release.cpython-311.pyc index 4335e6c9..032bec83 100644 Binary files a/.venv/Lib/site-packages/sympy/__pycache__/release.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/__pycache__/release.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/algebras/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/algebras/__pycache__/__init__.cpython-311.pyc index 9c1407f8..837c3a33 100644 Binary files a/.venv/Lib/site-packages/sympy/algebras/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/algebras/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/algebras/__pycache__/quaternion.cpython-311.pyc b/.venv/Lib/site-packages/sympy/algebras/__pycache__/quaternion.cpython-311.pyc index fef43a35..cb0102e9 100644 Binary files a/.venv/Lib/site-packages/sympy/algebras/__pycache__/quaternion.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/algebras/__pycache__/quaternion.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/assumptions/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/assumptions/__pycache__/__init__.cpython-311.pyc index 518e88d6..7217eee2 100644 Binary files a/.venv/Lib/site-packages/sympy/assumptions/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/assumptions/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/assumptions/__pycache__/ask.cpython-311.pyc b/.venv/Lib/site-packages/sympy/assumptions/__pycache__/ask.cpython-311.pyc index 5e82fafb..e2ac56e7 100644 Binary files a/.venv/Lib/site-packages/sympy/assumptions/__pycache__/ask.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/assumptions/__pycache__/ask.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/assumptions/__pycache__/ask_generated.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/assumptions/__pycache__/ask_generated.cpython-311.pyc index f31f798e..60bb1ea5 100644 Binary files a/.venv/Lib/site-packages/sympy/assumptions/__pycache__/ask_generated.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/assumptions/__pycache__/ask_generated.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/assumptions/__pycache__/assume.cpython-311.pyc b/.venv/Lib/site-packages/sympy/assumptions/__pycache__/assume.cpython-311.pyc index f1ebe6b7..535e4f12 100644 Binary files a/.venv/Lib/site-packages/sympy/assumptions/__pycache__/assume.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/assumptions/__pycache__/assume.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/assumptions/__pycache__/cnf.cpython-311.pyc b/.venv/Lib/site-packages/sympy/assumptions/__pycache__/cnf.cpython-311.pyc index 7dd6147d..27320fcf 100644 Binary files a/.venv/Lib/site-packages/sympy/assumptions/__pycache__/cnf.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/assumptions/__pycache__/cnf.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/assumptions/__pycache__/refine.cpython-311.pyc b/.venv/Lib/site-packages/sympy/assumptions/__pycache__/refine.cpython-311.pyc index 6926483c..8369b8c3 100644 Binary files a/.venv/Lib/site-packages/sympy/assumptions/__pycache__/refine.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/assumptions/__pycache__/refine.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/assumptions/relation/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/assumptions/relation/__pycache__/__init__.cpython-311.pyc index fb3e0428..4867beab 100644 Binary files a/.venv/Lib/site-packages/sympy/assumptions/relation/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/assumptions/relation/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/assumptions/relation/__pycache__/binrel.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/assumptions/relation/__pycache__/binrel.cpython-311.pyc index c2c0d246..5965b344 100644 Binary files a/.venv/Lib/site-packages/sympy/assumptions/relation/__pycache__/binrel.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/assumptions/relation/__pycache__/binrel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/calculus/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/calculus/__pycache__/__init__.cpython-311.pyc index 9cc7fc2f..61649ed3 100644 Binary files a/.venv/Lib/site-packages/sympy/calculus/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/calculus/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/calculus/__pycache__/accumulationbounds.cpython-311.pyc b/.venv/Lib/site-packages/sympy/calculus/__pycache__/accumulationbounds.cpython-311.pyc index d186c42b..dc03e0e9 100644 Binary files a/.venv/Lib/site-packages/sympy/calculus/__pycache__/accumulationbounds.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/calculus/__pycache__/accumulationbounds.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/calculus/__pycache__/euler.cpython-311.pyc b/.venv/Lib/site-packages/sympy/calculus/__pycache__/euler.cpython-311.pyc index 94eeaa0f..e39409bc 100644 Binary files a/.venv/Lib/site-packages/sympy/calculus/__pycache__/euler.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/calculus/__pycache__/euler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/calculus/__pycache__/finite_diff.cpython-311.pyc b/.venv/Lib/site-packages/sympy/calculus/__pycache__/finite_diff.cpython-311.pyc index a4bace03..dccf56ab 100644 Binary files a/.venv/Lib/site-packages/sympy/calculus/__pycache__/finite_diff.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/calculus/__pycache__/finite_diff.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/calculus/__pycache__/singularities.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/calculus/__pycache__/singularities.cpython-311.pyc index 7c0bb4d9..987f4f38 100644 Binary files a/.venv/Lib/site-packages/sympy/calculus/__pycache__/singularities.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/calculus/__pycache__/singularities.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/calculus/__pycache__/util.cpython-311.pyc b/.venv/Lib/site-packages/sympy/calculus/__pycache__/util.cpython-311.pyc index c987fa1e..e1c49a74 100644 Binary files a/.venv/Lib/site-packages/sympy/calculus/__pycache__/util.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/calculus/__pycache__/util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/concrete/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/concrete/__pycache__/__init__.cpython-311.pyc index 49a7d4bc..a5679be8 100644 Binary files a/.venv/Lib/site-packages/sympy/concrete/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/concrete/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/concrete/__pycache__/expr_with_intlimits.cpython-311.pyc b/.venv/Lib/site-packages/sympy/concrete/__pycache__/expr_with_intlimits.cpython-311.pyc index 2c927703..c5e6a39f 100644 Binary files a/.venv/Lib/site-packages/sympy/concrete/__pycache__/expr_with_intlimits.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/concrete/__pycache__/expr_with_intlimits.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/concrete/__pycache__/expr_with_limits.cpython-311.pyc b/.venv/Lib/site-packages/sympy/concrete/__pycache__/expr_with_limits.cpython-311.pyc index 8e179103..f5de8e52 100644 Binary files a/.venv/Lib/site-packages/sympy/concrete/__pycache__/expr_with_limits.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/concrete/__pycache__/expr_with_limits.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/concrete/__pycache__/gosper.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/concrete/__pycache__/gosper.cpython-311.pyc index 26ba215d..237f94ae 100644 Binary files a/.venv/Lib/site-packages/sympy/concrete/__pycache__/gosper.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/concrete/__pycache__/gosper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/concrete/__pycache__/products.cpython-311.pyc b/.venv/Lib/site-packages/sympy/concrete/__pycache__/products.cpython-311.pyc index a6abc5a3..466f41b0 100644 Binary files a/.venv/Lib/site-packages/sympy/concrete/__pycache__/products.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/concrete/__pycache__/products.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/concrete/__pycache__/summations.cpython-311.pyc b/.venv/Lib/site-packages/sympy/concrete/__pycache__/summations.cpython-311.pyc index 0b6655f0..6b10db8e 100644 Binary files a/.venv/Lib/site-packages/sympy/concrete/__pycache__/summations.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/concrete/__pycache__/summations.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/__init__.cpython-311.pyc index c4b0aa1b..d53df969 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/_print_helpers.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/_print_helpers.cpython-311.pyc index 6a22b2bb..408acabf 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/_print_helpers.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/_print_helpers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/add.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/add.cpython-311.pyc index 96c2b2c1..350b5163 100644 Binary files 
a/.venv/Lib/site-packages/sympy/core/__pycache__/add.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/add.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/alphabets.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/alphabets.cpython-311.pyc index 14210df6..3ab180a3 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/alphabets.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/alphabets.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/assumptions.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/assumptions.cpython-311.pyc index f2341a02..799f390b 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/assumptions.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/assumptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/assumptions_generated.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/assumptions_generated.cpython-311.pyc index 68e8c771..707f62f5 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/assumptions_generated.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/assumptions_generated.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/basic.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/basic.cpython-311.pyc index 0e2d0fd9..4b70eaf1 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/basic.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/basic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/cache.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/cache.cpython-311.pyc index 827c4451..3a26cd73 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/cache.cpython-311.pyc and 
b/.venv/Lib/site-packages/sympy/core/__pycache__/cache.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/containers.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/containers.cpython-311.pyc index 475086a1..b780ddbb 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/containers.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/containers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/core.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/core.cpython-311.pyc index da92100a..9e9b11b7 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/core.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/coreerrors.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/coreerrors.cpython-311.pyc index fc4b734e..7063b979 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/coreerrors.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/coreerrors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/decorators.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/decorators.cpython-311.pyc index 41cb27a9..930fab91 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/decorators.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/decorators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/evalf.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/evalf.cpython-311.pyc index c8bc0d88..aa86a250 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/evalf.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/evalf.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/expr.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/core/__pycache__/expr.cpython-311.pyc index 86065441..7bf79029 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/expr.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/expr.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/exprtools.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/exprtools.cpython-311.pyc index 5cc89bc2..53a6a2f5 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/exprtools.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/exprtools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/facts.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/facts.cpython-311.pyc index 1a9f092d..d9609f4e 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/facts.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/facts.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/function.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/function.cpython-311.pyc index 3d4b9637..cf50ad50 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/function.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/function.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/kind.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/kind.cpython-311.pyc index 40072dd0..eb155b44 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/kind.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/kind.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/logic.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/logic.cpython-311.pyc index 5e36028b..ef6b3acc 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/logic.cpython-311.pyc and 
b/.venv/Lib/site-packages/sympy/core/__pycache__/logic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/mod.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/mod.cpython-311.pyc index 2bc679ad..9fa9e768 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/mod.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/mod.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/mul.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/mul.cpython-311.pyc index b0fcdd6e..6906955c 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/mul.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/mul.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/multidimensional.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/multidimensional.cpython-311.pyc index 20870b0b..c1d596d8 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/multidimensional.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/multidimensional.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/numbers.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/numbers.cpython-311.pyc index c80cbe09..98ddb5a4 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/numbers.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/numbers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/operations.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/operations.cpython-311.pyc index d19c02f8..feeebd17 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/operations.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/operations.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/parameters.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/core/__pycache__/parameters.cpython-311.pyc index b92eeaef..00266d1e 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/parameters.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/parameters.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/power.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/power.cpython-311.pyc index d6ae59b6..651078ac 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/power.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/power.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/random.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/random.cpython-311.pyc index bfa4ae63..80c0c351 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/random.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/random.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/relational.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/relational.cpython-311.pyc index f1dfdfd4..1cdcd671 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/relational.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/relational.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/rules.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/rules.cpython-311.pyc index a00fe79a..80cd8630 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/rules.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/rules.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/singleton.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/singleton.cpython-311.pyc index 4eb3ca42..bd78060d 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/singleton.cpython-311.pyc and 
b/.venv/Lib/site-packages/sympy/core/__pycache__/singleton.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/sorting.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/sorting.cpython-311.pyc index df997585..d258beb7 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/sorting.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/sorting.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/symbol.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/symbol.cpython-311.pyc index 0407c30d..03a35ff6 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/symbol.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/symbol.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/sympify.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/sympify.cpython-311.pyc index a069cd12..2208d370 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/sympify.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/sympify.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/core/__pycache__/traversal.cpython-311.pyc b/.venv/Lib/site-packages/sympy/core/__pycache__/traversal.cpython-311.pyc index 281834f1..90b12c35 100644 Binary files a/.venv/Lib/site-packages/sympy/core/__pycache__/traversal.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/core/__pycache__/traversal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/discrete/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/discrete/__pycache__/__init__.cpython-311.pyc index 5b5d09ea..89c5ba9d 100644 Binary files a/.venv/Lib/site-packages/sympy/discrete/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/discrete/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/discrete/__pycache__/convolutions.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/discrete/__pycache__/convolutions.cpython-311.pyc index a3907c7b..124e730c 100644 Binary files a/.venv/Lib/site-packages/sympy/discrete/__pycache__/convolutions.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/discrete/__pycache__/convolutions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/discrete/__pycache__/transforms.cpython-311.pyc b/.venv/Lib/site-packages/sympy/discrete/__pycache__/transforms.cpython-311.pyc index 0d3549b4..955efa20 100644 Binary files a/.venv/Lib/site-packages/sympy/discrete/__pycache__/transforms.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/discrete/__pycache__/transforms.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/external/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/external/__pycache__/__init__.cpython-311.pyc index f6ba9071..70dbb744 100644 Binary files a/.venv/Lib/site-packages/sympy/external/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/external/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/external/__pycache__/gmpy.cpython-311.pyc b/.venv/Lib/site-packages/sympy/external/__pycache__/gmpy.cpython-311.pyc index 75f8a78d..2c9b729b 100644 Binary files a/.venv/Lib/site-packages/sympy/external/__pycache__/gmpy.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/external/__pycache__/gmpy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/external/__pycache__/importtools.cpython-311.pyc b/.venv/Lib/site-packages/sympy/external/__pycache__/importtools.cpython-311.pyc index a89bb177..f94d9164 100644 Binary files a/.venv/Lib/site-packages/sympy/external/__pycache__/importtools.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/external/__pycache__/importtools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/external/__pycache__/pythonmpq.cpython-311.pyc b/.venv/Lib/site-packages/sympy/external/__pycache__/pythonmpq.cpython-311.pyc index 
ede9446f..7b0ace43 100644 Binary files a/.venv/Lib/site-packages/sympy/external/__pycache__/pythonmpq.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/external/__pycache__/pythonmpq.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/__pycache__/__init__.cpython-311.pyc index f09b467a..08270144 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/combinatorial/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/combinatorial/__pycache__/__init__.cpython-311.pyc index 46f19074..50527d92 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/combinatorial/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/combinatorial/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/combinatorial/__pycache__/factorials.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/combinatorial/__pycache__/factorials.cpython-311.pyc index fbc128c5..9351cbab 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/combinatorial/__pycache__/factorials.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/combinatorial/__pycache__/factorials.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/combinatorial/__pycache__/numbers.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/combinatorial/__pycache__/numbers.cpython-311.pyc index 691b59ea..7ff3d790 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/combinatorial/__pycache__/numbers.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/combinatorial/__pycache__/numbers.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/__init__.cpython-311.pyc index e9403730..6073c507 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/_trigonometric_special.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/_trigonometric_special.cpython-311.pyc index 47b693d5..c372f1fc 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/_trigonometric_special.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/_trigonometric_special.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/complexes.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/complexes.cpython-311.pyc index 29a8b9a6..28f1581d 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/complexes.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/complexes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/exponential.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/exponential.cpython-311.pyc index d5fce77c..f81e2a4c 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/exponential.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/exponential.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/hyperbolic.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/hyperbolic.cpython-311.pyc index def80495..746b4c89 100644 
Binary files a/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/hyperbolic.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/hyperbolic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/integers.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/integers.cpython-311.pyc index b874bf16..a087baad 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/integers.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/integers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/miscellaneous.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/miscellaneous.cpython-311.pyc index e116a83a..7600c843 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/miscellaneous.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/miscellaneous.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/piecewise.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/piecewise.cpython-311.pyc index 5a4e2cde..463487b0 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/piecewise.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/piecewise.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/trigonometric.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/trigonometric.cpython-311.pyc index b49c529e..5cecf998 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/trigonometric.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/elementary/__pycache__/trigonometric.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/__init__.cpython-311.pyc index 1a30327c..dfc1aad9 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/bessel.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/bessel.cpython-311.pyc index db76b7ab..f983d079 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/bessel.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/bessel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/beta_functions.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/beta_functions.cpython-311.pyc index 19248c8d..233ae0de 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/beta_functions.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/beta_functions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/bsplines.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/bsplines.cpython-311.pyc index 94604968..3de906de 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/bsplines.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/bsplines.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/delta_functions.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/delta_functions.cpython-311.pyc index 095e225a..b1d99666 100644 Binary files 
a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/delta_functions.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/delta_functions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/elliptic_integrals.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/elliptic_integrals.cpython-311.pyc index 27ec9bf9..a86f640f 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/elliptic_integrals.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/elliptic_integrals.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/error_functions.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/error_functions.cpython-311.pyc index e428717f..9c3af5e0 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/error_functions.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/error_functions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/gamma_functions.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/gamma_functions.cpython-311.pyc index 53ca075b..a4115b81 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/gamma_functions.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/gamma_functions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/hyper.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/hyper.cpython-311.pyc index fdc3376e..178a50ed 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/hyper.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/hyper.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/mathieu_functions.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/mathieu_functions.cpython-311.pyc index 122c39ed..8a82863b 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/mathieu_functions.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/mathieu_functions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/polynomials.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/polynomials.cpython-311.pyc index ccd5a614..821394ae 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/polynomials.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/polynomials.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/singularity_functions.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/singularity_functions.cpython-311.pyc index 468e8709..c889ecc1 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/singularity_functions.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/singularity_functions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/spherical_harmonics.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/spherical_harmonics.cpython-311.pyc index 929379c2..855fc008 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/spherical_harmonics.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/spherical_harmonics.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/tensor_functions.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/tensor_functions.cpython-311.pyc index 
60b658aa..651dee12 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/tensor_functions.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/tensor_functions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/zeta_functions.cpython-311.pyc b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/zeta_functions.cpython-311.pyc index fb1df548..2a969184 100644 Binary files a/.venv/Lib/site-packages/sympy/functions/special/__pycache__/zeta_functions.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/functions/special/__pycache__/zeta_functions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/geometry/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/geometry/__pycache__/__init__.cpython-311.pyc index 1b404f9e..d13889d4 100644 Binary files a/.venv/Lib/site-packages/sympy/geometry/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/geometry/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/geometry/__pycache__/curve.cpython-311.pyc b/.venv/Lib/site-packages/sympy/geometry/__pycache__/curve.cpython-311.pyc index 29432339..7fe2a4b0 100644 Binary files a/.venv/Lib/site-packages/sympy/geometry/__pycache__/curve.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/geometry/__pycache__/curve.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/geometry/__pycache__/ellipse.cpython-311.pyc b/.venv/Lib/site-packages/sympy/geometry/__pycache__/ellipse.cpython-311.pyc index 9dadc40a..c1bd06dd 100644 Binary files a/.venv/Lib/site-packages/sympy/geometry/__pycache__/ellipse.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/geometry/__pycache__/ellipse.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/geometry/__pycache__/entity.cpython-311.pyc b/.venv/Lib/site-packages/sympy/geometry/__pycache__/entity.cpython-311.pyc index 6611ac5e..d952ef39 100644 
Binary files a/.venv/Lib/site-packages/sympy/geometry/__pycache__/entity.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/geometry/__pycache__/entity.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/geometry/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/sympy/geometry/__pycache__/exceptions.cpython-311.pyc index 81e22c2c..eb889e9a 100644 Binary files a/.venv/Lib/site-packages/sympy/geometry/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/geometry/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/geometry/__pycache__/line.cpython-311.pyc b/.venv/Lib/site-packages/sympy/geometry/__pycache__/line.cpython-311.pyc index ac7af746..4f5241b0 100644 Binary files a/.venv/Lib/site-packages/sympy/geometry/__pycache__/line.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/geometry/__pycache__/line.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/geometry/__pycache__/parabola.cpython-311.pyc b/.venv/Lib/site-packages/sympy/geometry/__pycache__/parabola.cpython-311.pyc index e32665cf..2e78eb77 100644 Binary files a/.venv/Lib/site-packages/sympy/geometry/__pycache__/parabola.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/geometry/__pycache__/parabola.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/geometry/__pycache__/plane.cpython-311.pyc b/.venv/Lib/site-packages/sympy/geometry/__pycache__/plane.cpython-311.pyc index 9f7e5fcf..3d97ddf3 100644 Binary files a/.venv/Lib/site-packages/sympy/geometry/__pycache__/plane.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/geometry/__pycache__/plane.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/geometry/__pycache__/point.cpython-311.pyc b/.venv/Lib/site-packages/sympy/geometry/__pycache__/point.cpython-311.pyc index 081d3e02..5a528250 100644 Binary files a/.venv/Lib/site-packages/sympy/geometry/__pycache__/point.cpython-311.pyc and 
b/.venv/Lib/site-packages/sympy/geometry/__pycache__/point.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/geometry/__pycache__/polygon.cpython-311.pyc b/.venv/Lib/site-packages/sympy/geometry/__pycache__/polygon.cpython-311.pyc index d680a1de..c747a9aa 100644 Binary files a/.venv/Lib/site-packages/sympy/geometry/__pycache__/polygon.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/geometry/__pycache__/polygon.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/geometry/__pycache__/util.cpython-311.pyc b/.venv/Lib/site-packages/sympy/geometry/__pycache__/util.cpython-311.pyc index 60909eea..1d20f801 100644 Binary files a/.venv/Lib/site-packages/sympy/geometry/__pycache__/util.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/geometry/__pycache__/util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/integrals/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/integrals/__pycache__/__init__.cpython-311.pyc index bf8b6ed0..e4931724 100644 Binary files a/.venv/Lib/site-packages/sympy/integrals/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/integrals/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/integrals/__pycache__/deltafunctions.cpython-311.pyc b/.venv/Lib/site-packages/sympy/integrals/__pycache__/deltafunctions.cpython-311.pyc index caab5060..b4a2d566 100644 Binary files a/.venv/Lib/site-packages/sympy/integrals/__pycache__/deltafunctions.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/integrals/__pycache__/deltafunctions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/integrals/__pycache__/integrals.cpython-311.pyc b/.venv/Lib/site-packages/sympy/integrals/__pycache__/integrals.cpython-311.pyc index 9aa1c756..565b2abc 100644 Binary files a/.venv/Lib/site-packages/sympy/integrals/__pycache__/integrals.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/integrals/__pycache__/integrals.cpython-311.pyc differ 
diff --git a/.venv/Lib/site-packages/sympy/integrals/__pycache__/laplace.cpython-311.pyc b/.venv/Lib/site-packages/sympy/integrals/__pycache__/laplace.cpython-311.pyc index 58200944..a5b05c95 100644 Binary files a/.venv/Lib/site-packages/sympy/integrals/__pycache__/laplace.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/integrals/__pycache__/laplace.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/integrals/__pycache__/meijerint.cpython-311.pyc b/.venv/Lib/site-packages/sympy/integrals/__pycache__/meijerint.cpython-311.pyc index 9c8db0b3..71df99ce 100644 Binary files a/.venv/Lib/site-packages/sympy/integrals/__pycache__/meijerint.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/integrals/__pycache__/meijerint.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/integrals/__pycache__/rationaltools.cpython-311.pyc b/.venv/Lib/site-packages/sympy/integrals/__pycache__/rationaltools.cpython-311.pyc index 6ab50832..09234b16 100644 Binary files a/.venv/Lib/site-packages/sympy/integrals/__pycache__/rationaltools.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/integrals/__pycache__/rationaltools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/integrals/__pycache__/singularityfunctions.cpython-311.pyc b/.venv/Lib/site-packages/sympy/integrals/__pycache__/singularityfunctions.cpython-311.pyc index 0e12905e..75515141 100644 Binary files a/.venv/Lib/site-packages/sympy/integrals/__pycache__/singularityfunctions.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/integrals/__pycache__/singularityfunctions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/integrals/__pycache__/transforms.cpython-311.pyc b/.venv/Lib/site-packages/sympy/integrals/__pycache__/transforms.cpython-311.pyc index 4be9bc31..1898a0c9 100644 Binary files a/.venv/Lib/site-packages/sympy/integrals/__pycache__/transforms.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/integrals/__pycache__/transforms.cpython-311.pyc differ diff 
--git a/.venv/Lib/site-packages/sympy/integrals/__pycache__/trigonometry.cpython-311.pyc b/.venv/Lib/site-packages/sympy/integrals/__pycache__/trigonometry.cpython-311.pyc index 429022cb..c2aad9eb 100644 Binary files a/.venv/Lib/site-packages/sympy/integrals/__pycache__/trigonometry.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/integrals/__pycache__/trigonometry.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/interactive/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/interactive/__pycache__/__init__.cpython-311.pyc index f9aed25a..afecbec2 100644 Binary files a/.venv/Lib/site-packages/sympy/interactive/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/interactive/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/interactive/__pycache__/printing.cpython-311.pyc b/.venv/Lib/site-packages/sympy/interactive/__pycache__/printing.cpython-311.pyc index 0b8175c0..a80aaf2d 100644 Binary files a/.venv/Lib/site-packages/sympy/interactive/__pycache__/printing.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/interactive/__pycache__/printing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/interactive/__pycache__/session.cpython-311.pyc b/.venv/Lib/site-packages/sympy/interactive/__pycache__/session.cpython-311.pyc index a3fc29f0..9aa841fe 100644 Binary files a/.venv/Lib/site-packages/sympy/interactive/__pycache__/session.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/interactive/__pycache__/session.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/interactive/__pycache__/traversal.cpython-311.pyc b/.venv/Lib/site-packages/sympy/interactive/__pycache__/traversal.cpython-311.pyc index 5bdbea9b..a2eb74aa 100644 Binary files a/.venv/Lib/site-packages/sympy/interactive/__pycache__/traversal.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/interactive/__pycache__/traversal.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/sympy/logic/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/logic/__pycache__/__init__.cpython-311.pyc index 754afe9d..0d30be38 100644 Binary files a/.venv/Lib/site-packages/sympy/logic/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/logic/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/logic/__pycache__/boolalg.cpython-311.pyc b/.venv/Lib/site-packages/sympy/logic/__pycache__/boolalg.cpython-311.pyc index 31d9f5ea..e96c23fc 100644 Binary files a/.venv/Lib/site-packages/sympy/logic/__pycache__/boolalg.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/logic/__pycache__/boolalg.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/logic/__pycache__/inference.cpython-311.pyc b/.venv/Lib/site-packages/sympy/logic/__pycache__/inference.cpython-311.pyc index b660bd43..50088d87 100644 Binary files a/.venv/Lib/site-packages/sympy/logic/__pycache__/inference.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/logic/__pycache__/inference.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/__pycache__/__init__.cpython-311.pyc index 0905e9e1..38343c69 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/__pycache__/common.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/__pycache__/common.cpython-311.pyc index 67b2bef7..90ad9fdb 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/__pycache__/common.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/__pycache__/common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/__pycache__/decompositions.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/matrices/__pycache__/decompositions.cpython-311.pyc index 9a2fdf6e..d6326e05 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/__pycache__/decompositions.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/__pycache__/decompositions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/__pycache__/dense.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/__pycache__/dense.cpython-311.pyc index 26cbbe68..68a07520 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/__pycache__/dense.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/__pycache__/dense.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/__pycache__/determinant.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/__pycache__/determinant.cpython-311.pyc index 9dc31098..e30dd59c 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/__pycache__/determinant.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/__pycache__/determinant.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/__pycache__/eigen.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/__pycache__/eigen.cpython-311.pyc index 1297140e..5ad4b0d8 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/__pycache__/eigen.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/__pycache__/eigen.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/__pycache__/graph.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/__pycache__/graph.cpython-311.pyc index 25f2bcb4..c6cca433 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/__pycache__/graph.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/__pycache__/graph.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/__pycache__/immutable.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/__pycache__/immutable.cpython-311.pyc index cfd1f024..6030d143 
100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/__pycache__/immutable.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/__pycache__/immutable.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/__pycache__/inverse.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/__pycache__/inverse.cpython-311.pyc index fc053016..657524b7 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/__pycache__/inverse.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/__pycache__/inverse.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/__pycache__/matrices.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/__pycache__/matrices.cpython-311.pyc index 8c9893c7..af5ab2f1 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/__pycache__/matrices.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/__pycache__/matrices.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/__pycache__/reductions.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/__pycache__/reductions.cpython-311.pyc index ac055251..09f4a4a4 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/__pycache__/reductions.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/__pycache__/reductions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/__pycache__/repmatrix.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/__pycache__/repmatrix.cpython-311.pyc index 45db2322..55387398 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/__pycache__/repmatrix.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/__pycache__/repmatrix.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/__pycache__/solvers.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/__pycache__/solvers.cpython-311.pyc index 5f6abdbc..3293a5d0 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/__pycache__/solvers.cpython-311.pyc 
and b/.venv/Lib/site-packages/sympy/matrices/__pycache__/solvers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/__pycache__/sparse.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/__pycache__/sparse.cpython-311.pyc index 9673af20..80c02ade 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/__pycache__/sparse.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/__pycache__/sparse.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/__pycache__/sparsetools.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/__pycache__/sparsetools.cpython-311.pyc index 6ae52b0f..73c43109 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/__pycache__/sparsetools.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/__pycache__/sparsetools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/__pycache__/subspaces.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/__pycache__/subspaces.cpython-311.pyc index a40b5ecc..87702d14 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/__pycache__/subspaces.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/__pycache__/subspaces.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/__pycache__/utilities.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/__pycache__/utilities.cpython-311.pyc index 93953976..d43d1792 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/__pycache__/utilities.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/__pycache__/utilities.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/__init__.cpython-311.pyc index a0c9faa3..170ae8b3 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/_shape.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/_shape.cpython-311.pyc index 6188d433..1e946996 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/_shape.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/_shape.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/adjoint.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/adjoint.cpython-311.pyc index b9a78570..b2649a6b 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/adjoint.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/adjoint.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/blockmatrix.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/blockmatrix.cpython-311.pyc index 0c913f48..e5ce15e8 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/blockmatrix.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/blockmatrix.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/companion.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/companion.cpython-311.pyc index dc28dc53..00427a32 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/companion.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/companion.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/determinant.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/determinant.cpython-311.pyc index 6f29533c..a2194356 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/determinant.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/determinant.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/diagonal.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/diagonal.cpython-311.pyc index f3e4ee7a..8778d4eb 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/diagonal.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/diagonal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/dotproduct.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/dotproduct.cpython-311.pyc index 672f2d10..58b72ab1 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/dotproduct.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/dotproduct.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/funcmatrix.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/funcmatrix.cpython-311.pyc index 326ab030..c21b5af5 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/funcmatrix.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/funcmatrix.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/hadamard.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/hadamard.cpython-311.pyc index f6034fc1..6bd84de2 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/hadamard.cpython-311.pyc and 
b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/hadamard.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/inverse.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/inverse.cpython-311.pyc index 917d56aa..329d1b75 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/inverse.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/inverse.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/kronecker.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/kronecker.cpython-311.pyc index a377988c..78a43c70 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/kronecker.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/kronecker.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/matadd.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/matadd.cpython-311.pyc index dab51c1e..958b4abe 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/matadd.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/matadd.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/matexpr.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/matexpr.cpython-311.pyc index f4698fcf..a5bbf4ed 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/matexpr.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/matexpr.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/matmul.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/matmul.cpython-311.pyc index 
4bb39024..2c0de51d 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/matmul.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/matmul.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/matpow.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/matpow.cpython-311.pyc index e2567ae6..f27a7376 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/matpow.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/matpow.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/permutation.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/permutation.cpython-311.pyc index 7f04d1eb..3c5c445d 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/permutation.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/permutation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/sets.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/sets.cpython-311.pyc index af3bb8e5..f1de6218 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/sets.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/sets.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/slice.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/slice.cpython-311.pyc index 026ce225..45744ad5 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/slice.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/slice.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/special.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/special.cpython-311.pyc index ac5cff00..2896c6db 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/special.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/special.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/trace.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/trace.cpython-311.pyc index 537f7ff3..5f0e35eb 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/trace.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/trace.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/transpose.cpython-311.pyc b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/transpose.cpython-311.pyc index 0b2d5471..3e3c84ba 100644 Binary files a/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/transpose.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/matrices/expressions/__pycache__/transpose.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/multipledispatch/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/multipledispatch/__pycache__/__init__.cpython-311.pyc index 8cd69f7b..b1373a3a 100644 Binary files a/.venv/Lib/site-packages/sympy/multipledispatch/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/multipledispatch/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/multipledispatch/__pycache__/conflict.cpython-311.pyc b/.venv/Lib/site-packages/sympy/multipledispatch/__pycache__/conflict.cpython-311.pyc index 9c38cf85..6c7ea954 100644 Binary files a/.venv/Lib/site-packages/sympy/multipledispatch/__pycache__/conflict.cpython-311.pyc and 
b/.venv/Lib/site-packages/sympy/multipledispatch/__pycache__/conflict.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/multipledispatch/__pycache__/core.cpython-311.pyc b/.venv/Lib/site-packages/sympy/multipledispatch/__pycache__/core.cpython-311.pyc index f85895c5..4d939c04 100644 Binary files a/.venv/Lib/site-packages/sympy/multipledispatch/__pycache__/core.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/multipledispatch/__pycache__/core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/multipledispatch/__pycache__/dispatcher.cpython-311.pyc b/.venv/Lib/site-packages/sympy/multipledispatch/__pycache__/dispatcher.cpython-311.pyc index 83da2538..71963cf2 100644 Binary files a/.venv/Lib/site-packages/sympy/multipledispatch/__pycache__/dispatcher.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/multipledispatch/__pycache__/dispatcher.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/multipledispatch/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/sympy/multipledispatch/__pycache__/utils.cpython-311.pyc index a537bdb1..544afe36 100644 Binary files a/.venv/Lib/site-packages/sympy/multipledispatch/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/multipledispatch/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/__init__.cpython-311.pyc index 4275bbc6..ec977f8f 100644 Binary files a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/continued_fraction.cpython-311.pyc b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/continued_fraction.cpython-311.pyc index d99664b9..f8a4648d 100644 Binary files a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/continued_fraction.cpython-311.pyc 
and b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/continued_fraction.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/digits.cpython-311.pyc b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/digits.cpython-311.pyc index 60c98879..24029958 100644 Binary files a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/digits.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/digits.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/ecm.cpython-311.pyc b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/ecm.cpython-311.pyc index 27d3f6a1..78758db3 100644 Binary files a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/ecm.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/ecm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/egyptian_fraction.cpython-311.pyc b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/egyptian_fraction.cpython-311.pyc index 174afe7b..15ecf8ff 100644 Binary files a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/egyptian_fraction.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/egyptian_fraction.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/factor_.cpython-311.pyc b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/factor_.cpython-311.pyc index d7d4d3ab..46203974 100644 Binary files a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/factor_.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/factor_.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/generate.cpython-311.pyc b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/generate.cpython-311.pyc index f16bfcd7..ab034a4e 100644 Binary files a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/generate.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/generate.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/modular.cpython-311.pyc b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/modular.cpython-311.pyc index d2728950..c2a0423e 100644 Binary files a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/modular.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/modular.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/multinomial.cpython-311.pyc b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/multinomial.cpython-311.pyc index 5692444a..76b80158 100644 Binary files a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/multinomial.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/multinomial.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/partitions_.cpython-311.pyc b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/partitions_.cpython-311.pyc index ce246c6a..03989ce0 100644 Binary files a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/partitions_.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/partitions_.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/primetest.cpython-311.pyc b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/primetest.cpython-311.pyc index 1c0e4177..dbe87428 100644 Binary files a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/primetest.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/primetest.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/qs.cpython-311.pyc b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/qs.cpython-311.pyc index db2e35b4..29a641fc 100644 Binary files a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/qs.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/qs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/residue_ntheory.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/residue_ntheory.cpython-311.pyc index 26edb367..13ca9ece 100644 Binary files a/.venv/Lib/site-packages/sympy/ntheory/__pycache__/residue_ntheory.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/ntheory/__pycache__/residue_ntheory.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/parsing/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/parsing/__pycache__/__init__.cpython-311.pyc index 34ff251c..f66b3fb7 100644 Binary files a/.venv/Lib/site-packages/sympy/parsing/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/parsing/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/parsing/__pycache__/sympy_parser.cpython-311.pyc b/.venv/Lib/site-packages/sympy/parsing/__pycache__/sympy_parser.cpython-311.pyc index e3282661..e076b2ed 100644 Binary files a/.venv/Lib/site-packages/sympy/parsing/__pycache__/sympy_parser.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/parsing/__pycache__/sympy_parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/plotting/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/plotting/__pycache__/__init__.cpython-311.pyc index 79414d44..cfa60461 100644 Binary files a/.venv/Lib/site-packages/sympy/plotting/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/plotting/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/plotting/__pycache__/experimental_lambdify.cpython-311.pyc b/.venv/Lib/site-packages/sympy/plotting/__pycache__/experimental_lambdify.cpython-311.pyc index a027cd81..d9d67a7e 100644 Binary files a/.venv/Lib/site-packages/sympy/plotting/__pycache__/experimental_lambdify.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/plotting/__pycache__/experimental_lambdify.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/plotting/__pycache__/plot.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/plotting/__pycache__/plot.cpython-311.pyc index f0057633..2341b333 100644 Binary files a/.venv/Lib/site-packages/sympy/plotting/__pycache__/plot.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/plotting/__pycache__/plot.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/plotting/__pycache__/plot_implicit.cpython-311.pyc b/.venv/Lib/site-packages/sympy/plotting/__pycache__/plot_implicit.cpython-311.pyc index 67328791..dbd3ab46 100644 Binary files a/.venv/Lib/site-packages/sympy/plotting/__pycache__/plot_implicit.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/plotting/__pycache__/plot_implicit.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/plotting/__pycache__/textplot.cpython-311.pyc b/.venv/Lib/site-packages/sympy/plotting/__pycache__/textplot.cpython-311.pyc index 75a924a8..1f1d3450 100644 Binary files a/.venv/Lib/site-packages/sympy/plotting/__pycache__/textplot.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/plotting/__pycache__/textplot.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/plotting/intervalmath/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/plotting/intervalmath/__pycache__/__init__.cpython-311.pyc index 18f3fbdf..75a83ffc 100644 Binary files a/.venv/Lib/site-packages/sympy/plotting/intervalmath/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/plotting/intervalmath/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/plotting/intervalmath/__pycache__/interval_arithmetic.cpython-311.pyc b/.venv/Lib/site-packages/sympy/plotting/intervalmath/__pycache__/interval_arithmetic.cpython-311.pyc index a5e9bd69..18651930 100644 Binary files a/.venv/Lib/site-packages/sympy/plotting/intervalmath/__pycache__/interval_arithmetic.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/plotting/intervalmath/__pycache__/interval_arithmetic.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/sympy/plotting/intervalmath/__pycache__/interval_membership.cpython-311.pyc b/.venv/Lib/site-packages/sympy/plotting/intervalmath/__pycache__/interval_membership.cpython-311.pyc index 3d70b017..ab11bce9 100644 Binary files a/.venv/Lib/site-packages/sympy/plotting/intervalmath/__pycache__/interval_membership.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/plotting/intervalmath/__pycache__/interval_membership.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/plotting/intervalmath/__pycache__/lib_interval.cpython-311.pyc b/.venv/Lib/site-packages/sympy/plotting/intervalmath/__pycache__/lib_interval.cpython-311.pyc index 0ec0b444..d96904f1 100644 Binary files a/.venv/Lib/site-packages/sympy/plotting/intervalmath/__pycache__/lib_interval.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/plotting/intervalmath/__pycache__/lib_interval.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/plotting/pygletplot/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/plotting/pygletplot/__pycache__/__init__.cpython-311.pyc index 3ee6b902..60dcc1b1 100644 Binary files a/.venv/Lib/site-packages/sympy/plotting/pygletplot/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/plotting/pygletplot/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/__init__.cpython-311.pyc index 8158b652..5c407746 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/appellseqs.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/appellseqs.cpython-311.pyc index 6e7482be..030b2b36 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/appellseqs.cpython-311.pyc and 
b/.venv/Lib/site-packages/sympy/polys/__pycache__/appellseqs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/compatibility.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/compatibility.cpython-311.pyc index 71f6177e..ec1bf5d2 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/compatibility.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/compatibility.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/constructor.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/constructor.cpython-311.pyc index 4cbb64e4..c28987f5 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/constructor.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/constructor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/densearith.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/densearith.cpython-311.pyc index 98bbd513..023e602c 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/densearith.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/densearith.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/densebasic.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/densebasic.cpython-311.pyc index b1aed129..2bd41915 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/densebasic.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/densebasic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/densetools.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/densetools.cpython-311.pyc index fdc9bb3a..63cbb543 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/densetools.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/densetools.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/sympy/polys/__pycache__/domainmatrix.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/domainmatrix.cpython-311.pyc index 6273fb7b..22be36bd 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/domainmatrix.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/domainmatrix.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/euclidtools.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/euclidtools.cpython-311.pyc index 37342318..f0d6c8a8 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/euclidtools.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/euclidtools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/factortools.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/factortools.cpython-311.pyc index b35704fd..9ecfb467 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/factortools.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/factortools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/fglmtools.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/fglmtools.cpython-311.pyc index df0ff10a..f6f9c368 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/fglmtools.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/fglmtools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/fields.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/fields.cpython-311.pyc index e023916e..ba3ffc98 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/fields.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/fields.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/galoistools.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/polys/__pycache__/galoistools.cpython-311.pyc index fc16f104..2a0235b4 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/galoistools.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/galoistools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/groebnertools.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/groebnertools.cpython-311.pyc index 7faec9b9..032521e2 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/groebnertools.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/groebnertools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/heuristicgcd.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/heuristicgcd.cpython-311.pyc index c80ca6ae..dc0aaf9c 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/heuristicgcd.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/heuristicgcd.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/monomials.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/monomials.cpython-311.pyc index 3347a0c4..7bd5480a 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/monomials.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/monomials.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/orderings.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/orderings.cpython-311.pyc index b50d7076..021715e5 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/orderings.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/orderings.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/orthopolys.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/orthopolys.cpython-311.pyc index 224532fe..d172f82e 100644 
Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/orthopolys.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/orthopolys.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/partfrac.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/partfrac.cpython-311.pyc index c675ed21..fb28075d 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/partfrac.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/partfrac.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/polyclasses.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/polyclasses.cpython-311.pyc index b18bf5ab..e8ab6819 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/polyclasses.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/polyclasses.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/polyconfig.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/polyconfig.cpython-311.pyc index 81d1d0de..e270f3eb 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/polyconfig.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/polyconfig.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/polyerrors.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/polyerrors.cpython-311.pyc index b3df7586..18b3059d 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/polyerrors.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/polyerrors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/polyfuncs.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/polyfuncs.cpython-311.pyc index 011fc834..15b6a172 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/polyfuncs.cpython-311.pyc and 
b/.venv/Lib/site-packages/sympy/polys/__pycache__/polyfuncs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/polyoptions.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/polyoptions.cpython-311.pyc index 565e263b..12217713 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/polyoptions.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/polyoptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/polyquinticconst.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/polyquinticconst.cpython-311.pyc index 026354a7..58393e43 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/polyquinticconst.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/polyquinticconst.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/polyroots.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/polyroots.cpython-311.pyc index a256642c..3dd63859 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/polyroots.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/polyroots.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/polytools.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/polytools.cpython-311.pyc index 36f232d9..c33a1cc7 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/polytools.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/polytools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/polyutils.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/polyutils.cpython-311.pyc index e1aed142..57715383 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/polyutils.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/polyutils.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/sympy/polys/__pycache__/rationaltools.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/rationaltools.cpython-311.pyc index ee2231f0..f2f2c088 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/rationaltools.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/rationaltools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/ring_series.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/ring_series.cpython-311.pyc index 117f358b..2b1ed10a 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/ring_series.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/ring_series.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/rings.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/rings.cpython-311.pyc index 1dde685b..a30122e9 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/rings.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/rings.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/rootisolation.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/rootisolation.cpython-311.pyc index 9f6613ac..f63aaa0a 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/rootisolation.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/rootisolation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/rootoftools.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/rootoftools.cpython-311.pyc index 35c54f40..237d79e9 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/rootoftools.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/rootoftools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/solvers.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/polys/__pycache__/solvers.cpython-311.pyc index 2f1b84c6..7a1136b5 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/solvers.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/solvers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/specialpolys.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/specialpolys.cpython-311.pyc index 0dede234..e6d1fba6 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/specialpolys.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/specialpolys.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/__pycache__/sqfreetools.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/__pycache__/sqfreetools.cpython-311.pyc index 7eef3568..84125d15 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/__pycache__/sqfreetools.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/__pycache__/sqfreetools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/agca/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/agca/__pycache__/__init__.cpython-311.pyc index bcd91340..40631e05 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/agca/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/agca/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/agca/__pycache__/extensions.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/agca/__pycache__/extensions.cpython-311.pyc index ef638888..fbab80dd 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/agca/__pycache__/extensions.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/agca/__pycache__/extensions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/agca/__pycache__/homomorphisms.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/polys/agca/__pycache__/homomorphisms.cpython-311.pyc index 219d3d49..1d318339 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/agca/__pycache__/homomorphisms.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/agca/__pycache__/homomorphisms.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/agca/__pycache__/ideals.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/agca/__pycache__/ideals.cpython-311.pyc index a8e4f655..2f0b5554 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/agca/__pycache__/ideals.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/agca/__pycache__/ideals.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/agca/__pycache__/modules.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/agca/__pycache__/modules.cpython-311.pyc index 0c62ce2a..4a0591bb 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/agca/__pycache__/modules.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/agca/__pycache__/modules.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/__init__.cpython-311.pyc index 839cf3f9..6dc8908c 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/algebraicfield.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/algebraicfield.cpython-311.pyc index 568c948e..ed8ab8fe 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/algebraicfield.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/algebraicfield.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/characteristiczero.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/characteristiczero.cpython-311.pyc index edad1550..33fb45ec 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/characteristiczero.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/characteristiczero.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/complexfield.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/complexfield.cpython-311.pyc index 3bf414de..6f3804ac 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/complexfield.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/complexfield.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/compositedomain.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/compositedomain.cpython-311.pyc index 831f2678..9da38281 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/compositedomain.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/compositedomain.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/domain.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/domain.cpython-311.pyc index ab2dd393..8fb0b9b4 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/domain.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/domain.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/domainelement.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/domainelement.cpython-311.pyc index 19e64151..9bfcbaf4 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/domainelement.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/domainelement.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/expressiondomain.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/expressiondomain.cpython-311.pyc index 57b1cd8e..8c672581 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/expressiondomain.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/expressiondomain.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/expressionrawdomain.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/expressionrawdomain.cpython-311.pyc index cb7ed371..dcf3724d 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/expressionrawdomain.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/expressionrawdomain.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/field.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/field.cpython-311.pyc index db39d353..9c6928d0 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/field.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/field.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/finitefield.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/finitefield.cpython-311.pyc index b8f161ff..9e330eab 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/finitefield.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/finitefield.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/fractionfield.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/fractionfield.cpython-311.pyc index c4b10b42..dcbb6d90 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/fractionfield.cpython-311.pyc and 
b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/fractionfield.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/gaussiandomains.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/gaussiandomains.cpython-311.pyc index eb8a92b2..e9e8a868 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/gaussiandomains.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/gaussiandomains.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/gmpyfinitefield.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/gmpyfinitefield.cpython-311.pyc index 8ead0069..13b9d136 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/gmpyfinitefield.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/gmpyfinitefield.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/gmpyintegerring.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/gmpyintegerring.cpython-311.pyc index c172f20a..6b651e18 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/gmpyintegerring.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/gmpyintegerring.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/gmpyrationalfield.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/gmpyrationalfield.cpython-311.pyc index fb233376..93e04f7f 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/gmpyrationalfield.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/gmpyrationalfield.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/groundtypes.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/groundtypes.cpython-311.pyc index 
f99236e8..0c42996f 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/groundtypes.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/groundtypes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/integerring.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/integerring.cpython-311.pyc index 25e8fbe4..61b94a66 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/integerring.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/integerring.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/modularinteger.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/modularinteger.cpython-311.pyc index 23ed04dd..60c1e969 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/modularinteger.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/modularinteger.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/mpelements.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/mpelements.cpython-311.pyc index c0c72623..00b631aa 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/mpelements.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/mpelements.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/polynomialring.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/polynomialring.cpython-311.pyc index c6dbbe71..11f4518c 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/polynomialring.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/polynomialring.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/pythonfinitefield.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/pythonfinitefield.cpython-311.pyc index 1ac8bdc4..5aed6cee 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/pythonfinitefield.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/pythonfinitefield.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/pythonintegerring.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/pythonintegerring.cpython-311.pyc index 9e1bd89f..9567af7f 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/pythonintegerring.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/pythonintegerring.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/pythonrational.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/pythonrational.cpython-311.pyc index 7c325f51..db7660a7 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/pythonrational.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/pythonrational.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/pythonrationalfield.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/pythonrationalfield.cpython-311.pyc index 58c0dae4..0ef1068a 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/pythonrationalfield.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/pythonrationalfield.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/rationalfield.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/rationalfield.cpython-311.pyc index f9fb49b9..925090db 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/rationalfield.cpython-311.pyc and 
b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/rationalfield.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/realfield.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/realfield.cpython-311.pyc index 08562bf1..1efe76fb 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/realfield.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/realfield.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/ring.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/ring.cpython-311.pyc index ddac1b6d..293e8b99 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/ring.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/ring.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/simpledomain.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/simpledomain.cpython-311.pyc index 78ee7a2e..0b9d3cb3 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/simpledomain.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/domains/__pycache__/simpledomain.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/__init__.cpython-311.pyc index 18eb092d..e2a21886 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/_typing.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/_typing.cpython-311.pyc index c254dcf5..93a620ee 100644 Binary files 
a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/_typing.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/_typing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/ddm.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/ddm.cpython-311.pyc index 3990756d..e4acf1d9 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/ddm.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/ddm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/dense.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/dense.cpython-311.pyc index 56b3f331..e0f9c01e 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/dense.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/dense.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/domainmatrix.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/domainmatrix.cpython-311.pyc index 95f8d5d9..5f94521b 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/domainmatrix.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/domainmatrix.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/domainscalar.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/domainscalar.cpython-311.pyc index 68aafbd8..6d63f67a 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/domainscalar.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/domainscalar.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/eigen.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/eigen.cpython-311.pyc index fa236de2..184ff820 100644 
Binary files a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/eigen.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/eigen.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/exceptions.cpython-311.pyc index 33ba6801..f82bdd36 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/linsolve.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/linsolve.cpython-311.pyc index 9181cf75..ac135d2d 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/linsolve.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/linsolve.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/lll.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/lll.cpython-311.pyc index 4dcaa04d..7ef83566 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/lll.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/lll.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/normalforms.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/normalforms.cpython-311.pyc index f47d105e..c6aa798c 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/normalforms.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/normalforms.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/sdm.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/sdm.cpython-311.pyc index 4a6c94d1..0be33af4 100644 
Binary files a/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/sdm.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/matrices/__pycache__/sdm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/__init__.cpython-311.pyc index d43f0bca..b13e6e4e 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/basis.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/basis.cpython-311.pyc index 2c803831..4f5cb73e 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/basis.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/basis.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/exceptions.cpython-311.pyc index de7e1e49..65396ff8 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/galois_resolvents.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/galois_resolvents.cpython-311.pyc index f56d7a71..792e1f5c 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/galois_resolvents.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/galois_resolvents.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/galoisgroups.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/galoisgroups.cpython-311.pyc index 5dcbde86..45d7d7e8 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/galoisgroups.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/galoisgroups.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/minpoly.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/minpoly.cpython-311.pyc index 7a424012..72b7335b 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/minpoly.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/minpoly.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/modules.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/modules.cpython-311.pyc index b71f1af3..2cd445ef 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/modules.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/modules.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/primes.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/primes.cpython-311.pyc index 27a845fd..0c02c26d 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/primes.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/primes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/resolvent_lookup.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/resolvent_lookup.cpython-311.pyc index 9da88b49..41b96509 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/resolvent_lookup.cpython-311.pyc and 
b/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/resolvent_lookup.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/subfield.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/subfield.cpython-311.pyc index a091c370..0c1cfac1 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/subfield.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/subfield.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/utilities.cpython-311.pyc b/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/utilities.cpython-311.pyc index a3ee92b5..10b0215c 100644 Binary files a/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/utilities.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/polys/numberfields/__pycache__/utilities.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/__init__.cpython-311.pyc index 13e7dd5d..a0b7b445 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/codeprinter.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/codeprinter.cpython-311.pyc index 9d027ef3..2e4efdbb 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/codeprinter.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/codeprinter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/conventions.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/conventions.cpython-311.pyc index 66e7b940..fd1914ae 100644 Binary files 
a/.venv/Lib/site-packages/sympy/printing/__pycache__/conventions.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/conventions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/defaults.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/defaults.cpython-311.pyc index 09433584..9c932eb3 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/defaults.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/defaults.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/dot.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/dot.cpython-311.pyc index a73c7e9f..8dbd2ca8 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/dot.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/dot.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/glsl.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/glsl.cpython-311.pyc index 676e131c..3f7af526 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/glsl.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/glsl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/gtk.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/gtk.cpython-311.pyc index 222a22b6..fe94919a 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/gtk.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/gtk.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/jscode.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/jscode.cpython-311.pyc index e568b840..ff08c9f3 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/jscode.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/jscode.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/julia.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/julia.cpython-311.pyc index 9b55cb52..0cc56e01 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/julia.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/julia.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/lambdarepr.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/lambdarepr.cpython-311.pyc index d651ca64..383e58bd 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/lambdarepr.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/lambdarepr.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/latex.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/latex.cpython-311.pyc index 7bda2d8c..94be72b9 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/latex.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/latex.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/maple.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/maple.cpython-311.pyc index 2316ee15..8cf74e68 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/maple.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/maple.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/mathematica.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/mathematica.cpython-311.pyc index 94a99b48..e1f95184 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/mathematica.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/mathematica.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/mathml.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/printing/__pycache__/mathml.cpython-311.pyc index 7034c9e5..0504afc5 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/mathml.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/mathml.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/numpy.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/numpy.cpython-311.pyc index e1310db9..97c8ef97 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/numpy.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/numpy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/octave.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/octave.cpython-311.pyc index a4632b2a..f7d20917 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/octave.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/octave.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/precedence.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/precedence.cpython-311.pyc index 7ba5e0ee..127268bc 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/precedence.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/precedence.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/preview.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/preview.cpython-311.pyc index c90f86c5..f240b442 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/preview.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/preview.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/printer.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/printer.cpython-311.pyc index 99df8a9a..dcca1460 100644 Binary files 
a/.venv/Lib/site-packages/sympy/printing/__pycache__/printer.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/printer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/pycode.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/pycode.cpython-311.pyc index f8fce5c1..b00e41e4 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/pycode.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/pycode.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/python.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/python.cpython-311.pyc index 1706ce35..708b6a0e 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/python.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/python.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/rcode.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/rcode.cpython-311.pyc index a514c6ab..15cfb000 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/rcode.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/rcode.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/repr.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/repr.cpython-311.pyc index 93f61fbe..0239f4ab 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/repr.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/repr.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/rust.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/rust.cpython-311.pyc index 6b904ca6..6f87459e 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/rust.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/rust.cpython-311.pyc differ 
diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/smtlib.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/smtlib.cpython-311.pyc index 929e479c..5634dcea 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/smtlib.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/smtlib.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/str.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/str.cpython-311.pyc index 143bca0d..0d19c669 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/str.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/str.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/tableform.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/tableform.cpython-311.pyc index 4a4de361..a5bd066e 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/tableform.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/tableform.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/__pycache__/tree.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/__pycache__/tree.cpython-311.pyc index 51f2aa5d..3bddfb21 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/__pycache__/tree.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/__pycache__/tree.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/pretty/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/pretty/__pycache__/__init__.cpython-311.pyc index 11b705df..d2c04222 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/pretty/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/pretty/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/pretty/__pycache__/pretty.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/printing/pretty/__pycache__/pretty.cpython-311.pyc index 6cfd0cd8..5172f1af 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/pretty/__pycache__/pretty.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/pretty/__pycache__/pretty.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/pretty/__pycache__/pretty_symbology.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/pretty/__pycache__/pretty_symbology.cpython-311.pyc index 04b77e59..aec8cfb2 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/pretty/__pycache__/pretty_symbology.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/pretty/__pycache__/pretty_symbology.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/printing/pretty/__pycache__/stringpict.cpython-311.pyc b/.venv/Lib/site-packages/sympy/printing/pretty/__pycache__/stringpict.cpython-311.pyc index bf4e9db5..a3827d63 100644 Binary files a/.venv/Lib/site-packages/sympy/printing/pretty/__pycache__/stringpict.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/printing/pretty/__pycache__/stringpict.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/series/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/series/__pycache__/__init__.cpython-311.pyc index 185e9192..99510c54 100644 Binary files a/.venv/Lib/site-packages/sympy/series/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/series/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/series/__pycache__/approximants.cpython-311.pyc b/.venv/Lib/site-packages/sympy/series/__pycache__/approximants.cpython-311.pyc index a9b8f477..0266a5ff 100644 Binary files a/.venv/Lib/site-packages/sympy/series/__pycache__/approximants.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/series/__pycache__/approximants.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/sympy/series/__pycache__/formal.cpython-311.pyc b/.venv/Lib/site-packages/sympy/series/__pycache__/formal.cpython-311.pyc index 80e429d7..6d335299 100644 Binary files a/.venv/Lib/site-packages/sympy/series/__pycache__/formal.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/series/__pycache__/formal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/series/__pycache__/fourier.cpython-311.pyc b/.venv/Lib/site-packages/sympy/series/__pycache__/fourier.cpython-311.pyc index b58d104d..40d01940 100644 Binary files a/.venv/Lib/site-packages/sympy/series/__pycache__/fourier.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/series/__pycache__/fourier.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/series/__pycache__/gruntz.cpython-311.pyc b/.venv/Lib/site-packages/sympy/series/__pycache__/gruntz.cpython-311.pyc index 21f5321f..e794fcb9 100644 Binary files a/.venv/Lib/site-packages/sympy/series/__pycache__/gruntz.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/series/__pycache__/gruntz.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/series/__pycache__/limits.cpython-311.pyc b/.venv/Lib/site-packages/sympy/series/__pycache__/limits.cpython-311.pyc index 5cbf431d..4a55946e 100644 Binary files a/.venv/Lib/site-packages/sympy/series/__pycache__/limits.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/series/__pycache__/limits.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/series/__pycache__/limitseq.cpython-311.pyc b/.venv/Lib/site-packages/sympy/series/__pycache__/limitseq.cpython-311.pyc index c2aa1c06..b4901726 100644 Binary files a/.venv/Lib/site-packages/sympy/series/__pycache__/limitseq.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/series/__pycache__/limitseq.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/series/__pycache__/order.cpython-311.pyc b/.venv/Lib/site-packages/sympy/series/__pycache__/order.cpython-311.pyc index f0838b76..2aab3072 
100644 Binary files a/.venv/Lib/site-packages/sympy/series/__pycache__/order.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/series/__pycache__/order.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/series/__pycache__/residues.cpython-311.pyc b/.venv/Lib/site-packages/sympy/series/__pycache__/residues.cpython-311.pyc index b3f0ca98..be984c57 100644 Binary files a/.venv/Lib/site-packages/sympy/series/__pycache__/residues.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/series/__pycache__/residues.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/series/__pycache__/sequences.cpython-311.pyc b/.venv/Lib/site-packages/sympy/series/__pycache__/sequences.cpython-311.pyc index 8ce029fb..429f4659 100644 Binary files a/.venv/Lib/site-packages/sympy/series/__pycache__/sequences.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/series/__pycache__/sequences.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/series/__pycache__/series.cpython-311.pyc b/.venv/Lib/site-packages/sympy/series/__pycache__/series.cpython-311.pyc index 499da1fa..c17d8e79 100644 Binary files a/.venv/Lib/site-packages/sympy/series/__pycache__/series.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/series/__pycache__/series.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/series/__pycache__/series_class.cpython-311.pyc b/.venv/Lib/site-packages/sympy/series/__pycache__/series_class.cpython-311.pyc index 9cf1fa19..f80387fb 100644 Binary files a/.venv/Lib/site-packages/sympy/series/__pycache__/series_class.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/series/__pycache__/series_class.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/sets/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/sets/__pycache__/__init__.cpython-311.pyc index f9b0ffee..61f40971 100644 Binary files a/.venv/Lib/site-packages/sympy/sets/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/sympy/sets/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/sets/__pycache__/conditionset.cpython-311.pyc b/.venv/Lib/site-packages/sympy/sets/__pycache__/conditionset.cpython-311.pyc index e54455ae..1e50068b 100644 Binary files a/.venv/Lib/site-packages/sympy/sets/__pycache__/conditionset.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/sets/__pycache__/conditionset.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/sets/__pycache__/contains.cpython-311.pyc b/.venv/Lib/site-packages/sympy/sets/__pycache__/contains.cpython-311.pyc index 49e79d93..53a9433d 100644 Binary files a/.venv/Lib/site-packages/sympy/sets/__pycache__/contains.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/sets/__pycache__/contains.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/sets/__pycache__/fancysets.cpython-311.pyc b/.venv/Lib/site-packages/sympy/sets/__pycache__/fancysets.cpython-311.pyc index 4e8268dc..11de4c4a 100644 Binary files a/.venv/Lib/site-packages/sympy/sets/__pycache__/fancysets.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/sets/__pycache__/fancysets.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/sets/__pycache__/ordinals.cpython-311.pyc b/.venv/Lib/site-packages/sympy/sets/__pycache__/ordinals.cpython-311.pyc index 95c500a2..c23db087 100644 Binary files a/.venv/Lib/site-packages/sympy/sets/__pycache__/ordinals.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/sets/__pycache__/ordinals.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/sets/__pycache__/powerset.cpython-311.pyc b/.venv/Lib/site-packages/sympy/sets/__pycache__/powerset.cpython-311.pyc index ebec380c..4ac0d85d 100644 Binary files a/.venv/Lib/site-packages/sympy/sets/__pycache__/powerset.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/sets/__pycache__/powerset.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/sets/__pycache__/sets.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/sets/__pycache__/sets.cpython-311.pyc index b9da87ce..49df6fae 100644 Binary files a/.venv/Lib/site-packages/sympy/sets/__pycache__/sets.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/sets/__pycache__/sets.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/sets/handlers/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/sets/handlers/__pycache__/__init__.cpython-311.pyc index d281c5ff..10ff801d 100644 Binary files a/.venv/Lib/site-packages/sympy/sets/handlers/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/sets/handlers/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/sets/handlers/__pycache__/comparison.cpython-311.pyc b/.venv/Lib/site-packages/sympy/sets/handlers/__pycache__/comparison.cpython-311.pyc index 53c96ef4..21780ca3 100644 Binary files a/.venv/Lib/site-packages/sympy/sets/handlers/__pycache__/comparison.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/sets/handlers/__pycache__/comparison.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/sets/handlers/__pycache__/intersection.cpython-311.pyc b/.venv/Lib/site-packages/sympy/sets/handlers/__pycache__/intersection.cpython-311.pyc index 4700f6ee..423e9b5e 100644 Binary files a/.venv/Lib/site-packages/sympy/sets/handlers/__pycache__/intersection.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/sets/handlers/__pycache__/intersection.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/sets/handlers/__pycache__/union.cpython-311.pyc b/.venv/Lib/site-packages/sympy/sets/handlers/__pycache__/union.cpython-311.pyc index c72b7e20..456bf80c 100644 Binary files a/.venv/Lib/site-packages/sympy/sets/handlers/__pycache__/union.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/sets/handlers/__pycache__/union.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/simplify/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/simplify/__pycache__/__init__.cpython-311.pyc index 69dff3e6..5902d531 100644 Binary files a/.venv/Lib/site-packages/sympy/simplify/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/simplify/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/simplify/__pycache__/combsimp.cpython-311.pyc b/.venv/Lib/site-packages/sympy/simplify/__pycache__/combsimp.cpython-311.pyc index 70b934ae..81082783 100644 Binary files a/.venv/Lib/site-packages/sympy/simplify/__pycache__/combsimp.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/simplify/__pycache__/combsimp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/simplify/__pycache__/cse_main.cpython-311.pyc b/.venv/Lib/site-packages/sympy/simplify/__pycache__/cse_main.cpython-311.pyc index 7a2a4f1e..a7bfa80e 100644 Binary files a/.venv/Lib/site-packages/sympy/simplify/__pycache__/cse_main.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/simplify/__pycache__/cse_main.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/simplify/__pycache__/cse_opts.cpython-311.pyc b/.venv/Lib/site-packages/sympy/simplify/__pycache__/cse_opts.cpython-311.pyc index d15f691f..51512ea4 100644 Binary files a/.venv/Lib/site-packages/sympy/simplify/__pycache__/cse_opts.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/simplify/__pycache__/cse_opts.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/simplify/__pycache__/epathtools.cpython-311.pyc b/.venv/Lib/site-packages/sympy/simplify/__pycache__/epathtools.cpython-311.pyc index 88fe4bb8..8cc02448 100644 Binary files a/.venv/Lib/site-packages/sympy/simplify/__pycache__/epathtools.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/simplify/__pycache__/epathtools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/simplify/__pycache__/fu.cpython-311.pyc b/.venv/Lib/site-packages/sympy/simplify/__pycache__/fu.cpython-311.pyc index 3932f76a..56751557 
100644 Binary files a/.venv/Lib/site-packages/sympy/simplify/__pycache__/fu.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/simplify/__pycache__/fu.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/simplify/__pycache__/gammasimp.cpython-311.pyc b/.venv/Lib/site-packages/sympy/simplify/__pycache__/gammasimp.cpython-311.pyc index 985d3ce1..290aaa73 100644 Binary files a/.venv/Lib/site-packages/sympy/simplify/__pycache__/gammasimp.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/simplify/__pycache__/gammasimp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/simplify/__pycache__/hyperexpand.cpython-311.pyc b/.venv/Lib/site-packages/sympy/simplify/__pycache__/hyperexpand.cpython-311.pyc index 0d66d8cb..369e689d 100644 Binary files a/.venv/Lib/site-packages/sympy/simplify/__pycache__/hyperexpand.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/simplify/__pycache__/hyperexpand.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/simplify/__pycache__/powsimp.cpython-311.pyc b/.venv/Lib/site-packages/sympy/simplify/__pycache__/powsimp.cpython-311.pyc index a152313d..a7659a55 100644 Binary files a/.venv/Lib/site-packages/sympy/simplify/__pycache__/powsimp.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/simplify/__pycache__/powsimp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/simplify/__pycache__/radsimp.cpython-311.pyc b/.venv/Lib/site-packages/sympy/simplify/__pycache__/radsimp.cpython-311.pyc index 9efd4788..1e3ffbeb 100644 Binary files a/.venv/Lib/site-packages/sympy/simplify/__pycache__/radsimp.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/simplify/__pycache__/radsimp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/simplify/__pycache__/ratsimp.cpython-311.pyc b/.venv/Lib/site-packages/sympy/simplify/__pycache__/ratsimp.cpython-311.pyc index a04ca57a..d2a28336 100644 Binary files a/.venv/Lib/site-packages/sympy/simplify/__pycache__/ratsimp.cpython-311.pyc and 
b/.venv/Lib/site-packages/sympy/simplify/__pycache__/ratsimp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/simplify/__pycache__/simplify.cpython-311.pyc b/.venv/Lib/site-packages/sympy/simplify/__pycache__/simplify.cpython-311.pyc index fd57eb15..373bac7e 100644 Binary files a/.venv/Lib/site-packages/sympy/simplify/__pycache__/simplify.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/simplify/__pycache__/simplify.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/simplify/__pycache__/sqrtdenest.cpython-311.pyc b/.venv/Lib/site-packages/sympy/simplify/__pycache__/sqrtdenest.cpython-311.pyc index 55c14809..4384f098 100644 Binary files a/.venv/Lib/site-packages/sympy/simplify/__pycache__/sqrtdenest.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/simplify/__pycache__/sqrtdenest.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/simplify/__pycache__/trigsimp.cpython-311.pyc b/.venv/Lib/site-packages/sympy/simplify/__pycache__/trigsimp.cpython-311.pyc index 62363d71..c8c1bcf0 100644 Binary files a/.venv/Lib/site-packages/sympy/simplify/__pycache__/trigsimp.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/simplify/__pycache__/trigsimp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/solvers/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/solvers/__pycache__/__init__.cpython-311.pyc index 8bf20d71..0610518a 100644 Binary files a/.venv/Lib/site-packages/sympy/solvers/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/solvers/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/solvers/__pycache__/bivariate.cpython-311.pyc b/.venv/Lib/site-packages/sympy/solvers/__pycache__/bivariate.cpython-311.pyc index f5c0e7b1..684b0943 100644 Binary files a/.venv/Lib/site-packages/sympy/solvers/__pycache__/bivariate.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/solvers/__pycache__/bivariate.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/sympy/solvers/__pycache__/decompogen.cpython-311.pyc b/.venv/Lib/site-packages/sympy/solvers/__pycache__/decompogen.cpython-311.pyc index ca6bc93f..5542532e 100644 Binary files a/.venv/Lib/site-packages/sympy/solvers/__pycache__/decompogen.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/solvers/__pycache__/decompogen.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/solvers/__pycache__/deutils.cpython-311.pyc b/.venv/Lib/site-packages/sympy/solvers/__pycache__/deutils.cpython-311.pyc index 10ed34aa..858c0c59 100644 Binary files a/.venv/Lib/site-packages/sympy/solvers/__pycache__/deutils.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/solvers/__pycache__/deutils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/solvers/__pycache__/inequalities.cpython-311.pyc b/.venv/Lib/site-packages/sympy/solvers/__pycache__/inequalities.cpython-311.pyc index eb1f025e..139cbdb7 100644 Binary files a/.venv/Lib/site-packages/sympy/solvers/__pycache__/inequalities.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/solvers/__pycache__/inequalities.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/solvers/__pycache__/pde.cpython-311.pyc b/.venv/Lib/site-packages/sympy/solvers/__pycache__/pde.cpython-311.pyc index c7eaa720..64896531 100644 Binary files a/.venv/Lib/site-packages/sympy/solvers/__pycache__/pde.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/solvers/__pycache__/pde.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/solvers/__pycache__/polysys.cpython-311.pyc b/.venv/Lib/site-packages/sympy/solvers/__pycache__/polysys.cpython-311.pyc index 050d2c27..5864403d 100644 Binary files a/.venv/Lib/site-packages/sympy/solvers/__pycache__/polysys.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/solvers/__pycache__/polysys.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/solvers/__pycache__/recurr.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/solvers/__pycache__/recurr.cpython-311.pyc index f1933c02..0794882f 100644 Binary files a/.venv/Lib/site-packages/sympy/solvers/__pycache__/recurr.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/solvers/__pycache__/recurr.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/solvers/__pycache__/solvers.cpython-311.pyc b/.venv/Lib/site-packages/sympy/solvers/__pycache__/solvers.cpython-311.pyc index cc0d095c..dc65cc6c 100644 Binary files a/.venv/Lib/site-packages/sympy/solvers/__pycache__/solvers.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/solvers/__pycache__/solvers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/solvers/__pycache__/solveset.cpython-311.pyc b/.venv/Lib/site-packages/sympy/solvers/__pycache__/solveset.cpython-311.pyc index 3575f73f..273611b6 100644 Binary files a/.venv/Lib/site-packages/sympy/solvers/__pycache__/solveset.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/solvers/__pycache__/solveset.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/solvers/diophantine/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/solvers/diophantine/__pycache__/__init__.cpython-311.pyc index 0fd9aa7f..4dda7bbf 100644 Binary files a/.venv/Lib/site-packages/sympy/solvers/diophantine/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/solvers/diophantine/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/solvers/diophantine/__pycache__/diophantine.cpython-311.pyc b/.venv/Lib/site-packages/sympy/solvers/diophantine/__pycache__/diophantine.cpython-311.pyc index 154fa32f..cd37a259 100644 Binary files a/.venv/Lib/site-packages/sympy/solvers/diophantine/__pycache__/diophantine.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/solvers/diophantine/__pycache__/diophantine.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/__init__.cpython-311.pyc index 5369b61f..3ab23806 100644 Binary files a/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/hypergeometric.cpython-311.pyc b/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/hypergeometric.cpython-311.pyc index 3600b294..5890f064 100644 Binary files a/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/hypergeometric.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/hypergeometric.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/lie_group.cpython-311.pyc b/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/lie_group.cpython-311.pyc index 9ce2a3c4..06fa542d 100644 Binary files a/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/lie_group.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/lie_group.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/nonhomogeneous.cpython-311.pyc b/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/nonhomogeneous.cpython-311.pyc index 590fc1e8..48051743 100644 Binary files a/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/nonhomogeneous.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/nonhomogeneous.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/ode.cpython-311.pyc b/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/ode.cpython-311.pyc index f44e5106..4b2175a8 100644 Binary files a/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/ode.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/ode.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/riccati.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/riccati.cpython-311.pyc index d8a2cf14..4efb324d 100644 Binary files a/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/riccati.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/riccati.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/single.cpython-311.pyc b/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/single.cpython-311.pyc index 246d87ee..643758c1 100644 Binary files a/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/single.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/single.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/subscheck.cpython-311.pyc b/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/subscheck.cpython-311.pyc index e4c23e0e..ab0f1532 100644 Binary files a/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/subscheck.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/subscheck.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/systems.cpython-311.pyc b/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/systems.cpython-311.pyc index 5c05e612..8c253430 100644 Binary files a/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/systems.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/solvers/ode/__pycache__/systems.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/strategies/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/strategies/__pycache__/__init__.cpython-311.pyc index 598a204a..c36b2302 100644 Binary files a/.venv/Lib/site-packages/sympy/strategies/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/strategies/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/strategies/__pycache__/core.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/strategies/__pycache__/core.cpython-311.pyc index 6675c33f..c9151a79 100644 Binary files a/.venv/Lib/site-packages/sympy/strategies/__pycache__/core.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/strategies/__pycache__/core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/strategies/__pycache__/rl.cpython-311.pyc b/.venv/Lib/site-packages/sympy/strategies/__pycache__/rl.cpython-311.pyc index 14bb9282..03d861d8 100644 Binary files a/.venv/Lib/site-packages/sympy/strategies/__pycache__/rl.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/strategies/__pycache__/rl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/strategies/__pycache__/tools.cpython-311.pyc b/.venv/Lib/site-packages/sympy/strategies/__pycache__/tools.cpython-311.pyc index 18a9c063..f3ba66ea 100644 Binary files a/.venv/Lib/site-packages/sympy/strategies/__pycache__/tools.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/strategies/__pycache__/tools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/strategies/__pycache__/traverse.cpython-311.pyc b/.venv/Lib/site-packages/sympy/strategies/__pycache__/traverse.cpython-311.pyc index 55e845dd..db677395 100644 Binary files a/.venv/Lib/site-packages/sympy/strategies/__pycache__/traverse.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/strategies/__pycache__/traverse.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/strategies/__pycache__/tree.cpython-311.pyc b/.venv/Lib/site-packages/sympy/strategies/__pycache__/tree.cpython-311.pyc index 2a80b6c0..bc1e453a 100644 Binary files a/.venv/Lib/site-packages/sympy/strategies/__pycache__/tree.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/strategies/__pycache__/tree.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/strategies/__pycache__/util.cpython-311.pyc b/.venv/Lib/site-packages/sympy/strategies/__pycache__/util.cpython-311.pyc index 6784cb94..0072388e 100644 Binary files 
a/.venv/Lib/site-packages/sympy/strategies/__pycache__/util.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/strategies/__pycache__/util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/strategies/branch/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/strategies/branch/__pycache__/__init__.cpython-311.pyc index cdf3918e..55460c69 100644 Binary files a/.venv/Lib/site-packages/sympy/strategies/branch/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/strategies/branch/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/strategies/branch/__pycache__/core.cpython-311.pyc b/.venv/Lib/site-packages/sympy/strategies/branch/__pycache__/core.cpython-311.pyc index 47a0aaca..03438555 100644 Binary files a/.venv/Lib/site-packages/sympy/strategies/branch/__pycache__/core.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/strategies/branch/__pycache__/core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/strategies/branch/__pycache__/tools.cpython-311.pyc b/.venv/Lib/site-packages/sympy/strategies/branch/__pycache__/tools.cpython-311.pyc index 69cc79f1..ac0df5c8 100644 Binary files a/.venv/Lib/site-packages/sympy/strategies/branch/__pycache__/tools.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/strategies/branch/__pycache__/tools.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/strategies/branch/__pycache__/traverse.cpython-311.pyc b/.venv/Lib/site-packages/sympy/strategies/branch/__pycache__/traverse.cpython-311.pyc index 03286711..072661ea 100644 Binary files a/.venv/Lib/site-packages/sympy/strategies/branch/__pycache__/traverse.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/strategies/branch/__pycache__/traverse.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/tensor/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/tensor/__pycache__/__init__.cpython-311.pyc index 502018e2..c37ea76b 100644 Binary 
files a/.venv/Lib/site-packages/sympy/tensor/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/tensor/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/tensor/__pycache__/functions.cpython-311.pyc b/.venv/Lib/site-packages/sympy/tensor/__pycache__/functions.cpython-311.pyc index 149b64fe..c5715377 100644 Binary files a/.venv/Lib/site-packages/sympy/tensor/__pycache__/functions.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/tensor/__pycache__/functions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/tensor/__pycache__/index_methods.cpython-311.pyc b/.venv/Lib/site-packages/sympy/tensor/__pycache__/index_methods.cpython-311.pyc index 6d0913df..3cb55eb8 100644 Binary files a/.venv/Lib/site-packages/sympy/tensor/__pycache__/index_methods.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/tensor/__pycache__/index_methods.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/tensor/__pycache__/indexed.cpython-311.pyc b/.venv/Lib/site-packages/sympy/tensor/__pycache__/indexed.cpython-311.pyc index c05826f5..867062d2 100644 Binary files a/.venv/Lib/site-packages/sympy/tensor/__pycache__/indexed.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/tensor/__pycache__/indexed.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/__init__.cpython-311.pyc index fd30cdad..0fef050c 100644 Binary files a/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/array_comprehension.cpython-311.pyc b/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/array_comprehension.cpython-311.pyc index cce6129e..f15cb019 100644 Binary files 
a/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/array_comprehension.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/array_comprehension.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/arrayop.cpython-311.pyc b/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/arrayop.cpython-311.pyc index 2c1de439..103f6bd6 100644 Binary files a/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/arrayop.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/arrayop.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/dense_ndim_array.cpython-311.pyc b/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/dense_ndim_array.cpython-311.pyc index fc2ae36f..bade818c 100644 Binary files a/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/dense_ndim_array.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/dense_ndim_array.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/mutable_ndim_array.cpython-311.pyc b/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/mutable_ndim_array.cpython-311.pyc index 5424f34e..977c0c2c 100644 Binary files a/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/mutable_ndim_array.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/mutable_ndim_array.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/ndim_array.cpython-311.pyc b/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/ndim_array.cpython-311.pyc index ffb2b920..d230606e 100644 Binary files a/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/ndim_array.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/ndim_array.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/sparse_ndim_array.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/sparse_ndim_array.cpython-311.pyc index 204156fe..45053011 100644 Binary files a/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/sparse_ndim_array.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/tensor/array/__pycache__/sparse_ndim_array.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/utilities/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/utilities/__pycache__/__init__.cpython-311.pyc index bb17d5c5..0504a534 100644 Binary files a/.venv/Lib/site-packages/sympy/utilities/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/utilities/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/utilities/__pycache__/decorator.cpython-311.pyc b/.venv/Lib/site-packages/sympy/utilities/__pycache__/decorator.cpython-311.pyc index a13c4a77..d32907be 100644 Binary files a/.venv/Lib/site-packages/sympy/utilities/__pycache__/decorator.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/utilities/__pycache__/decorator.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/utilities/__pycache__/enumerative.cpython-311.pyc b/.venv/Lib/site-packages/sympy/utilities/__pycache__/enumerative.cpython-311.pyc index 61f047c0..bbe7f0e0 100644 Binary files a/.venv/Lib/site-packages/sympy/utilities/__pycache__/enumerative.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/utilities/__pycache__/enumerative.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/utilities/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/sympy/utilities/__pycache__/exceptions.cpython-311.pyc index 884fb7b4..7cf09549 100644 Binary files a/.venv/Lib/site-packages/sympy/utilities/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/utilities/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/utilities/__pycache__/iterables.cpython-311.pyc 
b/.venv/Lib/site-packages/sympy/utilities/__pycache__/iterables.cpython-311.pyc index a2ff4bd9..a0172ddf 100644 Binary files a/.venv/Lib/site-packages/sympy/utilities/__pycache__/iterables.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/utilities/__pycache__/iterables.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/utilities/__pycache__/lambdify.cpython-311.pyc b/.venv/Lib/site-packages/sympy/utilities/__pycache__/lambdify.cpython-311.pyc index eba820df..e5b6c991 100644 Binary files a/.venv/Lib/site-packages/sympy/utilities/__pycache__/lambdify.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/utilities/__pycache__/lambdify.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/utilities/__pycache__/magic.cpython-311.pyc b/.venv/Lib/site-packages/sympy/utilities/__pycache__/magic.cpython-311.pyc index 69e9bc89..08746970 100644 Binary files a/.venv/Lib/site-packages/sympy/utilities/__pycache__/magic.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/utilities/__pycache__/magic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/utilities/__pycache__/memoization.cpython-311.pyc b/.venv/Lib/site-packages/sympy/utilities/__pycache__/memoization.cpython-311.pyc index b5827abd..382e61c1 100644 Binary files a/.venv/Lib/site-packages/sympy/utilities/__pycache__/memoization.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/utilities/__pycache__/memoization.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/utilities/__pycache__/misc.cpython-311.pyc b/.venv/Lib/site-packages/sympy/utilities/__pycache__/misc.cpython-311.pyc index 54580267..f604df58 100644 Binary files a/.venv/Lib/site-packages/sympy/utilities/__pycache__/misc.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/utilities/__pycache__/misc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/utilities/__pycache__/pkgdata.cpython-311.pyc b/.venv/Lib/site-packages/sympy/utilities/__pycache__/pkgdata.cpython-311.pyc index 
1cac003b..06573b5d 100644 Binary files a/.venv/Lib/site-packages/sympy/utilities/__pycache__/pkgdata.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/utilities/__pycache__/pkgdata.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/utilities/__pycache__/source.cpython-311.pyc b/.venv/Lib/site-packages/sympy/utilities/__pycache__/source.cpython-311.pyc index c3b108f3..5d5fedab 100644 Binary files a/.venv/Lib/site-packages/sympy/utilities/__pycache__/source.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/utilities/__pycache__/source.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/utilities/__pycache__/timeutils.cpython-311.pyc b/.venv/Lib/site-packages/sympy/utilities/__pycache__/timeutils.cpython-311.pyc index 72e0b385..36cf11f2 100644 Binary files a/.venv/Lib/site-packages/sympy/utilities/__pycache__/timeutils.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/utilities/__pycache__/timeutils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/sympy/utilities/mathml/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/sympy/utilities/mathml/__pycache__/__init__.cpython-311.pyc index cc01b0b8..a1376f4d 100644 Binary files a/.venv/Lib/site-packages/sympy/utilities/mathml/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/sympy/utilities/mathml/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/thinc/__pycache__/__init__.cpython-311.pyc index eee67c96..2cf63648 100644 Binary files a/.venv/Lib/site-packages/thinc/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/__pycache__/about.cpython-311.pyc b/.venv/Lib/site-packages/thinc/__pycache__/about.cpython-311.pyc index 6060b2f4..1a4a2de1 100644 Binary files a/.venv/Lib/site-packages/thinc/__pycache__/about.cpython-311.pyc and 
b/.venv/Lib/site-packages/thinc/__pycache__/about.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/__pycache__/api.cpython-311.pyc b/.venv/Lib/site-packages/thinc/__pycache__/api.cpython-311.pyc index 7c9eb2d9..e0a01c03 100644 Binary files a/.venv/Lib/site-packages/thinc/__pycache__/api.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/__pycache__/api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/__pycache__/compat.cpython-311.pyc b/.venv/Lib/site-packages/thinc/__pycache__/compat.cpython-311.pyc index fd6729bc..ff5be17f 100644 Binary files a/.venv/Lib/site-packages/thinc/__pycache__/compat.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/__pycache__/compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/__pycache__/config.cpython-311.pyc b/.venv/Lib/site-packages/thinc/__pycache__/config.cpython-311.pyc index 6ca975bc..070f5896 100644 Binary files a/.venv/Lib/site-packages/thinc/__pycache__/config.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/__pycache__/config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/__pycache__/initializers.cpython-311.pyc b/.venv/Lib/site-packages/thinc/__pycache__/initializers.cpython-311.pyc index ef74c67c..6f20bd1f 100644 Binary files a/.venv/Lib/site-packages/thinc/__pycache__/initializers.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/__pycache__/initializers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/__pycache__/loss.cpython-311.pyc b/.venv/Lib/site-packages/thinc/__pycache__/loss.cpython-311.pyc index d2f3629a..be19c8dd 100644 Binary files a/.venv/Lib/site-packages/thinc/__pycache__/loss.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/__pycache__/loss.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/__pycache__/model.cpython-311.pyc b/.venv/Lib/site-packages/thinc/__pycache__/model.cpython-311.pyc index 7c4451ae..322a4865 100644 Binary files 
a/.venv/Lib/site-packages/thinc/__pycache__/model.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/__pycache__/model.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/__pycache__/optimizers.cpython-311.pyc b/.venv/Lib/site-packages/thinc/__pycache__/optimizers.cpython-311.pyc index b5147b82..cd1ebadc 100644 Binary files a/.venv/Lib/site-packages/thinc/__pycache__/optimizers.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/__pycache__/optimizers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/__pycache__/schedules.cpython-311.pyc b/.venv/Lib/site-packages/thinc/__pycache__/schedules.cpython-311.pyc index 6b4478b1..b80fcd91 100644 Binary files a/.venv/Lib/site-packages/thinc/__pycache__/schedules.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/__pycache__/schedules.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/__pycache__/types.cpython-311.pyc b/.venv/Lib/site-packages/thinc/__pycache__/types.cpython-311.pyc index 8ab81449..94cb5b54 100644 Binary files a/.venv/Lib/site-packages/thinc/__pycache__/types.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/__pycache__/types.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/__pycache__/util.cpython-311.pyc b/.venv/Lib/site-packages/thinc/__pycache__/util.cpython-311.pyc index d5f0c68d..c335e9a7 100644 Binary files a/.venv/Lib/site-packages/thinc/__pycache__/util.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/__pycache__/util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/backends/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/thinc/backends/__pycache__/__init__.cpython-311.pyc index e8ea72b9..41786857 100644 Binary files a/.venv/Lib/site-packages/thinc/backends/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/backends/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/backends/__pycache__/_cupy_allocators.cpython-311.pyc 
b/.venv/Lib/site-packages/thinc/backends/__pycache__/_cupy_allocators.cpython-311.pyc index f08bd131..5ba89cf7 100644 Binary files a/.venv/Lib/site-packages/thinc/backends/__pycache__/_cupy_allocators.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/backends/__pycache__/_cupy_allocators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/backends/__pycache__/_custom_kernels.cpython-311.pyc b/.venv/Lib/site-packages/thinc/backends/__pycache__/_custom_kernels.cpython-311.pyc index 14004a53..0cf261f2 100644 Binary files a/.venv/Lib/site-packages/thinc/backends/__pycache__/_custom_kernels.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/backends/__pycache__/_custom_kernels.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/backends/__pycache__/_param_server.cpython-311.pyc b/.venv/Lib/site-packages/thinc/backends/__pycache__/_param_server.cpython-311.pyc index 466cac75..6e2b3f2c 100644 Binary files a/.venv/Lib/site-packages/thinc/backends/__pycache__/_param_server.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/backends/__pycache__/_param_server.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/backends/__pycache__/cupy_ops.cpython-311.pyc b/.venv/Lib/site-packages/thinc/backends/__pycache__/cupy_ops.cpython-311.pyc index 62f85848..ec8faaae 100644 Binary files a/.venv/Lib/site-packages/thinc/backends/__pycache__/cupy_ops.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/backends/__pycache__/cupy_ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/backends/__pycache__/mps_ops.cpython-311.pyc b/.venv/Lib/site-packages/thinc/backends/__pycache__/mps_ops.cpython-311.pyc index 94ff852a..60190a69 100644 Binary files a/.venv/Lib/site-packages/thinc/backends/__pycache__/mps_ops.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/backends/__pycache__/mps_ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/backends/__pycache__/ops.cpython-311.pyc 
b/.venv/Lib/site-packages/thinc/backends/__pycache__/ops.cpython-311.pyc index 414ea1da..cfaf605a 100644 Binary files a/.venv/Lib/site-packages/thinc/backends/__pycache__/ops.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/backends/__pycache__/ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/extra/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/thinc/extra/__pycache__/__init__.cpython-311.pyc index f7abdd6f..da91c838 100644 Binary files a/.venv/Lib/site-packages/thinc/extra/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/extra/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/__init__.cpython-311.pyc index 70875d76..1758004e 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/add.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/add.cpython-311.pyc index c6700021..5f38f6aa 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/add.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/add.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/array_getitem.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/array_getitem.cpython-311.pyc index 335106f9..8e1e1bee 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/array_getitem.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/array_getitem.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/bidirectional.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/bidirectional.cpython-311.pyc index 866bc1f8..aceb9483 100644 Binary files 
a/.venv/Lib/site-packages/thinc/layers/__pycache__/bidirectional.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/bidirectional.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/cauchysimilarity.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/cauchysimilarity.cpython-311.pyc index 5abde48a..cdd05930 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/cauchysimilarity.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/cauchysimilarity.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/chain.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/chain.cpython-311.pyc index 864a6a88..ff69faf1 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/chain.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/chain.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/clipped_linear.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/clipped_linear.cpython-311.pyc index 1f2849c5..db80f099 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/clipped_linear.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/clipped_linear.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/clone.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/clone.cpython-311.pyc index 766f9f24..dbe590eb 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/clone.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/clone.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/concatenate.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/concatenate.cpython-311.pyc index b3db78c3..ae3de3d0 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/concatenate.cpython-311.pyc and 
b/.venv/Lib/site-packages/thinc/layers/__pycache__/concatenate.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/dish.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/dish.cpython-311.pyc index 6c4d2a71..69674418 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/dish.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/dish.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/dropout.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/dropout.cpython-311.pyc index e9928618..04d6134d 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/dropout.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/dropout.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/embed.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/embed.cpython-311.pyc index f5dfc27f..9480a956 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/embed.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/embed.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/expand_window.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/expand_window.cpython-311.pyc index 2b093fe9..701b94b8 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/expand_window.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/expand_window.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/gelu.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/gelu.cpython-311.pyc index 90a51e4f..447df1b5 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/gelu.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/gelu.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/thinc/layers/__pycache__/hard_swish.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/hard_swish.cpython-311.pyc index f3dddd1c..21f906b5 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/hard_swish.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/hard_swish.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/hard_swish_mobilenet.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/hard_swish_mobilenet.cpython-311.pyc index e4275f88..7a23b380 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/hard_swish_mobilenet.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/hard_swish_mobilenet.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/hashembed.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/hashembed.cpython-311.pyc index 1acbd83c..e7980514 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/hashembed.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/hashembed.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/layernorm.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/layernorm.cpython-311.pyc index 48261df4..2a44b92f 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/layernorm.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/layernorm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/linear.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/linear.cpython-311.pyc index 0520675b..db6b888b 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/linear.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/linear.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/list2array.cpython-311.pyc 
b/.venv/Lib/site-packages/thinc/layers/__pycache__/list2array.cpython-311.pyc index f9b69d0c..0e12df5d 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/list2array.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/list2array.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/list2padded.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/list2padded.cpython-311.pyc index 34f6b4d1..bc14057a 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/list2padded.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/list2padded.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/list2ragged.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/list2ragged.cpython-311.pyc index f5eb9f62..b1ad862a 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/list2ragged.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/list2ragged.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/logistic.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/logistic.cpython-311.pyc index 70b81433..c2ade207 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/logistic.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/logistic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/lstm.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/lstm.cpython-311.pyc index 0da6f78b..09a6cbb2 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/lstm.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/lstm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/map_list.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/map_list.cpython-311.pyc index 6a20582d..7e2de358 100644 Binary files 
a/.venv/Lib/site-packages/thinc/layers/__pycache__/map_list.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/map_list.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/maxout.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/maxout.cpython-311.pyc index 99200b59..1493263d 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/maxout.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/maxout.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/mish.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/mish.cpython-311.pyc index 87cfdaf9..7bb76f48 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/mish.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/mish.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/multisoftmax.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/multisoftmax.cpython-311.pyc index aa45abe4..72abeda8 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/multisoftmax.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/multisoftmax.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/mxnetwrapper.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/mxnetwrapper.cpython-311.pyc index fece2d8a..982802c9 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/mxnetwrapper.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/mxnetwrapper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/noop.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/noop.cpython-311.pyc index 6027c967..33933129 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/noop.cpython-311.pyc and 
b/.venv/Lib/site-packages/thinc/layers/__pycache__/noop.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/padded2list.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/padded2list.cpython-311.pyc index 2c081734..288f1a27 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/padded2list.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/padded2list.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/parametricattention.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/parametricattention.cpython-311.pyc index 2b948462..ad75c2e4 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/parametricattention.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/parametricattention.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/parametricattention_v2.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/parametricattention_v2.cpython-311.pyc index 5f7bff5d..33ebcf89 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/parametricattention_v2.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/parametricattention_v2.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/pytorchwrapper.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/pytorchwrapper.cpython-311.pyc index 5ddc5e75..19584bf3 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/pytorchwrapper.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/pytorchwrapper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/ragged2list.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/ragged2list.cpython-311.pyc index b5dbafa5..287cd7bc 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/ragged2list.cpython-311.pyc and 
b/.venv/Lib/site-packages/thinc/layers/__pycache__/ragged2list.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/reduce_first.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/reduce_first.cpython-311.pyc index 81dbeb1b..d3698060 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/reduce_first.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/reduce_first.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/reduce_last.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/reduce_last.cpython-311.pyc index 2b83556b..1748130e 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/reduce_last.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/reduce_last.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/reduce_max.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/reduce_max.cpython-311.pyc index 2a6e15df..7aef64dc 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/reduce_max.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/reduce_max.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/reduce_mean.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/reduce_mean.cpython-311.pyc index 58ccd0dc..55bcbff9 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/reduce_mean.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/reduce_mean.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/reduce_sum.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/reduce_sum.cpython-311.pyc index 817c5aec..83fdd861 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/reduce_sum.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/reduce_sum.cpython-311.pyc differ 
diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/relu.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/relu.cpython-311.pyc index fa25ae2d..29c54641 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/relu.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/relu.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/remap_ids.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/remap_ids.cpython-311.pyc index 6286168a..b0f724d8 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/remap_ids.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/remap_ids.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/residual.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/residual.cpython-311.pyc index e2baffa6..f5ab35d1 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/residual.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/residual.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/resizable.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/resizable.cpython-311.pyc index 5b346d54..4926f1ea 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/resizable.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/resizable.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/siamese.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/siamese.cpython-311.pyc index 51df26e3..ab019084 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/siamese.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/siamese.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/sigmoid.cpython-311.pyc 
b/.venv/Lib/site-packages/thinc/layers/__pycache__/sigmoid.cpython-311.pyc index c934dfb9..66c49cb9 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/sigmoid.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/sigmoid.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/sigmoid_activation.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/sigmoid_activation.cpython-311.pyc index 8ed6f291..ce0e4979 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/sigmoid_activation.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/sigmoid_activation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/softmax.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/softmax.cpython-311.pyc index 5c4c9bba..e26a4058 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/softmax.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/softmax.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/softmax_activation.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/softmax_activation.cpython-311.pyc index 407185c3..d83f0f84 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/softmax_activation.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/softmax_activation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/strings2arrays.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/strings2arrays.cpython-311.pyc index c79d1089..566c1172 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/strings2arrays.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/strings2arrays.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/swish.cpython-311.pyc 
b/.venv/Lib/site-packages/thinc/layers/__pycache__/swish.cpython-311.pyc index 9e016434..736ceda0 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/swish.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/swish.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/tensorflowwrapper.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/tensorflowwrapper.cpython-311.pyc index fa5c3c2b..07710768 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/tensorflowwrapper.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/tensorflowwrapper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/torchscriptwrapper.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/torchscriptwrapper.cpython-311.pyc index 755c9ba7..f35b27cd 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/torchscriptwrapper.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/torchscriptwrapper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/tuplify.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/tuplify.cpython-311.pyc index 2b287e58..1f769fbe 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/tuplify.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/tuplify.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/uniqued.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/uniqued.cpython-311.pyc index 0765b04e..e656ff1c 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/uniqued.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/uniqued.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_array.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_array.cpython-311.pyc index 
6b9647eb..485f2572 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_array.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_array.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_array2d.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_array2d.cpython-311.pyc index 3cf2ae97..8388898c 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_array2d.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_array2d.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_cpu.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_cpu.cpython-311.pyc index 68bb3734..3609f5c7 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_cpu.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_cpu.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_debug.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_debug.cpython-311.pyc index fb35fa91..44f08aa9 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_debug.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_debug.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_flatten.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_flatten.cpython-311.pyc index e3c8b3bc..0053e7df 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_flatten.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_flatten.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_flatten_v2.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_flatten_v2.cpython-311.pyc index 9f3aca49..efd99cd9 100644 Binary files 
a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_flatten_v2.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_flatten_v2.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_getitem.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_getitem.cpython-311.pyc index 300e17b5..f87efe4f 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_getitem.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_getitem.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_list.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_list.cpython-311.pyc index dcb4934d..c76f9d9f 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_list.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_list.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_nvtx_range.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_nvtx_range.cpython-311.pyc index 177a225e..9c1b9446 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_nvtx_range.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_nvtx_range.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_padded.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_padded.cpython-311.pyc index 953f1852..08875f5b 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_padded.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_padded.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_ragged.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_ragged.cpython-311.pyc index c6cbd240..02795b5f 100644 Binary files 
a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_ragged.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_ragged.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_reshape.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_reshape.cpython-311.pyc index a9df3978..6ce99f02 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_reshape.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_reshape.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_signpost_interval.cpython-311.pyc b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_signpost_interval.cpython-311.pyc index 6be08b87..b1a3975a 100644 Binary files a/.venv/Lib/site-packages/thinc/layers/__pycache__/with_signpost_interval.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/layers/__pycache__/with_signpost_interval.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/shims/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/thinc/shims/__pycache__/__init__.cpython-311.pyc index ec5a7048..c1233875 100644 Binary files a/.venv/Lib/site-packages/thinc/shims/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/shims/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/shims/__pycache__/mxnet.cpython-311.pyc b/.venv/Lib/site-packages/thinc/shims/__pycache__/mxnet.cpython-311.pyc index 23a7f6c4..dd2c6ec5 100644 Binary files a/.venv/Lib/site-packages/thinc/shims/__pycache__/mxnet.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/shims/__pycache__/mxnet.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/shims/__pycache__/pytorch.cpython-311.pyc b/.venv/Lib/site-packages/thinc/shims/__pycache__/pytorch.cpython-311.pyc index bc2e5921..fd876534 100644 Binary files a/.venv/Lib/site-packages/thinc/shims/__pycache__/pytorch.cpython-311.pyc and 
b/.venv/Lib/site-packages/thinc/shims/__pycache__/pytorch.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/shims/__pycache__/pytorch_grad_scaler.cpython-311.pyc b/.venv/Lib/site-packages/thinc/shims/__pycache__/pytorch_grad_scaler.cpython-311.pyc index ddc08d0f..f35362c7 100644 Binary files a/.venv/Lib/site-packages/thinc/shims/__pycache__/pytorch_grad_scaler.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/shims/__pycache__/pytorch_grad_scaler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/shims/__pycache__/shim.cpython-311.pyc b/.venv/Lib/site-packages/thinc/shims/__pycache__/shim.cpython-311.pyc index a10c29a6..4083d2a3 100644 Binary files a/.venv/Lib/site-packages/thinc/shims/__pycache__/shim.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/shims/__pycache__/shim.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/shims/__pycache__/tensorflow.cpython-311.pyc b/.venv/Lib/site-packages/thinc/shims/__pycache__/tensorflow.cpython-311.pyc index dc7f65dc..746332f4 100644 Binary files a/.venv/Lib/site-packages/thinc/shims/__pycache__/tensorflow.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/shims/__pycache__/tensorflow.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/thinc/shims/__pycache__/torchscript.cpython-311.pyc b/.venv/Lib/site-packages/thinc/shims/__pycache__/torchscript.cpython-311.pyc index 5f65f971..ea5c8199 100644 Binary files a/.venv/Lib/site-packages/thinc/shims/__pycache__/torchscript.cpython-311.pyc and b/.venv/Lib/site-packages/thinc/shims/__pycache__/torchscript.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tokenizers/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/tokenizers/__pycache__/__init__.cpython-311.pyc index 6ea925bb..af3af30d 100644 Binary files a/.venv/Lib/site-packages/tokenizers/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/tokenizers/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/tokenizers/decoders/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/tokenizers/decoders/__pycache__/__init__.cpython-311.pyc index 503a9b06..e63afb69 100644 Binary files a/.venv/Lib/site-packages/tokenizers/decoders/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/tokenizers/decoders/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/__init__.cpython-311.pyc index b37a3c4c..d91c39fd 100644 Binary files a/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/base_tokenizer.cpython-311.pyc b/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/base_tokenizer.cpython-311.pyc index 57600fda..d6cbb6fb 100644 Binary files a/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/base_tokenizer.cpython-311.pyc and b/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/base_tokenizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/bert_wordpiece.cpython-311.pyc b/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/bert_wordpiece.cpython-311.pyc index 695667d4..dc6efa91 100644 Binary files a/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/bert_wordpiece.cpython-311.pyc and b/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/bert_wordpiece.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/byte_level_bpe.cpython-311.pyc b/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/byte_level_bpe.cpython-311.pyc index 96ce2f8d..9dae1f5e 100644 Binary files 
a/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/byte_level_bpe.cpython-311.pyc and b/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/byte_level_bpe.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/char_level_bpe.cpython-311.pyc b/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/char_level_bpe.cpython-311.pyc index 56cfb09a..9d6a0605 100644 Binary files a/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/char_level_bpe.cpython-311.pyc and b/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/char_level_bpe.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/sentencepiece_bpe.cpython-311.pyc b/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/sentencepiece_bpe.cpython-311.pyc index 0e4871a6..733a183b 100644 Binary files a/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/sentencepiece_bpe.cpython-311.pyc and b/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/sentencepiece_bpe.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/sentencepiece_unigram.cpython-311.pyc b/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/sentencepiece_unigram.cpython-311.pyc index b0b12155..fcca6a79 100644 Binary files a/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/sentencepiece_unigram.cpython-311.pyc and b/.venv/Lib/site-packages/tokenizers/implementations/__pycache__/sentencepiece_unigram.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tokenizers/models/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/tokenizers/models/__pycache__/__init__.cpython-311.pyc index 41e62cf5..f03d7538 100644 Binary files a/.venv/Lib/site-packages/tokenizers/models/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/tokenizers/models/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/tokenizers/normalizers/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/tokenizers/normalizers/__pycache__/__init__.cpython-311.pyc index 4082f69a..17f94a08 100644 Binary files a/.venv/Lib/site-packages/tokenizers/normalizers/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/tokenizers/normalizers/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tokenizers/pre_tokenizers/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/tokenizers/pre_tokenizers/__pycache__/__init__.cpython-311.pyc index 7d104575..8da3af07 100644 Binary files a/.venv/Lib/site-packages/tokenizers/pre_tokenizers/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/tokenizers/pre_tokenizers/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tokenizers/processors/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/tokenizers/processors/__pycache__/__init__.cpython-311.pyc index 3ddf61e4..2e316eab 100644 Binary files a/.venv/Lib/site-packages/tokenizers/processors/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/tokenizers/processors/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tokenizers/trainers/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/tokenizers/trainers/__pycache__/__init__.cpython-311.pyc index 9ac24bdf..fa0ec3da 100644 Binary files a/.venv/Lib/site-packages/tokenizers/trainers/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/tokenizers/trainers/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_VF.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_VF.cpython-311.pyc index 2a034025..b61d021a 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/_VF.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/_VF.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/__pycache__/__config__.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/__config__.cpython-311.pyc index 4bbc31be..c453be46 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/__config__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/__config__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/__future__.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/__future__.cpython-311.pyc index 6f008e55..438b3405 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/__future__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/__future__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/__init__.cpython-311.pyc index b477b55c..ff49cc71 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_classes.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_classes.cpython-311.pyc index 07b4a328..7e24104d 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/_classes.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/_classes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_compile.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_compile.cpython-311.pyc index 8e4524d9..9cf0cf91 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/_compile.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/_compile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_custom_ops.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_custom_ops.cpython-311.pyc index 9b8c1f99..1dc079b4 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/_custom_ops.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/__pycache__/_custom_ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_guards.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_guards.cpython-311.pyc index 85416cf8..17f9b0b1 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/_guards.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/_guards.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_jit_internal.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_jit_internal.cpython-311.pyc index c207f007..f9b3320c 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/_jit_internal.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/_jit_internal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_linalg_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_linalg_utils.cpython-311.pyc index 52036e00..3a4f159c 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/_linalg_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/_linalg_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_lobpcg.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_lobpcg.cpython-311.pyc index e49d2d54..64be693e 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/_lobpcg.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/_lobpcg.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_lowrank.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_lowrank.cpython-311.pyc index b191c820..e9d59402 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/_lowrank.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/_lowrank.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_meta_registrations.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_meta_registrations.cpython-311.pyc 
index 155c9d68..180007a6 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/_meta_registrations.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/_meta_registrations.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_namedtensor_internals.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_namedtensor_internals.cpython-311.pyc index cafefb66..9d9963f7 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/_namedtensor_internals.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/_namedtensor_internals.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_ops.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_ops.cpython-311.pyc index 630dce4f..671725c8 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/_ops.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/_ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_sources.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_sources.cpython-311.pyc index 8f68e670..e05e3976 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/_sources.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/_sources.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_storage_docs.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_storage_docs.cpython-311.pyc index 59cb4416..f37a492a 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/_storage_docs.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/_storage_docs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_streambase.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_streambase.cpython-311.pyc index 5360acd3..1f4c0a31 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/_streambase.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/_streambase.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_tensor.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_tensor.cpython-311.pyc index 9bcd83d3..53ca4f5e 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/_tensor.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/_tensor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_tensor_docs.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_tensor_docs.cpython-311.pyc index 14080dc9..19292a39 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/_tensor_docs.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/_tensor_docs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_tensor_str.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_tensor_str.cpython-311.pyc index d16e77fb..4ecb8ebc 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/_tensor_str.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/_tensor_str.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_torch_docs.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_torch_docs.cpython-311.pyc index 1075b2e4..793e647f 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/_torch_docs.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/_torch_docs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_utils.cpython-311.pyc index 35dbb279..f6243a31 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_utils_internal.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_utils_internal.cpython-311.pyc index 70977bc2..89a7977a 100644 Binary files 
a/.venv/Lib/site-packages/torch/__pycache__/_utils_internal.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/_utils_internal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_vmap_internals.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_vmap_internals.cpython-311.pyc index 526811d9..9a5d7001 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/_vmap_internals.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/_vmap_internals.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/_weights_only_unpickler.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/_weights_only_unpickler.cpython-311.pyc index 82eff95b..04ef3e17 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/_weights_only_unpickler.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/_weights_only_unpickler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/functional.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/functional.cpython-311.pyc index 9dd13c85..e4267ce7 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/functional.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/functional.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/hub.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/hub.cpython-311.pyc index bc2df3c9..a75641f0 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/hub.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/hub.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/library.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/library.cpython-311.pyc index 7777d188..f5fcf393 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/library.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/library.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/__pycache__/overrides.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/overrides.cpython-311.pyc index aca3ffd8..5095efe9 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/overrides.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/overrides.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/quasirandom.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/quasirandom.cpython-311.pyc index 137a9135..c1ea5668 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/quasirandom.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/quasirandom.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/random.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/random.cpython-311.pyc index 080fc0c0..c4c48a17 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/random.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/random.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/return_types.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/return_types.cpython-311.pyc index 5138b9ec..cf1eaba5 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/return_types.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/return_types.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/serialization.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/serialization.cpython-311.pyc index 9ace25a0..dc04a06f 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/serialization.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/serialization.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/storage.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/storage.cpython-311.pyc index 961cba5e..4a40a33c 100644 Binary files 
a/.venv/Lib/site-packages/torch/__pycache__/storage.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/storage.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/torch_version.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/torch_version.cpython-311.pyc index 6f08e12b..87707de4 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/torch_version.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/torch_version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/types.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/types.cpython-311.pyc index 284e43d0..ceedb89a 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/types.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/types.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/torch/__pycache__/version.cpython-311.pyc index 6c8fae02..abbd50fd 100644 Binary files a/.venv/Lib/site-packages/torch/__pycache__/version.cpython-311.pyc and b/.venv/Lib/site-packages/torch/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_awaits/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/_awaits/__pycache__/__init__.cpython-311.pyc index 5af0467d..3a3213a6 100644 Binary files a/.venv/Lib/site-packages/torch/_awaits/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_awaits/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_custom_op/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/_custom_op/__pycache__/__init__.cpython-311.pyc index 9c94f5a9..a9b96004 100644 Binary files a/.venv/Lib/site-packages/torch/_custom_op/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_custom_op/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/_custom_op/__pycache__/autograd.cpython-311.pyc b/.venv/Lib/site-packages/torch/_custom_op/__pycache__/autograd.cpython-311.pyc index f4aa3789..f1fa70f1 100644 Binary files a/.venv/Lib/site-packages/torch/_custom_op/__pycache__/autograd.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_custom_op/__pycache__/autograd.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_custom_op/__pycache__/impl.cpython-311.pyc b/.venv/Lib/site-packages/torch/_custom_op/__pycache__/impl.cpython-311.pyc index 28dbf29a..391b3006 100644 Binary files a/.venv/Lib/site-packages/torch/_custom_op/__pycache__/impl.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_custom_op/__pycache__/impl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_decomp/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/_decomp/__pycache__/__init__.cpython-311.pyc index a5d4aed0..e8c8eaee 100644 Binary files a/.venv/Lib/site-packages/torch/_decomp/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_decomp/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_decomp/__pycache__/decompositions.cpython-311.pyc b/.venv/Lib/site-packages/torch/_decomp/__pycache__/decompositions.cpython-311.pyc index f09dede8..4d4a6495 100644 Binary files a/.venv/Lib/site-packages/torch/_decomp/__pycache__/decompositions.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_decomp/__pycache__/decompositions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_decomp/__pycache__/decompositions_for_rng.cpython-311.pyc b/.venv/Lib/site-packages/torch/_decomp/__pycache__/decompositions_for_rng.cpython-311.pyc index 3effba7f..0f3e3b70 100644 Binary files a/.venv/Lib/site-packages/torch/_decomp/__pycache__/decompositions_for_rng.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_decomp/__pycache__/decompositions_for_rng.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/_dispatch/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dispatch/__pycache__/__init__.cpython-311.pyc index 9674a462..8e9f5c95 100644 Binary files a/.venv/Lib/site-packages/torch/_dispatch/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dispatch/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dispatch/__pycache__/python.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dispatch/__pycache__/python.cpython-311.pyc index 445cb61a..c0cb4981 100644 Binary files a/.venv/Lib/site-packages/torch/_dispatch/__pycache__/python.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dispatch/__pycache__/python.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/__init__.cpython-311.pyc index c3ad0657..02d730ae 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/_trace_wrapped_higher_order_op.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/_trace_wrapped_higher_order_op.cpython-311.pyc index 124cb3ca..8610f2cf 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/_trace_wrapped_higher_order_op.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/_trace_wrapped_higher_order_op.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/bytecode_analysis.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/bytecode_analysis.cpython-311.pyc index 4184d0b6..a21c8a8b 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/bytecode_analysis.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/bytecode_analysis.cpython-311.pyc differ 
diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/bytecode_transformation.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/bytecode_transformation.cpython-311.pyc index d779a883..9fa44a5e 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/bytecode_transformation.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/bytecode_transformation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/cache_size.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/cache_size.cpython-311.pyc index 10bc44bc..04a196c3 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/cache_size.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/cache_size.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/callback.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/callback.cpython-311.pyc index 59aa9382..5c5eb910 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/callback.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/callback.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/code_context.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/code_context.cpython-311.pyc index 793cad2a..1d8e33f7 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/code_context.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/code_context.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/codegen.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/codegen.cpython-311.pyc index d64efc0a..d3de7bb7 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/codegen.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/codegen.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/compiled_autograd.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/compiled_autograd.cpython-311.pyc index 36ab7f3d..b599a8e9 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/compiled_autograd.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/compiled_autograd.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/comptime.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/comptime.cpython-311.pyc index b504402a..127c8caa 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/comptime.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/comptime.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/config.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/config.cpython-311.pyc index acc9d664..8b74a701 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/config.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/convert_frame.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/convert_frame.cpython-311.pyc index e37210ec..08b001e2 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/convert_frame.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/convert_frame.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/current_scope_id.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/current_scope_id.cpython-311.pyc index 2c4fc1ce..3e851003 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/current_scope_id.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/current_scope_id.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/decorators.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/decorators.cpython-311.pyc index 710dbe44..21b3417a 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/decorators.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/decorators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/device_interface.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/device_interface.cpython-311.pyc index c4b5be96..15f14fce 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/device_interface.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/device_interface.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/eval_frame.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/eval_frame.cpython-311.pyc index 4b9e6176..4e00f034 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/eval_frame.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/eval_frame.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/exc.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/exc.cpython-311.pyc index 5dcb0f6c..11dd5fb8 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/exc.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/exc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/external_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/external_utils.cpython-311.pyc index fd7413f4..1019efab 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/external_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/external_utils.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/funcname_cache.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/funcname_cache.cpython-311.pyc index 4de45814..ba6b57f3 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/funcname_cache.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/funcname_cache.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/guards.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/guards.cpython-311.pyc index 16d07cc7..034fec10 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/guards.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/guards.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/hooks.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/hooks.cpython-311.pyc index 49be6789..701cab38 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/hooks.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/hooks.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/logging.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/logging.cpython-311.pyc index 8b33508d..e4146203 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/logging.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/logging.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/mutation_guard.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/mutation_guard.cpython-311.pyc index 6dd311e0..fd2a58bd 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/mutation_guard.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/mutation_guard.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/output_graph.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/output_graph.cpython-311.pyc index 9c142fd3..fa14fe59 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/output_graph.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/output_graph.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/polyfill.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/polyfill.cpython-311.pyc index a3781db0..125f7447 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/polyfill.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/polyfill.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/replay_record.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/replay_record.cpython-311.pyc index 39f358b0..ad2fb394 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/replay_record.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/replay_record.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/resume_execution.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/resume_execution.cpython-311.pyc index bb218eb1..67da688a 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/resume_execution.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/resume_execution.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/side_effects.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/side_effects.cpython-311.pyc index 37bf9d5a..bbd83fa5 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/side_effects.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/side_effects.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/source.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/source.cpython-311.pyc index 545091df..9a448f21 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/source.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/source.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/symbolic_convert.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/symbolic_convert.cpython-311.pyc index e33c8779..9cbe07eb 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/symbolic_convert.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/symbolic_convert.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/trace_rules.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/trace_rules.cpython-311.pyc index 8360c1e9..5d71752c 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/trace_rules.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/trace_rules.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/types.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/types.cpython-311.pyc index ce191571..d42f152f 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/types.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/types.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/utils.cpython-311.pyc index 5aaf708d..99c1ad65 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/backends/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/backends/__pycache__/__init__.cpython-311.pyc index 
5110d896..32e6a1bc 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/backends/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/backends/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/backends/__pycache__/registry.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/backends/__pycache__/registry.cpython-311.pyc index 87d5869f..a03099eb 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/backends/__pycache__/registry.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/backends/__pycache__/registry.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/__init__.cpython-311.pyc index 122970fd..0822007c 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/base.cpython-311.pyc index 07b46db4..700f20a3 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/base.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/builder.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/builder.cpython-311.pyc index 095d1a57..be692637 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/builder.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/builder.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/builtin.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/builtin.cpython-311.pyc index 5f58eaac..602adb6f 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/builtin.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/builtin.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/constant.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/constant.cpython-311.pyc index 8576ca7e..754d755f 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/constant.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/constant.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/ctx_manager.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/ctx_manager.cpython-311.pyc index f8a7fa43..36ffe682 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/ctx_manager.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/ctx_manager.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/dicts.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/dicts.cpython-311.pyc index 9a3a6986..85325c79 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/dicts.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/dicts.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/distributed.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/distributed.cpython-311.pyc index a528c489..dda61ec6 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/distributed.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/distributed.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/functions.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/functions.cpython-311.pyc index 4613ab18..fa5b9e61 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/functions.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/functions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/higher_order_ops.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/higher_order_ops.cpython-311.pyc index ba48ccf3..5efa5b4b 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/higher_order_ops.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/higher_order_ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/iter.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/iter.cpython-311.pyc index 90f0fdac..99809769 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/iter.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/iter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/lazy.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/lazy.cpython-311.pyc index 933caac3..adf685d6 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/lazy.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/lazy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/lists.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/lists.cpython-311.pyc index 1e4f81b8..610bd54a 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/lists.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/lists.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/misc.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/misc.cpython-311.pyc index e4377b3b..fadbc3ae 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/misc.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/misc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/nn_module.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/nn_module.cpython-311.pyc index f2d0fd8b..4a48358a 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/nn_module.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/nn_module.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/optimizer.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/optimizer.cpython-311.pyc index 600673ed..2ca79634 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/optimizer.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/optimizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/sdpa.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/sdpa.cpython-311.pyc index 3174bafd..3eb260cc 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/sdpa.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/sdpa.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/tensor.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/tensor.cpython-311.pyc index 1913899a..148bd4d6 100644 Binary files 
a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/tensor.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/tensor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/torch.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/torch.cpython-311.pyc index c76b8bdd..2c4917d1 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/torch.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/torch.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/torch_function.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/torch_function.cpython-311.pyc index ea5d058d..5ad33afc 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/torch_function.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/torch_function.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/user_defined.cpython-311.pyc b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/user_defined.cpython-311.pyc index 8df1389d..2e651ef7 100644 Binary files a/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/user_defined.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_dynamo/variables/__pycache__/user_defined.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/__pycache__/__init__.cpython-311.pyc index 7fc59830..107d5b83 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/__pycache__/aot_autograd.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/_functorch/__pycache__/aot_autograd.cpython-311.pyc index 7330b42c..d5372ff4 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/__pycache__/aot_autograd.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/__pycache__/aot_autograd.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/__pycache__/apis.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/__pycache__/apis.cpython-311.pyc index b709dffa..86fa33c2 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/__pycache__/apis.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/__pycache__/apis.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/__pycache__/autograd_function.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/__pycache__/autograd_function.cpython-311.pyc index ec1aebc4..3bcf8978 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/__pycache__/autograd_function.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/__pycache__/autograd_function.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/__pycache__/batch_norm_replacement.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/__pycache__/batch_norm_replacement.cpython-311.pyc index b4c83581..fcecd26d 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/__pycache__/batch_norm_replacement.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/__pycache__/batch_norm_replacement.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/__pycache__/compile_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/__pycache__/compile_utils.cpython-311.pyc index 0a49585f..509d7ac2 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/__pycache__/compile_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/__pycache__/compile_utils.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/_functorch/__pycache__/config.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/__pycache__/config.cpython-311.pyc index cc1d5a7b..2e167010 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/__pycache__/config.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/__pycache__/config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/__pycache__/deprecated.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/__pycache__/deprecated.cpython-311.pyc index 4acad9f7..bc6ab790 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/__pycache__/deprecated.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/__pycache__/deprecated.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/__pycache__/eager_transforms.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/__pycache__/eager_transforms.cpython-311.pyc index 340e09b1..923fcaf8 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/__pycache__/eager_transforms.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/__pycache__/eager_transforms.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/__pycache__/functional_call.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/__pycache__/functional_call.cpython-311.pyc index 282d7695..0cadda4b 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/__pycache__/functional_call.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/__pycache__/functional_call.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/__pycache__/make_functional.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/__pycache__/make_functional.cpython-311.pyc index a5d53c8c..ef88af83 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/__pycache__/make_functional.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/_functorch/__pycache__/make_functional.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/__pycache__/partitioners.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/__pycache__/partitioners.cpython-311.pyc index f7fd5bfa..28ef3183 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/__pycache__/partitioners.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/__pycache__/partitioners.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/__pycache__/pyfunctorch.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/__pycache__/pyfunctorch.cpython-311.pyc index 0fe72543..cf0f40bd 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/__pycache__/pyfunctorch.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/__pycache__/pyfunctorch.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/__pycache__/utils.cpython-311.pyc index 5eedf7ea..32858d3c 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/__pycache__/vmap.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/__pycache__/vmap.cpython-311.pyc index c234e09a..3a821b07 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/__pycache__/vmap.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/__pycache__/vmap.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/__init__.cpython-311.pyc index dce19723..2a415203 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/collect_metadata_analysis.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/collect_metadata_analysis.cpython-311.pyc index 5503f537..74f622ed 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/collect_metadata_analysis.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/collect_metadata_analysis.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/dispatch_and_compile_graph.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/dispatch_and_compile_graph.cpython-311.pyc index 9be38b1a..8e2f995b 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/dispatch_and_compile_graph.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/dispatch_and_compile_graph.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/functional_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/functional_utils.cpython-311.pyc index 019870b6..f4535783 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/functional_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/functional_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/input_output_analysis.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/input_output_analysis.cpython-311.pyc index bda8ad0e..cddb77d2 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/input_output_analysis.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/input_output_analysis.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/jit_compile_runtime_wrappers.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/jit_compile_runtime_wrappers.cpython-311.pyc index 32dbb15b..8e60298f 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/jit_compile_runtime_wrappers.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/jit_compile_runtime_wrappers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/logging_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/logging_utils.cpython-311.pyc index 3d13e475..1129b9d9 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/logging_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/logging_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/runtime_wrappers.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/runtime_wrappers.cpython-311.pyc index d7ab1eb9..13da416c 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/runtime_wrappers.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/runtime_wrappers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/schemas.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/schemas.cpython-311.pyc index f4959ce9..720b21ff 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/schemas.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/schemas.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/subclass_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/subclass_utils.cpython-311.pyc index 34c841c0..e699e1d8 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/subclass_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/subclass_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/traced_function_transforms.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/traced_function_transforms.cpython-311.pyc index 7cc14bb7..6bc8fed3 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/traced_function_transforms.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/traced_function_transforms.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/utils.cpython-311.pyc index f811c464..cdbca48f 100644 Binary files a/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_functorch/_aot_autograd/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_higher_order_ops/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/_higher_order_ops/__pycache__/__init__.cpython-311.pyc index dc8933ff..f112cc02 100644 Binary files a/.venv/Lib/site-packages/torch/_higher_order_ops/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_higher_order_ops/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/_higher_order_ops/__pycache__/cond.cpython-311.pyc b/.venv/Lib/site-packages/torch/_higher_order_ops/__pycache__/cond.cpython-311.pyc index 09222ca1..6d79d9c2 100644 Binary files a/.venv/Lib/site-packages/torch/_higher_order_ops/__pycache__/cond.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_higher_order_ops/__pycache__/cond.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_higher_order_ops/__pycache__/map.cpython-311.pyc b/.venv/Lib/site-packages/torch/_higher_order_ops/__pycache__/map.cpython-311.pyc index b6e41a6d..61e3654c 100644 Binary files a/.venv/Lib/site-packages/torch/_higher_order_ops/__pycache__/map.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_higher_order_ops/__pycache__/map.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_higher_order_ops/__pycache__/out_dtype.cpython-311.pyc b/.venv/Lib/site-packages/torch/_higher_order_ops/__pycache__/out_dtype.cpython-311.pyc index d4d38b81..54f8d3f8 100644 Binary files a/.venv/Lib/site-packages/torch/_higher_order_ops/__pycache__/out_dtype.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_higher_order_ops/__pycache__/out_dtype.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_higher_order_ops/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/_higher_order_ops/__pycache__/utils.cpython-311.pyc index bcdbd43b..b046998d 100644 Binary files a/.venv/Lib/site-packages/torch/_higher_order_ops/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_higher_order_ops/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_inductor/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/_inductor/__pycache__/__init__.cpython-311.pyc index 7694ac0e..961a68c3 100644 Binary files a/.venv/Lib/site-packages/torch/_inductor/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_inductor/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/_inductor/__pycache__/config.cpython-311.pyc b/.venv/Lib/site-packages/torch/_inductor/__pycache__/config.cpython-311.pyc index 2a600e9b..ae0d8517 100644 Binary files a/.venv/Lib/site-packages/torch/_inductor/__pycache__/config.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_inductor/__pycache__/config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_inductor/__pycache__/test_operators.cpython-311.pyc b/.venv/Lib/site-packages/torch/_inductor/__pycache__/test_operators.cpython-311.pyc index 93489a86..6e338f61 100644 Binary files a/.venv/Lib/site-packages/torch/_inductor/__pycache__/test_operators.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_inductor/__pycache__/test_operators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_library/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/_library/__pycache__/__init__.cpython-311.pyc index 4061c651..cd7f0081 100644 Binary files a/.venv/Lib/site-packages/torch/_library/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_library/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_library/__pycache__/abstract_impl.cpython-311.pyc b/.venv/Lib/site-packages/torch/_library/__pycache__/abstract_impl.cpython-311.pyc index d2805985..fed64577 100644 Binary files a/.venv/Lib/site-packages/torch/_library/__pycache__/abstract_impl.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_library/__pycache__/abstract_impl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_library/__pycache__/simple_registry.cpython-311.pyc b/.venv/Lib/site-packages/torch/_library/__pycache__/simple_registry.cpython-311.pyc index 37bc6623..44ca0ba5 100644 Binary files a/.venv/Lib/site-packages/torch/_library/__pycache__/simple_registry.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_library/__pycache__/simple_registry.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/_library/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/_library/__pycache__/utils.cpython-311.pyc index 0a50bebe..9ddd8dd6 100644 Binary files a/.venv/Lib/site-packages/torch/_library/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_library/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_logging/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/_logging/__pycache__/__init__.cpython-311.pyc index 32649953..717e058e 100644 Binary files a/.venv/Lib/site-packages/torch/_logging/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_logging/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_logging/__pycache__/_internal.cpython-311.pyc b/.venv/Lib/site-packages/torch/_logging/__pycache__/_internal.cpython-311.pyc index 3d016cf2..82acff60 100644 Binary files a/.venv/Lib/site-packages/torch/_logging/__pycache__/_internal.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_logging/__pycache__/_internal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_logging/__pycache__/_registrations.cpython-311.pyc b/.venv/Lib/site-packages/torch/_logging/__pycache__/_registrations.cpython-311.pyc index aeb34517..6056367e 100644 Binary files a/.venv/Lib/site-packages/torch/_logging/__pycache__/_registrations.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_logging/__pycache__/_registrations.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_logging/__pycache__/structured.cpython-311.pyc b/.venv/Lib/site-packages/torch/_logging/__pycache__/structured.cpython-311.pyc index aadf310a..9be51ef9 100644 Binary files a/.venv/Lib/site-packages/torch/_logging/__pycache__/structured.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_logging/__pycache__/structured.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_numpy/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/_numpy/__pycache__/__init__.cpython-311.pyc index 928caa60..c3258819 100644 Binary files a/.venv/Lib/site-packages/torch/_numpy/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_numpy/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_binary_ufuncs_impl.cpython-311.pyc b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_binary_ufuncs_impl.cpython-311.pyc index 30872a3e..aa4c5bff 100644 Binary files a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_binary_ufuncs_impl.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_binary_ufuncs_impl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_casting_dicts.cpython-311.pyc b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_casting_dicts.cpython-311.pyc index c935d22a..5da55805 100644 Binary files a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_casting_dicts.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_casting_dicts.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_dtypes.cpython-311.pyc b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_dtypes.cpython-311.pyc index d4bf64b8..577812be 100644 Binary files a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_dtypes.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_dtypes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_dtypes_impl.cpython-311.pyc b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_dtypes_impl.cpython-311.pyc index 611e8760..0137fb5a 100644 Binary files a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_dtypes_impl.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_dtypes_impl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_funcs.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_funcs.cpython-311.pyc index 745679f2..adec4e4b 100644 Binary files a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_funcs.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_funcs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_funcs_impl.cpython-311.pyc b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_funcs_impl.cpython-311.pyc index e6b751c3..5a48d5ee 100644 Binary files a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_funcs_impl.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_funcs_impl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_getlimits.cpython-311.pyc b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_getlimits.cpython-311.pyc index ceb0ba5a..1df4b801 100644 Binary files a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_getlimits.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_getlimits.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_ndarray.cpython-311.pyc b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_ndarray.cpython-311.pyc index 5cb60581..ca915531 100644 Binary files a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_ndarray.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_ndarray.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_normalizations.cpython-311.pyc b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_normalizations.cpython-311.pyc index ffdf62ec..259d8e52 100644 Binary files a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_normalizations.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_normalizations.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_reductions_impl.cpython-311.pyc b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_reductions_impl.cpython-311.pyc index 
0b2a546a..b8e7414e 100644 Binary files a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_reductions_impl.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_reductions_impl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_ufuncs.cpython-311.pyc b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_ufuncs.cpython-311.pyc index 1a46600e..fac93014 100644 Binary files a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_ufuncs.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_ufuncs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_unary_ufuncs_impl.cpython-311.pyc b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_unary_ufuncs_impl.cpython-311.pyc index 0ea36cbd..46c48094 100644 Binary files a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_unary_ufuncs_impl.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_unary_ufuncs_impl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_util.cpython-311.pyc b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_util.cpython-311.pyc index 500bb68c..ee0b7e31 100644 Binary files a/.venv/Lib/site-packages/torch/_numpy/__pycache__/_util.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_numpy/__pycache__/_util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_numpy/__pycache__/fft.cpython-311.pyc b/.venv/Lib/site-packages/torch/_numpy/__pycache__/fft.cpython-311.pyc index 01dd9714..f61b1309 100644 Binary files a/.venv/Lib/site-packages/torch/_numpy/__pycache__/fft.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_numpy/__pycache__/fft.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_numpy/__pycache__/linalg.cpython-311.pyc b/.venv/Lib/site-packages/torch/_numpy/__pycache__/linalg.cpython-311.pyc index ae321a4f..e82900e6 100644 Binary files a/.venv/Lib/site-packages/torch/_numpy/__pycache__/linalg.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/_numpy/__pycache__/linalg.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_numpy/__pycache__/random.cpython-311.pyc b/.venv/Lib/site-packages/torch/_numpy/__pycache__/random.cpython-311.pyc index 038994ad..4881ee8a 100644 Binary files a/.venv/Lib/site-packages/torch/_numpy/__pycache__/random.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_numpy/__pycache__/random.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_prims/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/_prims/__pycache__/__init__.cpython-311.pyc index 70998230..489e6974 100644 Binary files a/.venv/Lib/site-packages/torch/_prims/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_prims/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_prims/__pycache__/context.cpython-311.pyc b/.venv/Lib/site-packages/torch/_prims/__pycache__/context.cpython-311.pyc index 8945f235..a7796325 100644 Binary files a/.venv/Lib/site-packages/torch/_prims/__pycache__/context.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_prims/__pycache__/context.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_prims/__pycache__/debug_prims.cpython-311.pyc b/.venv/Lib/site-packages/torch/_prims/__pycache__/debug_prims.cpython-311.pyc index c4ea0aa5..98c118fd 100644 Binary files a/.venv/Lib/site-packages/torch/_prims/__pycache__/debug_prims.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_prims/__pycache__/debug_prims.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_prims/__pycache__/executor.cpython-311.pyc b/.venv/Lib/site-packages/torch/_prims/__pycache__/executor.cpython-311.pyc index 416456a5..962a0df2 100644 Binary files a/.venv/Lib/site-packages/torch/_prims/__pycache__/executor.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_prims/__pycache__/executor.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/_prims/__pycache__/rng_prims.cpython-311.pyc b/.venv/Lib/site-packages/torch/_prims/__pycache__/rng_prims.cpython-311.pyc index 50887b2d..7cb066de 100644 Binary files a/.venv/Lib/site-packages/torch/_prims/__pycache__/rng_prims.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_prims/__pycache__/rng_prims.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_prims_common/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/_prims_common/__pycache__/__init__.cpython-311.pyc index 75d5998c..fa773bb1 100644 Binary files a/.venv/Lib/site-packages/torch/_prims_common/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_prims_common/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_prims_common/__pycache__/wrappers.cpython-311.pyc b/.venv/Lib/site-packages/torch/_prims_common/__pycache__/wrappers.cpython-311.pyc index 9191d7e3..e57c535f 100644 Binary files a/.venv/Lib/site-packages/torch/_prims_common/__pycache__/wrappers.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_prims_common/__pycache__/wrappers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_refs/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/_refs/__pycache__/__init__.cpython-311.pyc index 8eff7672..bee66db5 100644 Binary files a/.venv/Lib/site-packages/torch/_refs/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_refs/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_refs/__pycache__/_conversions.cpython-311.pyc b/.venv/Lib/site-packages/torch/_refs/__pycache__/_conversions.cpython-311.pyc index d1aa8d6b..902c81fc 100644 Binary files a/.venv/Lib/site-packages/torch/_refs/__pycache__/_conversions.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_refs/__pycache__/_conversions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_refs/__pycache__/fft.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/_refs/__pycache__/fft.cpython-311.pyc index 302fbd94..529d263a 100644 Binary files a/.venv/Lib/site-packages/torch/_refs/__pycache__/fft.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_refs/__pycache__/fft.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_refs/linalg/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/_refs/linalg/__pycache__/__init__.cpython-311.pyc index f983fa5e..803d296f 100644 Binary files a/.venv/Lib/site-packages/torch/_refs/linalg/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_refs/linalg/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_refs/nn/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/_refs/nn/__pycache__/__init__.cpython-311.pyc index 0dcf648e..6b51a469 100644 Binary files a/.venv/Lib/site-packages/torch/_refs/nn/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_refs/nn/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_refs/nn/functional/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/_refs/nn/functional/__pycache__/__init__.cpython-311.pyc index 1bf8f602..1959f851 100644 Binary files a/.venv/Lib/site-packages/torch/_refs/nn/functional/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_refs/nn/functional/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_refs/special/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/_refs/special/__pycache__/__init__.cpython-311.pyc index 7081a020..7e6c9a7e 100644 Binary files a/.venv/Lib/site-packages/torch/_refs/special/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_refs/special/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_subclasses/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/_subclasses/__pycache__/__init__.cpython-311.pyc index 50346d66..3539cbb8 100644 Binary files a/.venv/Lib/site-packages/torch/_subclasses/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_subclasses/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_subclasses/__pycache__/fake_impls.cpython-311.pyc b/.venv/Lib/site-packages/torch/_subclasses/__pycache__/fake_impls.cpython-311.pyc index 6227b6ae..cc01442e 100644 Binary files a/.venv/Lib/site-packages/torch/_subclasses/__pycache__/fake_impls.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_subclasses/__pycache__/fake_impls.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_subclasses/__pycache__/fake_tensor.cpython-311.pyc b/.venv/Lib/site-packages/torch/_subclasses/__pycache__/fake_tensor.cpython-311.pyc index 9b7c88a5..f0954ef2 100644 Binary files a/.venv/Lib/site-packages/torch/_subclasses/__pycache__/fake_tensor.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_subclasses/__pycache__/fake_tensor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_subclasses/__pycache__/fake_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/_subclasses/__pycache__/fake_utils.cpython-311.pyc index 89f28bf4..1d114b7c 100644 Binary files a/.venv/Lib/site-packages/torch/_subclasses/__pycache__/fake_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_subclasses/__pycache__/fake_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_subclasses/__pycache__/functional_tensor.cpython-311.pyc b/.venv/Lib/site-packages/torch/_subclasses/__pycache__/functional_tensor.cpython-311.pyc index 3eae8e70..c1320dc7 100644 Binary files a/.venv/Lib/site-packages/torch/_subclasses/__pycache__/functional_tensor.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_subclasses/__pycache__/functional_tensor.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/_subclasses/__pycache__/meta_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/_subclasses/__pycache__/meta_utils.cpython-311.pyc index 938812d5..ab154611 100644 Binary files a/.venv/Lib/site-packages/torch/_subclasses/__pycache__/meta_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_subclasses/__pycache__/meta_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_vendor/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/_vendor/__pycache__/__init__.cpython-311.pyc index e8477a5a..3f84318b 100644 Binary files a/.venv/Lib/site-packages/torch/_vendor/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_vendor/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_vendor/packaging/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/_vendor/packaging/__pycache__/__init__.cpython-311.pyc index 5b94c4cb..2c885dac 100644 Binary files a/.venv/Lib/site-packages/torch/_vendor/packaging/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_vendor/packaging/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_vendor/packaging/__pycache__/_structures.cpython-311.pyc b/.venv/Lib/site-packages/torch/_vendor/packaging/__pycache__/_structures.cpython-311.pyc index b07ce094..90cf0993 100644 Binary files a/.venv/Lib/site-packages/torch/_vendor/packaging/__pycache__/_structures.cpython-311.pyc and b/.venv/Lib/site-packages/torch/_vendor/packaging/__pycache__/_structures.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/_vendor/packaging/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/torch/_vendor/packaging/__pycache__/version.cpython-311.pyc index 7f801255..a640f1f9 100644 Binary files a/.venv/Lib/site-packages/torch/_vendor/packaging/__pycache__/version.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/_vendor/packaging/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/amp/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/amp/__pycache__/__init__.cpython-311.pyc index 59625d4c..33fccc4b 100644 Binary files a/.venv/Lib/site-packages/torch/amp/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/amp/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/amp/__pycache__/autocast_mode.cpython-311.pyc b/.venv/Lib/site-packages/torch/amp/__pycache__/autocast_mode.cpython-311.pyc index 3559af6a..c0339f7a 100644 Binary files a/.venv/Lib/site-packages/torch/amp/__pycache__/autocast_mode.cpython-311.pyc and b/.venv/Lib/site-packages/torch/amp/__pycache__/autocast_mode.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/amp/__pycache__/grad_scaler.cpython-311.pyc b/.venv/Lib/site-packages/torch/amp/__pycache__/grad_scaler.cpython-311.pyc index 3ec93b61..ecb08e71 100644 Binary files a/.venv/Lib/site-packages/torch/amp/__pycache__/grad_scaler.cpython-311.pyc and b/.venv/Lib/site-packages/torch/amp/__pycache__/grad_scaler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/__pycache__/__init__.cpython-311.pyc index c92de2f1..c26f05eb 100644 Binary files a/.venv/Lib/site-packages/torch/ao/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/__pycache__/__init__.cpython-311.pyc index 96520de5..f55be2d0 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/__pycache__/__init__.cpython-311.pyc index f64e1bb9..fb530316 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/modules/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/modules/__pycache__/__init__.cpython-311.pyc index df586987..5dd8a88a 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/modules/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/modules/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/modules/__pycache__/fused.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/modules/__pycache__/fused.cpython-311.pyc index 1e467185..f67ed6ac 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/modules/__pycache__/fused.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/modules/__pycache__/fused.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/qat/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/qat/__pycache__/__init__.cpython-311.pyc index 395198f3..abcd421f 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/qat/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/qat/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/qat/modules/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/qat/modules/__pycache__/__init__.cpython-311.pyc index fac0b369..9be09090 100644 Binary files 
a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/qat/modules/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/qat/modules/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/qat/modules/__pycache__/conv_fused.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/qat/modules/__pycache__/conv_fused.cpython-311.pyc index 5e1000d3..27adc0c4 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/qat/modules/__pycache__/conv_fused.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/qat/modules/__pycache__/conv_fused.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/qat/modules/__pycache__/linear_fused.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/qat/modules/__pycache__/linear_fused.cpython-311.pyc index f6674724..b7c146ef 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/qat/modules/__pycache__/linear_fused.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/qat/modules/__pycache__/linear_fused.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/qat/modules/__pycache__/linear_relu.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/qat/modules/__pycache__/linear_relu.cpython-311.pyc index 25154588..ecba0e8a 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/qat/modules/__pycache__/linear_relu.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/qat/modules/__pycache__/linear_relu.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/__pycache__/__init__.cpython-311.pyc index 725616de..7f16810d 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/dynamic/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/dynamic/__pycache__/__init__.cpython-311.pyc index 45e098b5..a6263d22 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/dynamic/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/dynamic/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/dynamic/modules/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/dynamic/modules/__pycache__/__init__.cpython-311.pyc index 73d88f58..2034af39 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/dynamic/modules/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/dynamic/modules/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/dynamic/modules/__pycache__/linear_relu.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/dynamic/modules/__pycache__/linear_relu.cpython-311.pyc index 614fe173..fa70c26a 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/dynamic/modules/__pycache__/linear_relu.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/dynamic/modules/__pycache__/linear_relu.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/modules/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/modules/__pycache__/__init__.cpython-311.pyc index 9bd6a833..1f68c050 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/modules/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/modules/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/modules/__pycache__/bn_relu.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/modules/__pycache__/bn_relu.cpython-311.pyc index 51317d6a..9cefa03e 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/modules/__pycache__/bn_relu.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/modules/__pycache__/bn_relu.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/modules/__pycache__/conv_add.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/modules/__pycache__/conv_add.cpython-311.pyc index 6fc0f697..74a24b74 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/modules/__pycache__/conv_add.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/modules/__pycache__/conv_add.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/modules/__pycache__/conv_relu.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/modules/__pycache__/conv_relu.cpython-311.pyc index ace03f78..609d806b 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/modules/__pycache__/conv_relu.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/modules/__pycache__/conv_relu.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/modules/__pycache__/linear_relu.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/modules/__pycache__/linear_relu.cpython-311.pyc index 379e1e54..fda83140 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/modules/__pycache__/linear_relu.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/ao/nn/intrinsic/quantized/modules/__pycache__/linear_relu.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/qat/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/qat/__pycache__/__init__.cpython-311.pyc index 9e63a144..8e81392c 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/qat/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/qat/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/qat/dynamic/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/qat/dynamic/__pycache__/__init__.cpython-311.pyc index e1cf53e4..bc20b5de 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/qat/dynamic/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/qat/dynamic/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/qat/dynamic/modules/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/qat/dynamic/modules/__pycache__/__init__.cpython-311.pyc index 7586a21e..832cb3d9 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/qat/dynamic/modules/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/qat/dynamic/modules/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/qat/dynamic/modules/__pycache__/linear.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/qat/dynamic/modules/__pycache__/linear.cpython-311.pyc index a066b440..21916b07 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/qat/dynamic/modules/__pycache__/linear.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/qat/dynamic/modules/__pycache__/linear.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/qat/modules/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/qat/modules/__pycache__/__init__.cpython-311.pyc index 
767b89ec..0cac7ac3 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/qat/modules/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/qat/modules/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/qat/modules/__pycache__/conv.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/qat/modules/__pycache__/conv.cpython-311.pyc index fecbda76..a3cd7172 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/qat/modules/__pycache__/conv.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/qat/modules/__pycache__/conv.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/qat/modules/__pycache__/embedding_ops.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/qat/modules/__pycache__/embedding_ops.cpython-311.pyc index 0d6b0f1a..89d14ff9 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/qat/modules/__pycache__/embedding_ops.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/qat/modules/__pycache__/embedding_ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/qat/modules/__pycache__/linear.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/qat/modules/__pycache__/linear.cpython-311.pyc index ba5f47aa..744b110a 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/qat/modules/__pycache__/linear.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/qat/modules/__pycache__/linear.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantizable/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantizable/__pycache__/__init__.cpython-311.pyc index 6013b663..3265cf93 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantizable/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantizable/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantizable/modules/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/ao/nn/quantizable/modules/__pycache__/__init__.cpython-311.pyc index 55fb4c60..59736567 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantizable/modules/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantizable/modules/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantizable/modules/__pycache__/activation.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantizable/modules/__pycache__/activation.cpython-311.pyc index bd16ffc0..80b1508f 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantizable/modules/__pycache__/activation.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantizable/modules/__pycache__/activation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantizable/modules/__pycache__/rnn.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantizable/modules/__pycache__/rnn.cpython-311.pyc index 6f2ac383..edd438f2 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantizable/modules/__pycache__/rnn.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantizable/modules/__pycache__/rnn.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/__pycache__/__init__.cpython-311.pyc index 7c0dc81a..35c5fa22 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/__pycache__/functional.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/__pycache__/functional.cpython-311.pyc index f89f6110..b79586fd 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/__pycache__/functional.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/ao/nn/quantized/__pycache__/functional.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/dynamic/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/dynamic/__pycache__/__init__.cpython-311.pyc index a05b21f1..704b8ba0 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/dynamic/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/dynamic/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/dynamic/modules/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/dynamic/modules/__pycache__/__init__.cpython-311.pyc index bfbf7372..77297b5a 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/dynamic/modules/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/dynamic/modules/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/dynamic/modules/__pycache__/conv.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/dynamic/modules/__pycache__/conv.cpython-311.pyc index d44ba602..3c2170af 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/dynamic/modules/__pycache__/conv.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/dynamic/modules/__pycache__/conv.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/dynamic/modules/__pycache__/linear.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/dynamic/modules/__pycache__/linear.cpython-311.pyc index 7105a981..5f4eafb9 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/dynamic/modules/__pycache__/linear.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/dynamic/modules/__pycache__/linear.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/ao/nn/quantized/dynamic/modules/__pycache__/rnn.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/dynamic/modules/__pycache__/rnn.cpython-311.pyc index fd678d69..a1825c0b 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/dynamic/modules/__pycache__/rnn.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/dynamic/modules/__pycache__/rnn.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/__init__.cpython-311.pyc index 81d6d608..87830326 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/activation.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/activation.cpython-311.pyc index 2117733d..b0ea29f5 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/activation.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/activation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/batchnorm.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/batchnorm.cpython-311.pyc index 8df6ec9a..4ba61460 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/batchnorm.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/batchnorm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/conv.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/conv.cpython-311.pyc index f8bff551..daea0bc2 100644 Binary 
files a/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/conv.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/conv.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/dropout.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/dropout.cpython-311.pyc index 7e91a5a4..fe0be3fc 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/dropout.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/dropout.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/embedding_ops.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/embedding_ops.cpython-311.pyc index 90cd404f..88dd9adf 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/embedding_ops.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/embedding_ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/functional_modules.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/functional_modules.cpython-311.pyc index ba88ff84..05538bc7 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/functional_modules.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/functional_modules.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/linear.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/linear.cpython-311.pyc index 5b0ce7c4..b59b0cc2 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/linear.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/linear.cpython-311.pyc differ 
diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/normalization.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/normalization.cpython-311.pyc index 103a3548..09ed612e 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/normalization.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/normalization.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/rnn.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/rnn.cpython-311.pyc index 8be7dcd2..aba76ac1 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/rnn.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/rnn.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/utils.cpython-311.pyc index 4c995310..c92ad0d6 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/modules/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/__pycache__/__init__.cpython-311.pyc index 6cb99cc1..3c16feb3 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/__init__.cpython-311.pyc index 1ad837cb..1eaf8296 
100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/conv.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/conv.cpython-311.pyc index 8ee596b7..e6003d1a 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/conv.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/conv.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/linear.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/linear.cpython-311.pyc index 3e31d3a5..2ab3e58a 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/linear.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/linear.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/rnn.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/rnn.cpython-311.pyc index e2370bfa..e5adc0b4 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/rnn.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/rnn.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/sparse.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/sparse.cpython-311.pyc index bc5a7412..d6216005 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/sparse.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/sparse.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/utils.cpython-311.pyc index f0a14090..44ec19ff 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/quantized/reference/modules/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/sparse/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/sparse/__pycache__/__init__.cpython-311.pyc index e0b3cfe5..14d38eb6 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/sparse/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/sparse/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/sparse/quantized/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/sparse/quantized/__pycache__/__init__.cpython-311.pyc index 67a9c64c..b42f3637 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/sparse/quantized/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/sparse/quantized/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/sparse/quantized/__pycache__/linear.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/sparse/quantized/__pycache__/linear.cpython-311.pyc index 61faaaaf..5b374282 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/sparse/quantized/__pycache__/linear.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/sparse/quantized/__pycache__/linear.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/sparse/quantized/__pycache__/utils.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/ao/nn/sparse/quantized/__pycache__/utils.cpython-311.pyc index 99ba6515..c9da2d0c 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/sparse/quantized/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/sparse/quantized/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/sparse/quantized/dynamic/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/sparse/quantized/dynamic/__pycache__/__init__.cpython-311.pyc index 9ff3c42d..f59aa88f 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/sparse/quantized/dynamic/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/sparse/quantized/dynamic/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/nn/sparse/quantized/dynamic/__pycache__/linear.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/nn/sparse/quantized/dynamic/__pycache__/linear.cpython-311.pyc index 1aa1d680..b1fca87e 100644 Binary files a/.venv/Lib/site-packages/torch/ao/nn/sparse/quantized/dynamic/__pycache__/linear.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/nn/sparse/quantized/dynamic/__pycache__/linear.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/__init__.cpython-311.pyc index 7479082f..60220888 100644 Binary files a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/fake_quantize.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/fake_quantize.cpython-311.pyc index b4fe3096..e40ff1ae 100644 Binary files a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/fake_quantize.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/fake_quantize.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/fuse_modules.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/fuse_modules.cpython-311.pyc index f7614308..22bf6e7f 100644 Binary files a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/fuse_modules.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/fuse_modules.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/fuser_method_mappings.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/fuser_method_mappings.cpython-311.pyc index 1f675c19..97ac219e 100644 Binary files a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/fuser_method_mappings.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/fuser_method_mappings.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/observer.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/observer.cpython-311.pyc index dbc5552f..05798fd8 100644 Binary files a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/observer.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/observer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/qconfig.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/qconfig.cpython-311.pyc index c3703d88..905355f5 100644 Binary files a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/qconfig.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/qconfig.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/qconfig_mapping.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/qconfig_mapping.cpython-311.pyc index 
35c76019..649968a7 100644 Binary files a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/qconfig_mapping.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/qconfig_mapping.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/quant_type.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/quant_type.cpython-311.pyc index 152f0d96..508caa69 100644 Binary files a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/quant_type.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/quant_type.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/quantization_mappings.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/quantization_mappings.cpython-311.pyc index c81d4997..df657c36 100644 Binary files a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/quantization_mappings.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/quantization_mappings.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/quantize.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/quantize.cpython-311.pyc index e99aa3ee..d2e1067d 100644 Binary files a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/quantize.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/quantize.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/quantize_jit.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/quantize_jit.cpython-311.pyc index b6b0fa3d..d758e3c3 100644 Binary files a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/quantize_jit.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/quantize_jit.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/stubs.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/stubs.cpython-311.pyc index d0d7b9ee..bd223f7a 100644 Binary files a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/stubs.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/stubs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/utils.cpython-311.pyc index f554b457..04c66fa7 100644 Binary files a/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/quantization/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/quantization/pt2e/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/quantization/pt2e/__pycache__/__init__.cpython-311.pyc index eb7da4c0..23ed416b 100644 Binary files a/.venv/Lib/site-packages/torch/ao/quantization/pt2e/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/quantization/pt2e/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/quantization/pt2e/__pycache__/export_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/quantization/pt2e/__pycache__/export_utils.cpython-311.pyc index e516a6cd..0fad5fee 100644 Binary files a/.venv/Lib/site-packages/torch/ao/quantization/pt2e/__pycache__/export_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/quantization/pt2e/__pycache__/export_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/ao/quantization/pt2e/__pycache__/generate_numeric_debug_handle.cpython-311.pyc b/.venv/Lib/site-packages/torch/ao/quantization/pt2e/__pycache__/generate_numeric_debug_handle.cpython-311.pyc index 34236e96..40abeef3 100644 Binary files 
a/.venv/Lib/site-packages/torch/ao/quantization/pt2e/__pycache__/generate_numeric_debug_handle.cpython-311.pyc and b/.venv/Lib/site-packages/torch/ao/quantization/pt2e/__pycache__/generate_numeric_debug_handle.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/autograd/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/autograd/__pycache__/__init__.cpython-311.pyc index f15d02b2..2974df3a 100644 Binary files a/.venv/Lib/site-packages/torch/autograd/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/autograd/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/autograd/__pycache__/anomaly_mode.cpython-311.pyc b/.venv/Lib/site-packages/torch/autograd/__pycache__/anomaly_mode.cpython-311.pyc index 2849e67f..090056a0 100644 Binary files a/.venv/Lib/site-packages/torch/autograd/__pycache__/anomaly_mode.cpython-311.pyc and b/.venv/Lib/site-packages/torch/autograd/__pycache__/anomaly_mode.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/autograd/__pycache__/forward_ad.cpython-311.pyc b/.venv/Lib/site-packages/torch/autograd/__pycache__/forward_ad.cpython-311.pyc index 61d7bf02..0f3e447f 100644 Binary files a/.venv/Lib/site-packages/torch/autograd/__pycache__/forward_ad.cpython-311.pyc and b/.venv/Lib/site-packages/torch/autograd/__pycache__/forward_ad.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/autograd/__pycache__/function.cpython-311.pyc b/.venv/Lib/site-packages/torch/autograd/__pycache__/function.cpython-311.pyc index 889d29c6..bf01c231 100644 Binary files a/.venv/Lib/site-packages/torch/autograd/__pycache__/function.cpython-311.pyc and b/.venv/Lib/site-packages/torch/autograd/__pycache__/function.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/autograd/__pycache__/functional.cpython-311.pyc b/.venv/Lib/site-packages/torch/autograd/__pycache__/functional.cpython-311.pyc index fb1886bb..95fa95aa 100644 Binary files 
a/.venv/Lib/site-packages/torch/autograd/__pycache__/functional.cpython-311.pyc and b/.venv/Lib/site-packages/torch/autograd/__pycache__/functional.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/autograd/__pycache__/grad_mode.cpython-311.pyc b/.venv/Lib/site-packages/torch/autograd/__pycache__/grad_mode.cpython-311.pyc index 2b2650f2..a4ab7773 100644 Binary files a/.venv/Lib/site-packages/torch/autograd/__pycache__/grad_mode.cpython-311.pyc and b/.venv/Lib/site-packages/torch/autograd/__pycache__/grad_mode.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/autograd/__pycache__/gradcheck.cpython-311.pyc b/.venv/Lib/site-packages/torch/autograd/__pycache__/gradcheck.cpython-311.pyc index 5d3dffb5..872a7b7f 100644 Binary files a/.venv/Lib/site-packages/torch/autograd/__pycache__/gradcheck.cpython-311.pyc and b/.venv/Lib/site-packages/torch/autograd/__pycache__/gradcheck.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/autograd/__pycache__/graph.cpython-311.pyc b/.venv/Lib/site-packages/torch/autograd/__pycache__/graph.cpython-311.pyc index 7c056604..bccc9cf2 100644 Binary files a/.venv/Lib/site-packages/torch/autograd/__pycache__/graph.cpython-311.pyc and b/.venv/Lib/site-packages/torch/autograd/__pycache__/graph.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/autograd/__pycache__/profiler.cpython-311.pyc b/.venv/Lib/site-packages/torch/autograd/__pycache__/profiler.cpython-311.pyc index 91756cdb..4687d472 100644 Binary files a/.venv/Lib/site-packages/torch/autograd/__pycache__/profiler.cpython-311.pyc and b/.venv/Lib/site-packages/torch/autograd/__pycache__/profiler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/autograd/__pycache__/profiler_util.cpython-311.pyc b/.venv/Lib/site-packages/torch/autograd/__pycache__/profiler_util.cpython-311.pyc index 0e340b0e..3bf45367 100644 Binary files a/.venv/Lib/site-packages/torch/autograd/__pycache__/profiler_util.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/autograd/__pycache__/profiler_util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/autograd/__pycache__/variable.cpython-311.pyc b/.venv/Lib/site-packages/torch/autograd/__pycache__/variable.cpython-311.pyc index 7fbb0891..4f7b566c 100644 Binary files a/.venv/Lib/site-packages/torch/autograd/__pycache__/variable.cpython-311.pyc and b/.venv/Lib/site-packages/torch/autograd/__pycache__/variable.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/backends/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/backends/__pycache__/__init__.cpython-311.pyc index fea8b38d..8ccca21a 100644 Binary files a/.venv/Lib/site-packages/torch/backends/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/backends/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/backends/cpu/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/backends/cpu/__pycache__/__init__.cpython-311.pyc index 5a57c248..92d1e600 100644 Binary files a/.venv/Lib/site-packages/torch/backends/cpu/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/backends/cpu/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/backends/cuda/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/backends/cuda/__pycache__/__init__.cpython-311.pyc index 241d6056..b5e81d28 100644 Binary files a/.venv/Lib/site-packages/torch/backends/cuda/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/backends/cuda/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/backends/cudnn/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/backends/cudnn/__pycache__/__init__.cpython-311.pyc index c9515790..4ceb594f 100644 Binary files a/.venv/Lib/site-packages/torch/backends/cudnn/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/backends/cudnn/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/backends/mha/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/backends/mha/__pycache__/__init__.cpython-311.pyc index 8780fb6f..ab332887 100644 Binary files a/.venv/Lib/site-packages/torch/backends/mha/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/backends/mha/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/backends/mkl/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/backends/mkl/__pycache__/__init__.cpython-311.pyc index ae829d0e..bfb6a49f 100644 Binary files a/.venv/Lib/site-packages/torch/backends/mkl/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/backends/mkl/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/backends/mkldnn/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/backends/mkldnn/__pycache__/__init__.cpython-311.pyc index 18eba1db..477f79b3 100644 Binary files a/.venv/Lib/site-packages/torch/backends/mkldnn/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/backends/mkldnn/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/backends/mps/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/backends/mps/__pycache__/__init__.cpython-311.pyc index 6868f2a6..37723e37 100644 Binary files a/.venv/Lib/site-packages/torch/backends/mps/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/backends/mps/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/backends/nnpack/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/backends/nnpack/__pycache__/__init__.cpython-311.pyc index 58999fc3..20314615 100644 Binary files a/.venv/Lib/site-packages/torch/backends/nnpack/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/backends/nnpack/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/backends/openmp/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/backends/openmp/__pycache__/__init__.cpython-311.pyc index d9ecd106..80eacff2 100644 Binary files a/.venv/Lib/site-packages/torch/backends/openmp/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/backends/openmp/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/backends/opt_einsum/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/backends/opt_einsum/__pycache__/__init__.cpython-311.pyc index 4172cc81..ea67fb4a 100644 Binary files a/.venv/Lib/site-packages/torch/backends/opt_einsum/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/backends/opt_einsum/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/backends/quantized/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/backends/quantized/__pycache__/__init__.cpython-311.pyc index 52445c60..640315aa 100644 Binary files a/.venv/Lib/site-packages/torch/backends/quantized/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/backends/quantized/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/compiler/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/compiler/__pycache__/__init__.cpython-311.pyc index 6fb7fcd4..0f76428f 100644 Binary files a/.venv/Lib/site-packages/torch/compiler/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/compiler/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/cpu/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/cpu/__pycache__/__init__.cpython-311.pyc index 9af8ead6..e2e6bbdf 100644 Binary files a/.venv/Lib/site-packages/torch/cpu/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/cpu/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/cpu/amp/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/cpu/amp/__pycache__/__init__.cpython-311.pyc index 93bdb26c..829de562 100644 Binary files a/.venv/Lib/site-packages/torch/cpu/amp/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/cpu/amp/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/cpu/amp/__pycache__/autocast_mode.cpython-311.pyc b/.venv/Lib/site-packages/torch/cpu/amp/__pycache__/autocast_mode.cpython-311.pyc index e132132d..6782e03b 100644 Binary files a/.venv/Lib/site-packages/torch/cpu/amp/__pycache__/autocast_mode.cpython-311.pyc and b/.venv/Lib/site-packages/torch/cpu/amp/__pycache__/autocast_mode.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/cpu/amp/__pycache__/grad_scaler.cpython-311.pyc b/.venv/Lib/site-packages/torch/cpu/amp/__pycache__/grad_scaler.cpython-311.pyc index 57d1172c..77c78f8c 100644 Binary files a/.venv/Lib/site-packages/torch/cpu/amp/__pycache__/grad_scaler.cpython-311.pyc and b/.venv/Lib/site-packages/torch/cpu/amp/__pycache__/grad_scaler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/cuda/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/cuda/__pycache__/__init__.cpython-311.pyc index 3e1dd527..4a05eb06 100644 Binary files a/.venv/Lib/site-packages/torch/cuda/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/cuda/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/cuda/__pycache__/_memory_viz.cpython-311.pyc b/.venv/Lib/site-packages/torch/cuda/__pycache__/_memory_viz.cpython-311.pyc index d0a49839..49753594 100644 Binary files a/.venv/Lib/site-packages/torch/cuda/__pycache__/_memory_viz.cpython-311.pyc and b/.venv/Lib/site-packages/torch/cuda/__pycache__/_memory_viz.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/cuda/__pycache__/_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/cuda/__pycache__/_utils.cpython-311.pyc index 55fa7428..7bbc20a0 100644 Binary files a/.venv/Lib/site-packages/torch/cuda/__pycache__/_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/cuda/__pycache__/_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/cuda/__pycache__/graphs.cpython-311.pyc b/.venv/Lib/site-packages/torch/cuda/__pycache__/graphs.cpython-311.pyc index 2ef00be8..5c7635f9 100644 Binary files a/.venv/Lib/site-packages/torch/cuda/__pycache__/graphs.cpython-311.pyc and b/.venv/Lib/site-packages/torch/cuda/__pycache__/graphs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/cuda/__pycache__/jiterator.cpython-311.pyc b/.venv/Lib/site-packages/torch/cuda/__pycache__/jiterator.cpython-311.pyc index 3aa2d72f..f4b39b4a 100644 Binary files a/.venv/Lib/site-packages/torch/cuda/__pycache__/jiterator.cpython-311.pyc and b/.venv/Lib/site-packages/torch/cuda/__pycache__/jiterator.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/cuda/__pycache__/memory.cpython-311.pyc b/.venv/Lib/site-packages/torch/cuda/__pycache__/memory.cpython-311.pyc index 97165e8d..78ccb902 100644 Binary files a/.venv/Lib/site-packages/torch/cuda/__pycache__/memory.cpython-311.pyc and b/.venv/Lib/site-packages/torch/cuda/__pycache__/memory.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/cuda/__pycache__/nccl.cpython-311.pyc b/.venv/Lib/site-packages/torch/cuda/__pycache__/nccl.cpython-311.pyc index 36e6f8ea..b0dc07db 100644 Binary files a/.venv/Lib/site-packages/torch/cuda/__pycache__/nccl.cpython-311.pyc and b/.venv/Lib/site-packages/torch/cuda/__pycache__/nccl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/cuda/__pycache__/nvtx.cpython-311.pyc b/.venv/Lib/site-packages/torch/cuda/__pycache__/nvtx.cpython-311.pyc index 315fe5ef..41701e40 100644 Binary files 
a/.venv/Lib/site-packages/torch/cuda/__pycache__/nvtx.cpython-311.pyc and b/.venv/Lib/site-packages/torch/cuda/__pycache__/nvtx.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/cuda/__pycache__/profiler.cpython-311.pyc b/.venv/Lib/site-packages/torch/cuda/__pycache__/profiler.cpython-311.pyc index 01748064..96e81ecf 100644 Binary files a/.venv/Lib/site-packages/torch/cuda/__pycache__/profiler.cpython-311.pyc and b/.venv/Lib/site-packages/torch/cuda/__pycache__/profiler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/cuda/__pycache__/random.cpython-311.pyc b/.venv/Lib/site-packages/torch/cuda/__pycache__/random.cpython-311.pyc index ce0fa4f4..7c89962b 100644 Binary files a/.venv/Lib/site-packages/torch/cuda/__pycache__/random.cpython-311.pyc and b/.venv/Lib/site-packages/torch/cuda/__pycache__/random.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/cuda/__pycache__/sparse.cpython-311.pyc b/.venv/Lib/site-packages/torch/cuda/__pycache__/sparse.cpython-311.pyc index e6338b09..9e09e194 100644 Binary files a/.venv/Lib/site-packages/torch/cuda/__pycache__/sparse.cpython-311.pyc and b/.venv/Lib/site-packages/torch/cuda/__pycache__/sparse.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/cuda/__pycache__/streams.cpython-311.pyc b/.venv/Lib/site-packages/torch/cuda/__pycache__/streams.cpython-311.pyc index 4390823d..ebe2c04a 100644 Binary files a/.venv/Lib/site-packages/torch/cuda/__pycache__/streams.cpython-311.pyc and b/.venv/Lib/site-packages/torch/cuda/__pycache__/streams.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/cuda/amp/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/cuda/amp/__pycache__/__init__.cpython-311.pyc index 8eddafc4..619b4dc6 100644 Binary files a/.venv/Lib/site-packages/torch/cuda/amp/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/cuda/amp/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/cuda/amp/__pycache__/autocast_mode.cpython-311.pyc b/.venv/Lib/site-packages/torch/cuda/amp/__pycache__/autocast_mode.cpython-311.pyc index 1e4dae92..3915b26d 100644 Binary files a/.venv/Lib/site-packages/torch/cuda/amp/__pycache__/autocast_mode.cpython-311.pyc and b/.venv/Lib/site-packages/torch/cuda/amp/__pycache__/autocast_mode.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/cuda/amp/__pycache__/common.cpython-311.pyc b/.venv/Lib/site-packages/torch/cuda/amp/__pycache__/common.cpython-311.pyc index f3b96520..46b76d0a 100644 Binary files a/.venv/Lib/site-packages/torch/cuda/amp/__pycache__/common.cpython-311.pyc and b/.venv/Lib/site-packages/torch/cuda/amp/__pycache__/common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/cuda/amp/__pycache__/grad_scaler.cpython-311.pyc b/.venv/Lib/site-packages/torch/cuda/amp/__pycache__/grad_scaler.cpython-311.pyc index b23af159..b7bf7016 100644 Binary files a/.venv/Lib/site-packages/torch/cuda/amp/__pycache__/grad_scaler.cpython-311.pyc and b/.venv/Lib/site-packages/torch/cuda/amp/__pycache__/grad_scaler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/__pycache__/__init__.cpython-311.pyc index 78af84af..d47f17f2 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/__pycache__/_functional_collectives.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/__pycache__/_functional_collectives.cpython-311.pyc index 98587048..359365eb 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/__pycache__/_functional_collectives.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/__pycache__/_functional_collectives.cpython-311.pyc differ diff 
--git a/.venv/Lib/site-packages/torch/distributed/__pycache__/_functional_collectives_impl.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/__pycache__/_functional_collectives_impl.cpython-311.pyc index 16e4599e..3f881873 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/__pycache__/_functional_collectives_impl.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/__pycache__/_functional_collectives_impl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/__pycache__/c10d_logger.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/__pycache__/c10d_logger.cpython-311.pyc index c52e3c5d..572f5372 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/__pycache__/c10d_logger.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/__pycache__/c10d_logger.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/__pycache__/constants.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/__pycache__/constants.cpython-311.pyc index a5170549..95284ebf 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/__pycache__/constants.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/__pycache__/constants.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/__pycache__/device_mesh.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/__pycache__/device_mesh.cpython-311.pyc index 8c1204f5..1b6c105e 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/__pycache__/device_mesh.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/__pycache__/device_mesh.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/__pycache__/distributed_c10d.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/__pycache__/distributed_c10d.cpython-311.pyc index 28201ca9..d52b1257 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/__pycache__/distributed_c10d.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/distributed/__pycache__/distributed_c10d.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/__pycache__/logging_handlers.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/__pycache__/logging_handlers.cpython-311.pyc index 96902c5e..d7dc6abf 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/__pycache__/logging_handlers.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/__pycache__/logging_handlers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/__pycache__/remote_device.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/__pycache__/remote_device.cpython-311.pyc index 544a1015..83bd568b 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/__pycache__/remote_device.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/__pycache__/remote_device.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/__pycache__/rendezvous.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/__pycache__/rendezvous.cpython-311.pyc index 47450d8c..456d8ea4 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/__pycache__/rendezvous.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/__pycache__/rendezvous.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/__pycache__/utils.cpython-311.pyc index b07db3f7..a6b94aa7 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/__init__.cpython-311.pyc index 47c67426..09c5e1d4 100644 Binary files 
a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/_collective_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/_collective_utils.cpython-311.pyc index 6e5c0ded..f58f0f09 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/_collective_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/_collective_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/_utils.cpython-311.pyc index 44510d84..34a8e45b 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/api.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/api.cpython-311.pyc index b386d108..1daf28ff 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/api.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/api.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/device_mesh.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/device_mesh.cpython-311.pyc index 2833e513..633a85ee 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/device_mesh.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/device_mesh.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/dispatch.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/dispatch.cpython-311.pyc index c158a9fa..43e76d88 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/dispatch.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/dispatch.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/op_schema.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/op_schema.cpython-311.pyc index f0c159db..1d1f68cf 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/op_schema.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/op_schema.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/placement_types.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/placement_types.cpython-311.pyc index 8146ebba..ef4321f7 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/placement_types.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/placement_types.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/random.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/random.cpython-311.pyc index 89001408..3b7eb806 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/random.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/random.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/redistribute.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/redistribute.cpython-311.pyc index 88a47784..93b417aa 100644 Binary files 
a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/redistribute.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/redistribute.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/sharding_prop.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/sharding_prop.cpython-311.pyc index b7b44d61..81e8f6e3 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/sharding_prop.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/sharding_prop.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/tp_conv.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/tp_conv.cpython-311.pyc index 954b6748..7058d733 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/tp_conv.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/__pycache__/tp_conv.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/__init__.cpython-311.pyc index d53fa030..3280a1a4 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/basic_strategy.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/basic_strategy.cpython-311.pyc index 049e87da..f067e320 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/basic_strategy.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/basic_strategy.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/common_rules.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/common_rules.cpython-311.pyc index 29416497..fc1b2f43 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/common_rules.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/common_rules.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/conv_ops.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/conv_ops.cpython-311.pyc index ecbec2e6..5dd00043 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/conv_ops.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/conv_ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/embedding_ops.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/embedding_ops.cpython-311.pyc index ce00c4c0..0026d8dc 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/embedding_ops.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/embedding_ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/experimental_ops.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/experimental_ops.cpython-311.pyc index 5a86401f..d5b6dbb5 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/experimental_ops.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/experimental_ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/math_ops.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/math_ops.cpython-311.pyc index a37f81fd..24dfd5ad 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/math_ops.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/math_ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/matrix_ops.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/matrix_ops.cpython-311.pyc index af82948c..6481cb95 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/matrix_ops.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/matrix_ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/pointwise_ops.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/pointwise_ops.cpython-311.pyc index 31c1c15a..126dea26 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/pointwise_ops.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/pointwise_ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/random_ops.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/random_ops.cpython-311.pyc index 97bf8f96..5a7c5db6 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/random_ops.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/random_ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/tensor_ops.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/tensor_ops.cpython-311.pyc index 64644194..b542a100 100644 Binary files 
a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/tensor_ops.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/tensor_ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/utils.cpython-311.pyc index b44c69dd..1b48c98a 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/view_ops.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/view_ops.cpython-311.pyc index b80f46da..5a6108aa 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/view_ops.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/_tensor/ops/__pycache__/view_ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/algorithms/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/algorithms/__pycache__/__init__.cpython-311.pyc index 4da08d06..255677e1 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/algorithms/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/algorithms/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/algorithms/__pycache__/join.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/algorithms/__pycache__/join.cpython-311.pyc index 694f4dbb..2da49fe3 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/algorithms/__pycache__/join.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/algorithms/__pycache__/join.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/distributed/autograd/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/autograd/__pycache__/__init__.cpython-311.pyc index 113957ad..6c3d73d1 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/autograd/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/autograd/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributed/rpc/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributed/rpc/__pycache__/__init__.cpython-311.pyc index 8e7ca003..6f38c957 100644 Binary files a/.venv/Lib/site-packages/torch/distributed/rpc/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributed/rpc/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/__init__.cpython-311.pyc index 5f9c1e89..d34d0b1f 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/bernoulli.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/bernoulli.cpython-311.pyc index 0ac8a9f6..2162ad6b 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/bernoulli.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/bernoulli.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/beta.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/beta.cpython-311.pyc index 2b97fb90..f3f4b65b 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/beta.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/distributions/__pycache__/beta.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/binomial.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/binomial.cpython-311.pyc index 3b3da824..d1f8a4ab 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/binomial.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/binomial.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/categorical.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/categorical.cpython-311.pyc index c5942ba8..440c216a 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/categorical.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/categorical.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/cauchy.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/cauchy.cpython-311.pyc index 3fbfd3a0..4a053de1 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/cauchy.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/cauchy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/chi2.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/chi2.cpython-311.pyc index e3f02b35..65c767e2 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/chi2.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/chi2.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/constraint_registry.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/constraint_registry.cpython-311.pyc index e54db0c8..0cb58535 100644 Binary files 
a/.venv/Lib/site-packages/torch/distributions/__pycache__/constraint_registry.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/constraint_registry.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/constraints.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/constraints.cpython-311.pyc index 1fe6ad66..8a96b6c1 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/constraints.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/constraints.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/continuous_bernoulli.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/continuous_bernoulli.cpython-311.pyc index fbea94b5..8e79542c 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/continuous_bernoulli.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/continuous_bernoulli.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/dirichlet.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/dirichlet.cpython-311.pyc index 04ac794d..192f0565 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/dirichlet.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/dirichlet.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/distribution.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/distribution.cpython-311.pyc index a6c0797b..ac711b2c 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/distribution.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/distribution.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/exp_family.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/distributions/__pycache__/exp_family.cpython-311.pyc index 70b473ab..7ba644cb 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/exp_family.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/exp_family.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/exponential.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/exponential.cpython-311.pyc index c75fd091..145e77cd 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/exponential.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/exponential.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/fishersnedecor.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/fishersnedecor.cpython-311.pyc index 3c49106b..7dea483f 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/fishersnedecor.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/fishersnedecor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/gamma.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/gamma.cpython-311.pyc index 8ff4d4b3..2e609f95 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/gamma.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/gamma.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/geometric.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/geometric.cpython-311.pyc index 58882159..0b60e5f3 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/geometric.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/geometric.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/distributions/__pycache__/gumbel.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/gumbel.cpython-311.pyc index d1de0544..efd91e79 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/gumbel.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/gumbel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/half_cauchy.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/half_cauchy.cpython-311.pyc index 0db69d49..b99e2115 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/half_cauchy.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/half_cauchy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/half_normal.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/half_normal.cpython-311.pyc index 2c5cb99c..4b8486d8 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/half_normal.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/half_normal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/independent.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/independent.cpython-311.pyc index ff738ffb..441a4bf5 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/independent.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/independent.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/inverse_gamma.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/inverse_gamma.cpython-311.pyc index 1eded7d6..2774dc28 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/inverse_gamma.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/distributions/__pycache__/inverse_gamma.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/kl.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/kl.cpython-311.pyc index 8023ab52..e4ec4882 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/kl.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/kl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/kumaraswamy.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/kumaraswamy.cpython-311.pyc index 67f87f52..8aeb7a4f 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/kumaraswamy.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/kumaraswamy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/laplace.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/laplace.cpython-311.pyc index 26ad50f1..858d5a8d 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/laplace.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/laplace.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/lkj_cholesky.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/lkj_cholesky.cpython-311.pyc index 03c91ea3..0778f5a4 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/lkj_cholesky.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/lkj_cholesky.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/log_normal.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/log_normal.cpython-311.pyc index baed68fd..f8794d7e 100644 Binary files 
a/.venv/Lib/site-packages/torch/distributions/__pycache__/log_normal.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/log_normal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/logistic_normal.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/logistic_normal.cpython-311.pyc index 50b1c0cd..dd9f3086 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/logistic_normal.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/logistic_normal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/lowrank_multivariate_normal.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/lowrank_multivariate_normal.cpython-311.pyc index e729ff3b..d7364390 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/lowrank_multivariate_normal.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/lowrank_multivariate_normal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/mixture_same_family.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/mixture_same_family.cpython-311.pyc index 169b399e..7950e562 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/mixture_same_family.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/mixture_same_family.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/multinomial.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/multinomial.cpython-311.pyc index fa1280cd..42157bf7 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/multinomial.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/multinomial.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/distributions/__pycache__/multivariate_normal.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/multivariate_normal.cpython-311.pyc index 704565e2..f618866a 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/multivariate_normal.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/multivariate_normal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/negative_binomial.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/negative_binomial.cpython-311.pyc index 3ae43285..0d61752b 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/negative_binomial.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/negative_binomial.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/normal.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/normal.cpython-311.pyc index 1d050ee6..f90812e0 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/normal.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/normal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/one_hot_categorical.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/one_hot_categorical.cpython-311.pyc index 6a127f0d..9bb799fc 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/one_hot_categorical.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/one_hot_categorical.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/pareto.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/pareto.cpython-311.pyc index 5c6b6f86..c4175b80 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/pareto.cpython-311.pyc 
and b/.venv/Lib/site-packages/torch/distributions/__pycache__/pareto.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/poisson.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/poisson.cpython-311.pyc index 3143938a..218c2561 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/poisson.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/poisson.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/relaxed_bernoulli.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/relaxed_bernoulli.cpython-311.pyc index f3f852aa..860f23ab 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/relaxed_bernoulli.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/relaxed_bernoulli.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/relaxed_categorical.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/relaxed_categorical.cpython-311.pyc index cd979ab8..6c164fc5 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/relaxed_categorical.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/relaxed_categorical.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/studentT.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/studentT.cpython-311.pyc index 0cb143e2..4a162d70 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/studentT.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/studentT.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/transformed_distribution.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/transformed_distribution.cpython-311.pyc index 6544446c..86b1379a 
100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/transformed_distribution.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/transformed_distribution.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/transforms.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/transforms.cpython-311.pyc index b83e3d82..92d07e54 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/transforms.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/transforms.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/uniform.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/uniform.cpython-311.pyc index bbae3b49..befb827e 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/uniform.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/uniform.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/utils.cpython-311.pyc index 746764e0..2864e727 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/von_mises.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/von_mises.cpython-311.pyc index f9d08f15..2d6ddac7 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/von_mises.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/von_mises.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/weibull.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/weibull.cpython-311.pyc 
index 52aa7c8e..6da62187 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/weibull.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/weibull.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/distributions/__pycache__/wishart.cpython-311.pyc b/.venv/Lib/site-packages/torch/distributions/__pycache__/wishart.cpython-311.pyc index a5a5ddaf..2553e86d 100644 Binary files a/.venv/Lib/site-packages/torch/distributions/__pycache__/wishart.cpython-311.pyc and b/.venv/Lib/site-packages/torch/distributions/__pycache__/wishart.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/export/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/export/__pycache__/__init__.cpython-311.pyc index f84ff55d..044218e7 100644 Binary files a/.venv/Lib/site-packages/torch/export/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/export/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/export/__pycache__/_tree_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/export/__pycache__/_tree_utils.cpython-311.pyc index ef4a607f..c12d7fd7 100644 Binary files a/.venv/Lib/site-packages/torch/export/__pycache__/_tree_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/export/__pycache__/_tree_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/export/__pycache__/dynamic_shapes.cpython-311.pyc b/.venv/Lib/site-packages/torch/export/__pycache__/dynamic_shapes.cpython-311.pyc index e6d2084c..d8fd0d91 100644 Binary files a/.venv/Lib/site-packages/torch/export/__pycache__/dynamic_shapes.cpython-311.pyc and b/.venv/Lib/site-packages/torch/export/__pycache__/dynamic_shapes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/export/__pycache__/exported_program.cpython-311.pyc b/.venv/Lib/site-packages/torch/export/__pycache__/exported_program.cpython-311.pyc index 661dba1e..61e50aa8 100644 Binary files 
a/.venv/Lib/site-packages/torch/export/__pycache__/exported_program.cpython-311.pyc and b/.venv/Lib/site-packages/torch/export/__pycache__/exported_program.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/export/__pycache__/graph_signature.cpython-311.pyc b/.venv/Lib/site-packages/torch/export/__pycache__/graph_signature.cpython-311.pyc index b07caea6..f4240242 100644 Binary files a/.venv/Lib/site-packages/torch/export/__pycache__/graph_signature.cpython-311.pyc and b/.venv/Lib/site-packages/torch/export/__pycache__/graph_signature.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/export/__pycache__/unflatten.cpython-311.pyc b/.venv/Lib/site-packages/torch/export/__pycache__/unflatten.cpython-311.pyc index 3e738ac5..594b5f4e 100644 Binary files a/.venv/Lib/site-packages/torch/export/__pycache__/unflatten.cpython-311.pyc and b/.venv/Lib/site-packages/torch/export/__pycache__/unflatten.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fft/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/fft/__pycache__/__init__.cpython-311.pyc index 4a86ba5c..df1bd518 100644 Binary files a/.venv/Lib/site-packages/torch/fft/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fft/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/func/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/func/__pycache__/__init__.cpython-311.pyc index 339bd1a7..418391a5 100644 Binary files a/.venv/Lib/site-packages/torch/func/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/func/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/futures/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/futures/__pycache__/__init__.cpython-311.pyc index 45ab4ae0..0367570c 100644 Binary files a/.venv/Lib/site-packages/torch/futures/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/futures/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/__pycache__/__init__.cpython-311.pyc index 801cdd60..f93b37bd 100644 Binary files a/.venv/Lib/site-packages/torch/fx/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/__pycache__/_compatibility.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/__pycache__/_compatibility.cpython-311.pyc index d15fc6c0..5eda0055 100644 Binary files a/.venv/Lib/site-packages/torch/fx/__pycache__/_compatibility.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/__pycache__/_compatibility.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/__pycache__/_lazy_graph_module.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/__pycache__/_lazy_graph_module.cpython-311.pyc index 82335f4c..58064e7d 100644 Binary files a/.venv/Lib/site-packages/torch/fx/__pycache__/_lazy_graph_module.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/__pycache__/_lazy_graph_module.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/__pycache__/_pytree.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/__pycache__/_pytree.cpython-311.pyc index 723763a8..47fd3999 100644 Binary files a/.venv/Lib/site-packages/torch/fx/__pycache__/_pytree.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/__pycache__/_pytree.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/__pycache__/_symbolic_trace.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/__pycache__/_symbolic_trace.cpython-311.pyc index a4be9754..5628bc97 100644 Binary files a/.venv/Lib/site-packages/torch/fx/__pycache__/_symbolic_trace.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/__pycache__/_symbolic_trace.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/fx/__pycache__/config.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/__pycache__/config.cpython-311.pyc index 258550ca..e2a10988 100644 Binary files a/.venv/Lib/site-packages/torch/fx/__pycache__/config.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/__pycache__/config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/__pycache__/graph.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/__pycache__/graph.cpython-311.pyc index 623c4db1..8e348b09 100644 Binary files a/.venv/Lib/site-packages/torch/fx/__pycache__/graph.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/__pycache__/graph.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/__pycache__/graph_module.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/__pycache__/graph_module.cpython-311.pyc index 91198c67..206615ad 100644 Binary files a/.venv/Lib/site-packages/torch/fx/__pycache__/graph_module.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/__pycache__/graph_module.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/__pycache__/immutable_collections.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/__pycache__/immutable_collections.cpython-311.pyc index 243faccf..db48f860 100644 Binary files a/.venv/Lib/site-packages/torch/fx/__pycache__/immutable_collections.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/__pycache__/immutable_collections.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/__pycache__/interpreter.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/__pycache__/interpreter.cpython-311.pyc index 6a58cfdc..d120f505 100644 Binary files a/.venv/Lib/site-packages/torch/fx/__pycache__/interpreter.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/__pycache__/interpreter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/__pycache__/node.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/__pycache__/node.cpython-311.pyc index 36ef55ea..f0e17ded 
100644 Binary files a/.venv/Lib/site-packages/torch/fx/__pycache__/node.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/__pycache__/node.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/__pycache__/operator_schemas.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/__pycache__/operator_schemas.cpython-311.pyc index ae1279ec..93a49bfc 100644 Binary files a/.venv/Lib/site-packages/torch/fx/__pycache__/operator_schemas.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/__pycache__/operator_schemas.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/__pycache__/proxy.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/__pycache__/proxy.cpython-311.pyc index e2714315..a7faadee 100644 Binary files a/.venv/Lib/site-packages/torch/fx/__pycache__/proxy.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/__pycache__/proxy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/__pycache__/subgraph_rewriter.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/__pycache__/subgraph_rewriter.cpython-311.pyc index 07ef0b79..91ceb9e4 100644 Binary files a/.venv/Lib/site-packages/torch/fx/__pycache__/subgraph_rewriter.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/__pycache__/subgraph_rewriter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/__pycache__/traceback.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/__pycache__/traceback.cpython-311.pyc index be23c1da..00f2141d 100644 Binary files a/.venv/Lib/site-packages/torch/fx/__pycache__/traceback.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/__pycache__/traceback.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/__init__.cpython-311.pyc index ded73cf4..57a5aa05 100644 Binary files a/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/_backward_state.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/_backward_state.cpython-311.pyc index 57b7b3bb..a2b7ce3f 100644 Binary files a/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/_backward_state.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/_backward_state.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/_config.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/_config.cpython-311.pyc index 99bd2bd3..3ad427d0 100644 Binary files a/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/_config.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/_config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/_sym_dispatch_mode.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/_sym_dispatch_mode.cpython-311.pyc index 0a448977..23752510 100644 Binary files a/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/_sym_dispatch_mode.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/_sym_dispatch_mode.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/const_fold.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/const_fold.cpython-311.pyc index 76ebb11a..621e1336 100644 Binary files a/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/const_fold.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/const_fold.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/proxy_tensor.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/proxy_tensor.cpython-311.pyc index 
5c7ee42d..6d4dfb59 100644 Binary files a/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/proxy_tensor.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/proxy_tensor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/recording.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/recording.cpython-311.pyc index b8a95892..698acb56 100644 Binary files a/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/recording.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/recording.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/sym_node.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/sym_node.cpython-311.pyc index 8964639a..296d6d9a 100644 Binary files a/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/sym_node.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/sym_node.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/symbolic_shapes.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/symbolic_shapes.cpython-311.pyc index a31c7c2b..8d47f083 100644 Binary files a/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/symbolic_shapes.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/experimental/__pycache__/symbolic_shapes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/__init__.cpython-311.pyc index 2eee5831..d4a8b598 100644 Binary files a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/fake_tensor_prop.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/fake_tensor_prop.cpython-311.pyc index 6e971a17..4dfbf57b 100644 Binary files a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/fake_tensor_prop.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/fake_tensor_prop.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/graph_drawer.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/graph_drawer.cpython-311.pyc index dd842f73..15bdc06f 100644 Binary files a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/graph_drawer.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/graph_drawer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/graph_manipulation.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/graph_manipulation.cpython-311.pyc index ea1a21fc..a32b5e88 100644 Binary files a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/graph_manipulation.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/graph_manipulation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/net_min_base.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/net_min_base.cpython-311.pyc index 3fa3c3b7..47b2c22e 100644 Binary files a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/net_min_base.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/net_min_base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/operator_support.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/operator_support.cpython-311.pyc index 2829aa40..b0ef6024 100644 Binary files a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/operator_support.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/operator_support.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/param_fetch.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/param_fetch.cpython-311.pyc index c233610b..bf70ef02 100644 Binary files a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/param_fetch.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/param_fetch.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/reinplace.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/reinplace.cpython-311.pyc index 37012f54..6986e86f 100644 Binary files a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/reinplace.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/reinplace.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/shape_prop.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/shape_prop.cpython-311.pyc index 35dc5e4c..ea31f304 100644 Binary files a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/shape_prop.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/shape_prop.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/split_module.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/split_module.cpython-311.pyc index 31aa7841..f0a3ca1b 100644 Binary files a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/split_module.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/split_module.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/split_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/split_utils.cpython-311.pyc index 5b811f13..45d9cc39 100644 Binary files a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/split_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/split_utils.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/splitter_base.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/splitter_base.cpython-311.pyc index 79090e43..57f7d501 100644 Binary files a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/splitter_base.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/splitter_base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/tools_common.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/tools_common.cpython-311.pyc index 4d2aaea9..29ae5e1b 100644 Binary files a/.venv/Lib/site-packages/torch/fx/passes/__pycache__/tools_common.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/passes/__pycache__/tools_common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/passes/infra/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/passes/infra/__pycache__/__init__.cpython-311.pyc index 214b4e0c..9184f149 100644 Binary files a/.venv/Lib/site-packages/torch/fx/passes/infra/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/passes/infra/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/passes/infra/__pycache__/pass_base.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/passes/infra/__pycache__/pass_base.cpython-311.pyc index 27e33a87..8b86ebde 100644 Binary files a/.venv/Lib/site-packages/torch/fx/passes/infra/__pycache__/pass_base.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/passes/infra/__pycache__/pass_base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/passes/infra/__pycache__/pass_manager.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/passes/infra/__pycache__/pass_manager.cpython-311.pyc index 2a7df52c..29b4918e 100644 Binary files a/.venv/Lib/site-packages/torch/fx/passes/infra/__pycache__/pass_manager.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/fx/passes/infra/__pycache__/pass_manager.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/passes/utils/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/passes/utils/__pycache__/__init__.cpython-311.pyc index 1e67f340..31c6924e 100644 Binary files a/.venv/Lib/site-packages/torch/fx/passes/utils/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/passes/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/passes/utils/__pycache__/common.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/passes/utils/__pycache__/common.cpython-311.pyc index 8116281b..e17a2598 100644 Binary files a/.venv/Lib/site-packages/torch/fx/passes/utils/__pycache__/common.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/passes/utils/__pycache__/common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/fx/passes/utils/__pycache__/matcher_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/fx/passes/utils/__pycache__/matcher_utils.cpython-311.pyc index af0d7e6e..13ed0041 100644 Binary files a/.venv/Lib/site-packages/torch/fx/passes/utils/__pycache__/matcher_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/fx/passes/utils/__pycache__/matcher_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/jit/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/jit/__pycache__/__init__.cpython-311.pyc index 51488612..2802ba06 100644 Binary files a/.venv/Lib/site-packages/torch/jit/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/jit/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/jit/__pycache__/_async.cpython-311.pyc b/.venv/Lib/site-packages/torch/jit/__pycache__/_async.cpython-311.pyc index 8c90f6e7..dc14a919 100644 Binary files a/.venv/Lib/site-packages/torch/jit/__pycache__/_async.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/jit/__pycache__/_async.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/jit/__pycache__/_await.cpython-311.pyc b/.venv/Lib/site-packages/torch/jit/__pycache__/_await.cpython-311.pyc index 255f6b24..de07cd39 100644 Binary files a/.venv/Lib/site-packages/torch/jit/__pycache__/_await.cpython-311.pyc and b/.venv/Lib/site-packages/torch/jit/__pycache__/_await.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/jit/__pycache__/_builtins.cpython-311.pyc b/.venv/Lib/site-packages/torch/jit/__pycache__/_builtins.cpython-311.pyc index aec5cba8..bc31b586 100644 Binary files a/.venv/Lib/site-packages/torch/jit/__pycache__/_builtins.cpython-311.pyc and b/.venv/Lib/site-packages/torch/jit/__pycache__/_builtins.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/jit/__pycache__/_check.cpython-311.pyc b/.venv/Lib/site-packages/torch/jit/__pycache__/_check.cpython-311.pyc index 2d0e4ab3..8f1fbcfc 100644 Binary files a/.venv/Lib/site-packages/torch/jit/__pycache__/_check.cpython-311.pyc and b/.venv/Lib/site-packages/torch/jit/__pycache__/_check.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/jit/__pycache__/_dataclass_impls.cpython-311.pyc b/.venv/Lib/site-packages/torch/jit/__pycache__/_dataclass_impls.cpython-311.pyc index 5dedac95..5c294694 100644 Binary files a/.venv/Lib/site-packages/torch/jit/__pycache__/_dataclass_impls.cpython-311.pyc and b/.venv/Lib/site-packages/torch/jit/__pycache__/_dataclass_impls.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/jit/__pycache__/_decomposition_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/jit/__pycache__/_decomposition_utils.cpython-311.pyc index 0abe00e5..8d88e1d4 100644 Binary files a/.venv/Lib/site-packages/torch/jit/__pycache__/_decomposition_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/jit/__pycache__/_decomposition_utils.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/jit/__pycache__/_freeze.cpython-311.pyc b/.venv/Lib/site-packages/torch/jit/__pycache__/_freeze.cpython-311.pyc index 4a5c79fa..c5663b74 100644 Binary files a/.venv/Lib/site-packages/torch/jit/__pycache__/_freeze.cpython-311.pyc and b/.venv/Lib/site-packages/torch/jit/__pycache__/_freeze.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/jit/__pycache__/_fuser.cpython-311.pyc b/.venv/Lib/site-packages/torch/jit/__pycache__/_fuser.cpython-311.pyc index b709eb40..90171e5f 100644 Binary files a/.venv/Lib/site-packages/torch/jit/__pycache__/_fuser.cpython-311.pyc and b/.venv/Lib/site-packages/torch/jit/__pycache__/_fuser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/jit/__pycache__/_ir_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/jit/__pycache__/_ir_utils.cpython-311.pyc index ac913b5a..54395a73 100644 Binary files a/.venv/Lib/site-packages/torch/jit/__pycache__/_ir_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/jit/__pycache__/_ir_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/jit/__pycache__/_monkeytype_config.cpython-311.pyc b/.venv/Lib/site-packages/torch/jit/__pycache__/_monkeytype_config.cpython-311.pyc index b1296895..a742f56b 100644 Binary files a/.venv/Lib/site-packages/torch/jit/__pycache__/_monkeytype_config.cpython-311.pyc and b/.venv/Lib/site-packages/torch/jit/__pycache__/_monkeytype_config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/jit/__pycache__/_recursive.cpython-311.pyc b/.venv/Lib/site-packages/torch/jit/__pycache__/_recursive.cpython-311.pyc index 7041ad6a..2f17a2d9 100644 Binary files a/.venv/Lib/site-packages/torch/jit/__pycache__/_recursive.cpython-311.pyc and b/.venv/Lib/site-packages/torch/jit/__pycache__/_recursive.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/jit/__pycache__/_script.cpython-311.pyc b/.venv/Lib/site-packages/torch/jit/__pycache__/_script.cpython-311.pyc index 
3fd5a32f..b4fde23f 100644 Binary files a/.venv/Lib/site-packages/torch/jit/__pycache__/_script.cpython-311.pyc and b/.venv/Lib/site-packages/torch/jit/__pycache__/_script.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/jit/__pycache__/_serialization.cpython-311.pyc b/.venv/Lib/site-packages/torch/jit/__pycache__/_serialization.cpython-311.pyc index 0a15ba92..994a5984 100644 Binary files a/.venv/Lib/site-packages/torch/jit/__pycache__/_serialization.cpython-311.pyc and b/.venv/Lib/site-packages/torch/jit/__pycache__/_serialization.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/jit/__pycache__/_state.cpython-311.pyc b/.venv/Lib/site-packages/torch/jit/__pycache__/_state.cpython-311.pyc index c0f0494c..53b26bbf 100644 Binary files a/.venv/Lib/site-packages/torch/jit/__pycache__/_state.cpython-311.pyc and b/.venv/Lib/site-packages/torch/jit/__pycache__/_state.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/jit/__pycache__/_trace.cpython-311.pyc b/.venv/Lib/site-packages/torch/jit/__pycache__/_trace.cpython-311.pyc index 4e631ae2..83436c5c 100644 Binary files a/.venv/Lib/site-packages/torch/jit/__pycache__/_trace.cpython-311.pyc and b/.venv/Lib/site-packages/torch/jit/__pycache__/_trace.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/jit/__pycache__/annotations.cpython-311.pyc b/.venv/Lib/site-packages/torch/jit/__pycache__/annotations.cpython-311.pyc index 141fc537..1247cd0d 100644 Binary files a/.venv/Lib/site-packages/torch/jit/__pycache__/annotations.cpython-311.pyc and b/.venv/Lib/site-packages/torch/jit/__pycache__/annotations.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/jit/__pycache__/frontend.cpython-311.pyc b/.venv/Lib/site-packages/torch/jit/__pycache__/frontend.cpython-311.pyc index 8b34be05..e6f1baaa 100644 Binary files a/.venv/Lib/site-packages/torch/jit/__pycache__/frontend.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/jit/__pycache__/frontend.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/linalg/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/linalg/__pycache__/__init__.cpython-311.pyc index a74a1571..49f89912 100644 Binary files a/.venv/Lib/site-packages/torch/linalg/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/linalg/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/masked/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/masked/__pycache__/__init__.cpython-311.pyc index 3cb45c3e..5133dbe7 100644 Binary files a/.venv/Lib/site-packages/torch/masked/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/masked/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/masked/__pycache__/_docs.cpython-311.pyc b/.venv/Lib/site-packages/torch/masked/__pycache__/_docs.cpython-311.pyc index 404ccb1b..f85f5962 100644 Binary files a/.venv/Lib/site-packages/torch/masked/__pycache__/_docs.cpython-311.pyc and b/.venv/Lib/site-packages/torch/masked/__pycache__/_docs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/masked/__pycache__/_ops.cpython-311.pyc b/.venv/Lib/site-packages/torch/masked/__pycache__/_ops.cpython-311.pyc index 0e746f39..f31fc3f8 100644 Binary files a/.venv/Lib/site-packages/torch/masked/__pycache__/_ops.cpython-311.pyc and b/.venv/Lib/site-packages/torch/masked/__pycache__/_ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/__init__.cpython-311.pyc index 4df2d8b5..4683facf 100644 Binary files a/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/binary.cpython-311.pyc b/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/binary.cpython-311.pyc index e4f1b438..4ecf510f 100644 Binary files a/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/binary.cpython-311.pyc and b/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/binary.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/core.cpython-311.pyc b/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/core.cpython-311.pyc index e299c9de..d86dc303 100644 Binary files a/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/core.cpython-311.pyc and b/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/creation.cpython-311.pyc b/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/creation.cpython-311.pyc index bd15eaaf..ddb65394 100644 Binary files a/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/creation.cpython-311.pyc and b/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/creation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/passthrough.cpython-311.pyc b/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/passthrough.cpython-311.pyc index fde6f6d7..672edca0 100644 Binary files a/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/passthrough.cpython-311.pyc and b/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/passthrough.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/reductions.cpython-311.pyc b/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/reductions.cpython-311.pyc index 4203927e..a1333a30 100644 Binary files a/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/reductions.cpython-311.pyc 
and b/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/reductions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/unary.cpython-311.pyc b/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/unary.cpython-311.pyc index c3e8c16d..34f26574 100644 Binary files a/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/unary.cpython-311.pyc and b/.venv/Lib/site-packages/torch/masked/maskedtensor/__pycache__/unary.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/mps/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/mps/__pycache__/__init__.cpython-311.pyc index c66a9f71..f1b58f39 100644 Binary files a/.venv/Lib/site-packages/torch/mps/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/mps/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/mps/__pycache__/event.cpython-311.pyc b/.venv/Lib/site-packages/torch/mps/__pycache__/event.cpython-311.pyc index bec89a0f..db1bd4e8 100644 Binary files a/.venv/Lib/site-packages/torch/mps/__pycache__/event.cpython-311.pyc and b/.venv/Lib/site-packages/torch/mps/__pycache__/event.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/mps/__pycache__/profiler.cpython-311.pyc b/.venv/Lib/site-packages/torch/mps/__pycache__/profiler.cpython-311.pyc index 58cbb291..560d9f71 100644 Binary files a/.venv/Lib/site-packages/torch/mps/__pycache__/profiler.cpython-311.pyc and b/.venv/Lib/site-packages/torch/mps/__pycache__/profiler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/multiprocessing/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/multiprocessing/__pycache__/__init__.cpython-311.pyc index 348aed76..5c7c3c32 100644 Binary files a/.venv/Lib/site-packages/torch/multiprocessing/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/multiprocessing/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/multiprocessing/__pycache__/_atfork.cpython-311.pyc b/.venv/Lib/site-packages/torch/multiprocessing/__pycache__/_atfork.cpython-311.pyc index 0d25e22b..5bb80b75 100644 Binary files a/.venv/Lib/site-packages/torch/multiprocessing/__pycache__/_atfork.cpython-311.pyc and b/.venv/Lib/site-packages/torch/multiprocessing/__pycache__/_atfork.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/multiprocessing/__pycache__/reductions.cpython-311.pyc b/.venv/Lib/site-packages/torch/multiprocessing/__pycache__/reductions.cpython-311.pyc index 96de49b5..445b3ceb 100644 Binary files a/.venv/Lib/site-packages/torch/multiprocessing/__pycache__/reductions.cpython-311.pyc and b/.venv/Lib/site-packages/torch/multiprocessing/__pycache__/reductions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/multiprocessing/__pycache__/spawn.cpython-311.pyc b/.venv/Lib/site-packages/torch/multiprocessing/__pycache__/spawn.cpython-311.pyc index d08ea433..952845dc 100644 Binary files a/.venv/Lib/site-packages/torch/multiprocessing/__pycache__/spawn.cpython-311.pyc and b/.venv/Lib/site-packages/torch/multiprocessing/__pycache__/spawn.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nested/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/nested/__pycache__/__init__.cpython-311.pyc index f9d3e90e..076151a2 100644 Binary files a/.venv/Lib/site-packages/torch/nested/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nested/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nested/_internal/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/nested/_internal/__pycache__/__init__.cpython-311.pyc index e07e7c5a..531007e4 100644 Binary files a/.venv/Lib/site-packages/torch/nested/_internal/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nested/_internal/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/nested/_internal/__pycache__/nested_tensor.cpython-311.pyc b/.venv/Lib/site-packages/torch/nested/_internal/__pycache__/nested_tensor.cpython-311.pyc index 967adc52..e7334c29 100644 Binary files a/.venv/Lib/site-packages/torch/nested/_internal/__pycache__/nested_tensor.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nested/_internal/__pycache__/nested_tensor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nested/_internal/__pycache__/ops.cpython-311.pyc b/.venv/Lib/site-packages/torch/nested/_internal/__pycache__/ops.cpython-311.pyc index a0c3f24c..f06c2f3b 100644 Binary files a/.venv/Lib/site-packages/torch/nested/_internal/__pycache__/ops.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nested/_internal/__pycache__/ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nested/_internal/__pycache__/sdpa.cpython-311.pyc b/.venv/Lib/site-packages/torch/nested/_internal/__pycache__/sdpa.cpython-311.pyc index b519e79f..c9140bda 100644 Binary files a/.venv/Lib/site-packages/torch/nested/_internal/__pycache__/sdpa.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nested/_internal/__pycache__/sdpa.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/__pycache__/__init__.cpython-311.pyc index 2fe3735b..4a2b7dc7 100644 Binary files a/.venv/Lib/site-packages/torch/nn/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/__pycache__/_reduction.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/__pycache__/_reduction.cpython-311.pyc index 9bcbc9ca..34aa0fb9 100644 Binary files a/.venv/Lib/site-packages/torch/nn/__pycache__/_reduction.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/__pycache__/_reduction.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/nn/__pycache__/common_types.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/__pycache__/common_types.cpython-311.pyc index c4e81680..de754e8b 100644 Binary files a/.venv/Lib/site-packages/torch/nn/__pycache__/common_types.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/__pycache__/common_types.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/__pycache__/functional.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/__pycache__/functional.cpython-311.pyc index e149c3f4..c619baee 100644 Binary files a/.venv/Lib/site-packages/torch/nn/__pycache__/functional.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/__pycache__/functional.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/__pycache__/grad.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/__pycache__/grad.cpython-311.pyc index aaefff70..8ca74b0a 100644 Binary files a/.venv/Lib/site-packages/torch/nn/__pycache__/grad.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/__pycache__/grad.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/__pycache__/init.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/__pycache__/init.cpython-311.pyc index 8454cfc2..b7e84eb7 100644 Binary files a/.venv/Lib/site-packages/torch/nn/__pycache__/init.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/__pycache__/init.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/__pycache__/parameter.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/__pycache__/parameter.cpython-311.pyc index 015d3609..f22aa687 100644 Binary files a/.venv/Lib/site-packages/torch/nn/__pycache__/parameter.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/__pycache__/parameter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/attention/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/attention/__pycache__/__init__.cpython-311.pyc index 5b6a4685..0df17131 100644 Binary files 
a/.venv/Lib/site-packages/torch/nn/attention/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/attention/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/intrinsic/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/intrinsic/__pycache__/__init__.cpython-311.pyc index 68dfca4c..bd1fde59 100644 Binary files a/.venv/Lib/site-packages/torch/nn/intrinsic/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/intrinsic/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/intrinsic/modules/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/intrinsic/modules/__pycache__/__init__.cpython-311.pyc index 4124f75c..6ad59385 100644 Binary files a/.venv/Lib/site-packages/torch/nn/intrinsic/modules/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/intrinsic/modules/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/intrinsic/modules/__pycache__/fused.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/intrinsic/modules/__pycache__/fused.cpython-311.pyc index bc718e2c..9ed5fe45 100644 Binary files a/.venv/Lib/site-packages/torch/nn/intrinsic/modules/__pycache__/fused.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/intrinsic/modules/__pycache__/fused.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/intrinsic/qat/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/intrinsic/qat/__pycache__/__init__.cpython-311.pyc index a1a689db..e1cc8e8c 100644 Binary files a/.venv/Lib/site-packages/torch/nn/intrinsic/qat/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/intrinsic/qat/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/intrinsic/qat/modules/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/nn/intrinsic/qat/modules/__pycache__/__init__.cpython-311.pyc index 3ec1873b..0e98b754 100644 Binary files a/.venv/Lib/site-packages/torch/nn/intrinsic/qat/modules/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/intrinsic/qat/modules/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/intrinsic/qat/modules/__pycache__/conv_fused.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/intrinsic/qat/modules/__pycache__/conv_fused.cpython-311.pyc index 36b0c653..15486a21 100644 Binary files a/.venv/Lib/site-packages/torch/nn/intrinsic/qat/modules/__pycache__/conv_fused.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/intrinsic/qat/modules/__pycache__/conv_fused.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/intrinsic/qat/modules/__pycache__/linear_fused.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/intrinsic/qat/modules/__pycache__/linear_fused.cpython-311.pyc index a7f77cd5..777dfb71 100644 Binary files a/.venv/Lib/site-packages/torch/nn/intrinsic/qat/modules/__pycache__/linear_fused.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/intrinsic/qat/modules/__pycache__/linear_fused.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/intrinsic/qat/modules/__pycache__/linear_relu.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/intrinsic/qat/modules/__pycache__/linear_relu.cpython-311.pyc index c60373cd..720996ab 100644 Binary files a/.venv/Lib/site-packages/torch/nn/intrinsic/qat/modules/__pycache__/linear_relu.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/intrinsic/qat/modules/__pycache__/linear_relu.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/__pycache__/__init__.cpython-311.pyc index 7c90e67e..eee9a61f 100644 Binary files 
a/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/dynamic/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/dynamic/__pycache__/__init__.cpython-311.pyc index 557acfae..58734b9d 100644 Binary files a/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/dynamic/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/dynamic/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/dynamic/modules/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/dynamic/modules/__pycache__/__init__.cpython-311.pyc index 4559f1e9..31a38531 100644 Binary files a/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/dynamic/modules/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/dynamic/modules/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/dynamic/modules/__pycache__/linear_relu.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/dynamic/modules/__pycache__/linear_relu.cpython-311.pyc index 39de26fc..adf6c8ee 100644 Binary files a/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/dynamic/modules/__pycache__/linear_relu.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/dynamic/modules/__pycache__/linear_relu.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/modules/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/modules/__pycache__/__init__.cpython-311.pyc index e4cd7ed8..803caa4c 100644 Binary files 
a/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/modules/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/modules/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/modules/__pycache__/bn_relu.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/modules/__pycache__/bn_relu.cpython-311.pyc index ef55b2ba..4dae8968 100644 Binary files a/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/modules/__pycache__/bn_relu.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/modules/__pycache__/bn_relu.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/modules/__pycache__/conv_relu.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/modules/__pycache__/conv_relu.cpython-311.pyc index 39e5c0a5..88e88915 100644 Binary files a/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/modules/__pycache__/conv_relu.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/modules/__pycache__/conv_relu.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/modules/__pycache__/linear_relu.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/modules/__pycache__/linear_relu.cpython-311.pyc index 952f1598..187a5d01 100644 Binary files a/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/modules/__pycache__/linear_relu.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/intrinsic/quantized/modules/__pycache__/linear_relu.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/__init__.cpython-311.pyc index b96df654..4c2c536b 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/_functions.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/_functions.cpython-311.pyc index e23ffd8f..3c7aabdb 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/_functions.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/_functions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/activation.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/activation.cpython-311.pyc index 5ad74da6..8e80da37 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/activation.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/activation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/adaptive.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/adaptive.cpython-311.pyc index 9fce71a7..f7d2db03 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/adaptive.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/adaptive.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/batchnorm.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/batchnorm.cpython-311.pyc index d8052661..f7b910da 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/batchnorm.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/batchnorm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/channelshuffle.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/channelshuffle.cpython-311.pyc index 57699351..1b224c97 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/channelshuffle.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/channelshuffle.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/container.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/container.cpython-311.pyc index 3b35b1d7..9d870e2e 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/container.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/container.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/conv.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/conv.cpython-311.pyc index a8e184fa..b5c01b04 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/conv.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/conv.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/distance.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/distance.cpython-311.pyc index d0c67787..38fa4501 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/distance.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/distance.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/dropout.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/dropout.cpython-311.pyc index b6bd4d90..9ccce7ab 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/dropout.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/dropout.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/flatten.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/flatten.cpython-311.pyc index fb1b0f01..7ec90bbb 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/flatten.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/flatten.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/fold.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/fold.cpython-311.pyc index 9ad9ddf3..29e525b1 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/fold.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/fold.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/instancenorm.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/instancenorm.cpython-311.pyc index b43647d5..7b64aee3 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/instancenorm.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/instancenorm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/lazy.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/lazy.cpython-311.pyc index bbe4ad00..d16f6d0d 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/lazy.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/lazy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/linear.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/linear.cpython-311.pyc index c9b182f7..9ac6da78 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/linear.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/linear.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/loss.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/loss.cpython-311.pyc index a49266e7..7a0cd3e0 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/loss.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/loss.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/module.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/module.cpython-311.pyc index 05f20e51..da250005 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/module.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/module.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/normalization.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/normalization.cpython-311.pyc index 08b5456d..b57abfaf 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/normalization.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/normalization.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/padding.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/padding.cpython-311.pyc index 7eb57b01..be448bf5 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/padding.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/padding.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/pixelshuffle.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/pixelshuffle.cpython-311.pyc index ef809ffd..27192f9d 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/pixelshuffle.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/pixelshuffle.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/pooling.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/pooling.cpython-311.pyc index 2df866d4..a5c9e10e 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/pooling.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/pooling.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/rnn.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/rnn.cpython-311.pyc index dd1387ed..205f81b6 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/rnn.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/rnn.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/sparse.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/sparse.cpython-311.pyc index 9ef6edc3..c43c2253 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/sparse.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/sparse.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/transformer.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/transformer.cpython-311.pyc index 9fc17f2d..a54cfd1f 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/transformer.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/transformer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/upsampling.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/upsampling.cpython-311.pyc index 0d31400e..204876b9 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/upsampling.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/upsampling.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/utils.cpython-311.pyc index 03fc1aec..31b953e0 100644 Binary files a/.venv/Lib/site-packages/torch/nn/modules/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/modules/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/__init__.cpython-311.pyc index 45449d7a..3440dae9 100644 Binary files a/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/_functions.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/_functions.cpython-311.pyc index f9b81399..07e982d4 100644 Binary files a/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/_functions.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/_functions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/comm.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/comm.cpython-311.pyc index a0bbc3e3..d5114f12 100644 Binary files a/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/comm.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/comm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/data_parallel.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/data_parallel.cpython-311.pyc index 0608ebb7..777634ff 100644 Binary files a/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/data_parallel.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/data_parallel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/distributed.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/distributed.cpython-311.pyc index cb58df9f..f7c85675 100644 Binary files a/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/distributed.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/distributed.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/parallel_apply.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/parallel_apply.cpython-311.pyc index 41641ecf..a18f8010 100644 Binary files a/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/parallel_apply.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/parallel_apply.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/replicate.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/replicate.cpython-311.pyc index fcd8642b..e9b9257b 100644 Binary files a/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/replicate.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/replicate.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/scatter_gather.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/scatter_gather.cpython-311.pyc index b2b9ae0c..2331a4f9 100644 Binary files a/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/scatter_gather.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/parallel/__pycache__/scatter_gather.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/qat/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/qat/__pycache__/__init__.cpython-311.pyc index e8d79111..5656075d 100644 Binary files a/.venv/Lib/site-packages/torch/nn/qat/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/qat/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/qat/dynamic/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/qat/dynamic/__pycache__/__init__.cpython-311.pyc index 55649534..8b9370c0 100644 Binary files a/.venv/Lib/site-packages/torch/nn/qat/dynamic/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/qat/dynamic/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/nn/qat/dynamic/modules/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/qat/dynamic/modules/__pycache__/__init__.cpython-311.pyc index ec64c634..c3950f00 100644 Binary files a/.venv/Lib/site-packages/torch/nn/qat/dynamic/modules/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/qat/dynamic/modules/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/qat/dynamic/modules/__pycache__/linear.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/qat/dynamic/modules/__pycache__/linear.cpython-311.pyc index f39545ed..45556169 100644 Binary files a/.venv/Lib/site-packages/torch/nn/qat/dynamic/modules/__pycache__/linear.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/qat/dynamic/modules/__pycache__/linear.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/qat/modules/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/qat/modules/__pycache__/__init__.cpython-311.pyc index ab1285fe..2ae2731b 100644 Binary files a/.venv/Lib/site-packages/torch/nn/qat/modules/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/qat/modules/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/qat/modules/__pycache__/conv.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/qat/modules/__pycache__/conv.cpython-311.pyc index 160e5477..5ea4644b 100644 Binary files a/.venv/Lib/site-packages/torch/nn/qat/modules/__pycache__/conv.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/qat/modules/__pycache__/conv.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/qat/modules/__pycache__/embedding_ops.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/qat/modules/__pycache__/embedding_ops.cpython-311.pyc index 4d4d098f..b0834480 100644 Binary files a/.venv/Lib/site-packages/torch/nn/qat/modules/__pycache__/embedding_ops.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/nn/qat/modules/__pycache__/embedding_ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/qat/modules/__pycache__/linear.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/qat/modules/__pycache__/linear.cpython-311.pyc index 553b238b..4345d01b 100644 Binary files a/.venv/Lib/site-packages/torch/nn/qat/modules/__pycache__/linear.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/qat/modules/__pycache__/linear.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/quantizable/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/quantizable/__pycache__/__init__.cpython-311.pyc index e665031a..64f9fde7 100644 Binary files a/.venv/Lib/site-packages/torch/nn/quantizable/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/quantizable/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/quantizable/modules/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/quantizable/modules/__pycache__/__init__.cpython-311.pyc index ecb8b286..26791561 100644 Binary files a/.venv/Lib/site-packages/torch/nn/quantizable/modules/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/quantizable/modules/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/quantized/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/quantized/__pycache__/__init__.cpython-311.pyc index af156993..ddbd6611 100644 Binary files a/.venv/Lib/site-packages/torch/nn/quantized/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/quantized/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/quantized/__pycache__/functional.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/quantized/__pycache__/functional.cpython-311.pyc index 47cf2402..88e043fb 100644 Binary files 
a/.venv/Lib/site-packages/torch/nn/quantized/__pycache__/functional.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/quantized/__pycache__/functional.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/quantized/dynamic/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/quantized/dynamic/__pycache__/__init__.cpython-311.pyc index 0199f9e5..54084cb1 100644 Binary files a/.venv/Lib/site-packages/torch/nn/quantized/dynamic/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/quantized/dynamic/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/quantized/modules/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/quantized/modules/__pycache__/__init__.cpython-311.pyc index 114696d6..07ff6cbd 100644 Binary files a/.venv/Lib/site-packages/torch/nn/quantized/modules/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/quantized/modules/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/__init__.cpython-311.pyc index 38f249fb..fe9dff6f 100644 Binary files a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/_named_member_accessor.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/_named_member_accessor.cpython-311.pyc index d041ffd0..5a1e2718 100644 Binary files a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/_named_member_accessor.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/_named_member_accessor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/clip_grad.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/clip_grad.cpython-311.pyc index d49ea370..eec1679b 100644 Binary files a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/clip_grad.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/clip_grad.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/convert_parameters.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/convert_parameters.cpython-311.pyc index 8baae068..1fae501e 100644 Binary files a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/convert_parameters.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/convert_parameters.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/fusion.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/fusion.cpython-311.pyc index b69863e4..9850a513 100644 Binary files a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/fusion.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/fusion.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/init.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/init.cpython-311.pyc index 21f59c7c..193eacd2 100644 Binary files a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/init.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/init.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/memory_format.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/memory_format.cpython-311.pyc index ed42e42f..c1178e5a 100644 Binary files a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/memory_format.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/memory_format.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/parametrizations.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/parametrizations.cpython-311.pyc index f14a6dc4..8db1a6dd 100644 Binary files a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/parametrizations.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/parametrizations.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/parametrize.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/parametrize.cpython-311.pyc index cb412ffa..db77362c 100644 Binary files a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/parametrize.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/parametrize.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/rnn.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/rnn.cpython-311.pyc index 89708686..633425c9 100644 Binary files a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/rnn.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/rnn.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/spectral_norm.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/spectral_norm.cpython-311.pyc index 000ca2c7..f4b6a73c 100644 Binary files a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/spectral_norm.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/spectral_norm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/stateless.cpython-311.pyc b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/stateless.cpython-311.pyc index e9c33331..64d96920 100644 Binary files a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/stateless.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/stateless.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/weight_norm.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/weight_norm.cpython-311.pyc index c6eb5955..fdaf652e 100644 Binary files a/.venv/Lib/site-packages/torch/nn/utils/__pycache__/weight_norm.cpython-311.pyc and b/.venv/Lib/site-packages/torch/nn/utils/__pycache__/weight_norm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/__init__.cpython-311.pyc index b88f1e76..82704199 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/_constants.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/_constants.cpython-311.pyc index 2325b2ee..6fd5fa6a 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/_constants.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/__pycache__/_constants.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/_deprecation.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/_deprecation.cpython-311.pyc index 445cdbf0..c93133aa 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/_deprecation.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/__pycache__/_deprecation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/_exporter_states.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/_exporter_states.cpython-311.pyc index 5c9d6364..df246fe5 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/_exporter_states.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/__pycache__/_exporter_states.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/_globals.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/_globals.cpython-311.pyc index aa4ce06a..df226aa6 100644 
Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/_globals.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/__pycache__/_globals.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/_type_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/_type_utils.cpython-311.pyc index 73c1fdf5..6fcfe1a9 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/_type_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/__pycache__/_type_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/errors.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/errors.cpython-311.pyc index b139818d..f345fe18 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/errors.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/__pycache__/errors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/operators.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/operators.cpython-311.pyc index 6aaa028f..5ae8919e 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/operators.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/__pycache__/operators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_caffe2.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_caffe2.cpython-311.pyc index 7c15415c..26ad652a 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_caffe2.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_caffe2.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_helper.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_helper.cpython-311.pyc index 591c5752..5038148d 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_helper.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_helper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset10.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset10.cpython-311.pyc index fe58eb4e..0d751d60 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset10.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset10.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset11.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset11.cpython-311.pyc index 0eb3fa3b..4a8c215b 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset11.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset11.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset12.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset12.cpython-311.pyc index 8bea059e..8684bc80 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset12.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset12.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset13.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset13.cpython-311.pyc index 7bb2e96b..7107d510 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset13.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset13.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset14.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset14.cpython-311.pyc index e0b6b4bc..d6263295 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset14.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset14.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset15.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset15.cpython-311.pyc index 0e95a4aa..0c76dba0 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset15.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset15.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset16.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset16.cpython-311.pyc index 75b9462e..11192c2f 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset16.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset16.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset17.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset17.cpython-311.pyc index 8b324190..00617bcb 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset17.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset17.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset18.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset18.cpython-311.pyc index 738553b3..6602dfb9 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset18.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset18.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset7.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset7.cpython-311.pyc index 49490329..835729e9 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset7.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset7.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset8.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset8.cpython-311.pyc index 8a8feac6..d8de6cdd 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset8.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset8.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset9.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset9.cpython-311.pyc index b4543ad9..66f412d7 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset9.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset9.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/__pycache__/utils.cpython-311.pyc index 319401f9..073046fe 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/__init__.cpython-311.pyc index 14a070e2..720cd445 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/_beartype.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/_beartype.cpython-311.pyc index c73c9469..4dbd65eb 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/_beartype.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/_beartype.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/exporter.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/exporter.cpython-311.pyc index d96c771c..cee96490 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/exporter.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/exporter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/io_adapter.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/io_adapter.cpython-311.pyc index 106c2928..ddab054e 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/io_adapter.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/io_adapter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/jit_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/jit_utils.cpython-311.pyc index 7e20fc4d..37452ab1 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/jit_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/jit_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/onnx_proto_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/onnx_proto_utils.cpython-311.pyc index 009af4b1..393decce 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/onnx_proto_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/onnx_proto_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/onnxruntime.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/onnxruntime.cpython-311.pyc index 65fcc454..72a2a142 100644 Binary files 
a/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/onnxruntime.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/onnxruntime.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/registration.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/registration.cpython-311.pyc index 639ecb47..425ae504 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/registration.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/__pycache__/registration.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/__pycache__/__init__.cpython-311.pyc index 33c79501..cf72d5b5 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/__pycache__/_diagnostic.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/__pycache__/_diagnostic.cpython-311.pyc index d06c41aa..3112b2ce 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/__pycache__/_diagnostic.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/__pycache__/_diagnostic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/__pycache__/_rules.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/__pycache__/_rules.cpython-311.pyc index 7429ded6..9ba8f9e4 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/__pycache__/_rules.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/__pycache__/_rules.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/__init__.cpython-311.pyc index 838cf463..c13b2d67 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/_infra.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/_infra.cpython-311.pyc index c87ddf41..37481b73 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/_infra.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/_infra.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/context.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/context.cpython-311.pyc index ff405e01..44b8de60 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/context.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/context.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/formatter.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/formatter.cpython-311.pyc index 8f8f6004..41990960 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/formatter.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/formatter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/utils.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/utils.cpython-311.pyc index 9e68a6c5..641653aa 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/__init__.cpython-311.pyc index 8dfb1287..449f50e1 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_address.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_address.cpython-311.pyc index cc97583a..411105eb 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_address.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_address.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_artifact.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_artifact.cpython-311.pyc index 1a5df3a0..83c04c4f 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_artifact.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_artifact.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_artifact_change.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_artifact_change.cpython-311.pyc index 5e391f64..5700bcb5 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_artifact_change.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_artifact_change.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_artifact_content.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_artifact_content.cpython-311.pyc index f871224a..e1c53a87 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_artifact_content.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_artifact_content.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_artifact_location.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_artifact_location.cpython-311.pyc index 97ab1198..416763af 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_artifact_location.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_artifact_location.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_attachment.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_attachment.cpython-311.pyc index a3d3c4bb..945399b9 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_attachment.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_attachment.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_code_flow.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_code_flow.cpython-311.pyc index afe2cc3c..83e8b7ef 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_code_flow.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_code_flow.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_configuration_override.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_configuration_override.cpython-311.pyc index a1d2cdc3..c78af379 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_configuration_override.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_configuration_override.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_conversion.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_conversion.cpython-311.pyc index b9af77c2..cebc8080 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_conversion.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_conversion.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_edge.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_edge.cpython-311.pyc index 31b8503c..7580859b 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_edge.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_edge.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_edge_traversal.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_edge_traversal.cpython-311.pyc index 5ea6bd27..32c9e4d8 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_edge_traversal.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_edge_traversal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_exception.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_exception.cpython-311.pyc index 4dbcc6ae..5cf58eed 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_exception.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_exception.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_external_properties.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_external_properties.cpython-311.pyc index f5b457c8..5159ba12 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_external_properties.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_external_properties.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_external_property_file_reference.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_external_property_file_reference.cpython-311.pyc index b74cfdbc..bb9300b3 
100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_external_property_file_reference.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_external_property_file_reference.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_external_property_file_references.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_external_property_file_references.cpython-311.pyc index dacb4a1d..820d6161 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_external_property_file_references.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_external_property_file_references.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_fix.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_fix.cpython-311.pyc index 780da833..ec01975b 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_fix.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_fix.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_graph.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_graph.cpython-311.pyc index 80407ef2..de92d6c6 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_graph.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_graph.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_graph_traversal.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_graph_traversal.cpython-311.pyc index f5805017..ff23b329 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_graph_traversal.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_graph_traversal.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_invocation.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_invocation.cpython-311.pyc index e685f577..6b043dbd 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_invocation.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_invocation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_location.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_location.cpython-311.pyc index f3a1b060..368c446e 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_location.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_location.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_location_relationship.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_location_relationship.cpython-311.pyc index bbae7a7e..e412be36 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_location_relationship.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_location_relationship.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_logical_location.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_logical_location.cpython-311.pyc index f21f3d28..2dda7f1f 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_logical_location.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_logical_location.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_message.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_message.cpython-311.pyc index db1d8d1c..99fa87b6 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_message.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_message.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_multiformat_message_string.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_multiformat_message_string.cpython-311.pyc index abbc0398..b9723897 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_multiformat_message_string.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_multiformat_message_string.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_node.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_node.cpython-311.pyc index 705ab7c7..f3124f70 100644 Binary 
files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_node.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_node.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_notification.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_notification.cpython-311.pyc index 66b49e37..0c01d640 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_notification.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_notification.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_physical_location.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_physical_location.cpython-311.pyc index 7c183a8d..3a180a46 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_physical_location.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_physical_location.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_property_bag.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_property_bag.cpython-311.pyc index 285a87c8..9e76483b 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_property_bag.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_property_bag.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_rectangle.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_rectangle.cpython-311.pyc index fabb335d..d740865e 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_rectangle.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_rectangle.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_region.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_region.cpython-311.pyc index b82e4070..0fcad496 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_region.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_region.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_replacement.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_replacement.cpython-311.pyc index 60e71305..3e23e9a5 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_replacement.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_replacement.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_configuration.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_configuration.cpython-311.pyc index 8901d5f8..9d0ac4e3 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_configuration.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_configuration.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_descriptor.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_descriptor.cpython-311.pyc index 354b457f..4545bb41 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_descriptor.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_descriptor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_descriptor_reference.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_descriptor_reference.cpython-311.pyc index 5c40c994..41855ea3 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_descriptor_reference.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_descriptor_reference.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_descriptor_relationship.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_descriptor_relationship.cpython-311.pyc index c0ec57cc..1f9b934b 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_descriptor_relationship.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_descriptor_relationship.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_result.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_result.cpython-311.pyc index b8c38237..f5c65895 100644 Binary 
files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_result.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_result.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_result_provenance.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_result_provenance.cpython-311.pyc index ffca7a97..0c6ef4c2 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_result_provenance.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_result_provenance.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_run.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_run.cpython-311.pyc index c2965fd8..d54477ea 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_run.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_run.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_run_automation_details.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_run_automation_details.cpython-311.pyc index 0ade3afc..b0088aca 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_run_automation_details.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_run_automation_details.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_sarif_log.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_sarif_log.cpython-311.pyc index a8ff05c3..233d50f0 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_sarif_log.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_sarif_log.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_special_locations.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_special_locations.cpython-311.pyc index 8660a9ba..26b3171a 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_special_locations.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_special_locations.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_stack.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_stack.cpython-311.pyc index 8f1a3d7a..fcc9dfec 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_stack.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_stack.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_stack_frame.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_stack_frame.cpython-311.pyc index 36fecf33..fc4516a8 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_stack_frame.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_stack_frame.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_suppression.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_suppression.cpython-311.pyc index e0e044bf..ea7f85fe 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_suppression.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_suppression.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_thread_flow.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_thread_flow.cpython-311.pyc index 1d47916a..4f3d3ae0 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_thread_flow.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_thread_flow.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_thread_flow_location.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_thread_flow_location.cpython-311.pyc index 6ffa0485..56f37356 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_thread_flow_location.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_thread_flow_location.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_tool.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_tool.cpython-311.pyc index 1d1193b9..0a786114 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_tool.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_tool.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_tool_component.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_tool_component.cpython-311.pyc index 6f506284..15a31854 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_tool_component.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_tool_component.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_tool_component_reference.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_tool_component_reference.cpython-311.pyc index d3c0a2c3..b901b2e6 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_tool_component_reference.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_tool_component_reference.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_translation_metadata.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_translation_metadata.cpython-311.pyc index de6480f7..8353db55 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_translation_metadata.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_translation_metadata.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_version_control_details.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_version_control_details.cpython-311.pyc index 62b9440c..b94b2744 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_version_control_details.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_version_control_details.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_web_request.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_web_request.cpython-311.pyc index a8e71dbd..d0bd8eeb 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_web_request.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_web_request.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_web_response.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_web_response.cpython-311.pyc index 06721ba1..7a7b1ea1 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_web_response.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_web_response.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/version.cpython-311.pyc index a91bef25..3e83506d 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/version.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/version.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/__init__.cpython-311.pyc index 25ee0d2e..1158aaad 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/decomposition_table.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/decomposition_table.cpython-311.pyc index 787b6b24..d627e31d 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/decomposition_table.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/decomposition_table.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/diagnostics.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/diagnostics.cpython-311.pyc index 708dd29f..605b30e1 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/diagnostics.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/diagnostics.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/patcher.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/patcher.cpython-311.pyc index 91d695d1..a8826053 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/patcher.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/patcher.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/registration.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/registration.cpython-311.pyc index 4b599019..ddfe6702 100644 Binary files 
a/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/registration.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/registration.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/serialization.cpython-311.pyc b/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/serialization.cpython-311.pyc index 6a61b35f..10674556 100644 Binary files a/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/serialization.cpython-311.pyc and b/.venv/Lib/site-packages/torch/onnx/_internal/fx/__pycache__/serialization.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/optim/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/optim/__pycache__/__init__.cpython-311.pyc index c6f9b215..e62064f2 100644 Binary files a/.venv/Lib/site-packages/torch/optim/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/optim/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/optim/__pycache__/_functional.cpython-311.pyc b/.venv/Lib/site-packages/torch/optim/__pycache__/_functional.cpython-311.pyc index 8b3ea683..e2bf9192 100644 Binary files a/.venv/Lib/site-packages/torch/optim/__pycache__/_functional.cpython-311.pyc and b/.venv/Lib/site-packages/torch/optim/__pycache__/_functional.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/optim/__pycache__/adadelta.cpython-311.pyc b/.venv/Lib/site-packages/torch/optim/__pycache__/adadelta.cpython-311.pyc index c726fbf7..964fff27 100644 Binary files a/.venv/Lib/site-packages/torch/optim/__pycache__/adadelta.cpython-311.pyc and b/.venv/Lib/site-packages/torch/optim/__pycache__/adadelta.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/optim/__pycache__/adagrad.cpython-311.pyc b/.venv/Lib/site-packages/torch/optim/__pycache__/adagrad.cpython-311.pyc index e8524bde..0c07b3ba 100644 Binary files 
a/.venv/Lib/site-packages/torch/optim/__pycache__/adagrad.cpython-311.pyc and b/.venv/Lib/site-packages/torch/optim/__pycache__/adagrad.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/optim/__pycache__/adam.cpython-311.pyc b/.venv/Lib/site-packages/torch/optim/__pycache__/adam.cpython-311.pyc index 02386e08..a2769719 100644 Binary files a/.venv/Lib/site-packages/torch/optim/__pycache__/adam.cpython-311.pyc and b/.venv/Lib/site-packages/torch/optim/__pycache__/adam.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/optim/__pycache__/adamax.cpython-311.pyc b/.venv/Lib/site-packages/torch/optim/__pycache__/adamax.cpython-311.pyc index b8bd897e..4e9ce4f1 100644 Binary files a/.venv/Lib/site-packages/torch/optim/__pycache__/adamax.cpython-311.pyc and b/.venv/Lib/site-packages/torch/optim/__pycache__/adamax.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/optim/__pycache__/adamw.cpython-311.pyc b/.venv/Lib/site-packages/torch/optim/__pycache__/adamw.cpython-311.pyc index 16c2ca5c..e7b635c3 100644 Binary files a/.venv/Lib/site-packages/torch/optim/__pycache__/adamw.cpython-311.pyc and b/.venv/Lib/site-packages/torch/optim/__pycache__/adamw.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/optim/__pycache__/asgd.cpython-311.pyc b/.venv/Lib/site-packages/torch/optim/__pycache__/asgd.cpython-311.pyc index a974035c..b50535d5 100644 Binary files a/.venv/Lib/site-packages/torch/optim/__pycache__/asgd.cpython-311.pyc and b/.venv/Lib/site-packages/torch/optim/__pycache__/asgd.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/optim/__pycache__/lbfgs.cpython-311.pyc b/.venv/Lib/site-packages/torch/optim/__pycache__/lbfgs.cpython-311.pyc index 9ab04336..3c10d8a2 100644 Binary files a/.venv/Lib/site-packages/torch/optim/__pycache__/lbfgs.cpython-311.pyc and b/.venv/Lib/site-packages/torch/optim/__pycache__/lbfgs.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/optim/__pycache__/lr_scheduler.cpython-311.pyc b/.venv/Lib/site-packages/torch/optim/__pycache__/lr_scheduler.cpython-311.pyc index 98c35f44..52aa2c3d 100644 Binary files a/.venv/Lib/site-packages/torch/optim/__pycache__/lr_scheduler.cpython-311.pyc and b/.venv/Lib/site-packages/torch/optim/__pycache__/lr_scheduler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/optim/__pycache__/nadam.cpython-311.pyc b/.venv/Lib/site-packages/torch/optim/__pycache__/nadam.cpython-311.pyc index e44e1409..98d8ebda 100644 Binary files a/.venv/Lib/site-packages/torch/optim/__pycache__/nadam.cpython-311.pyc and b/.venv/Lib/site-packages/torch/optim/__pycache__/nadam.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/optim/__pycache__/optimizer.cpython-311.pyc b/.venv/Lib/site-packages/torch/optim/__pycache__/optimizer.cpython-311.pyc index 3540df62..71ea2b8a 100644 Binary files a/.venv/Lib/site-packages/torch/optim/__pycache__/optimizer.cpython-311.pyc and b/.venv/Lib/site-packages/torch/optim/__pycache__/optimizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/optim/__pycache__/radam.cpython-311.pyc b/.venv/Lib/site-packages/torch/optim/__pycache__/radam.cpython-311.pyc index d07ced19..8bc9288d 100644 Binary files a/.venv/Lib/site-packages/torch/optim/__pycache__/radam.cpython-311.pyc and b/.venv/Lib/site-packages/torch/optim/__pycache__/radam.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/optim/__pycache__/rmsprop.cpython-311.pyc b/.venv/Lib/site-packages/torch/optim/__pycache__/rmsprop.cpython-311.pyc index 4ef7c57f..51acee43 100644 Binary files a/.venv/Lib/site-packages/torch/optim/__pycache__/rmsprop.cpython-311.pyc and b/.venv/Lib/site-packages/torch/optim/__pycache__/rmsprop.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/optim/__pycache__/rprop.cpython-311.pyc b/.venv/Lib/site-packages/torch/optim/__pycache__/rprop.cpython-311.pyc index aba09699..59fdddf5 
100644 Binary files a/.venv/Lib/site-packages/torch/optim/__pycache__/rprop.cpython-311.pyc and b/.venv/Lib/site-packages/torch/optim/__pycache__/rprop.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/optim/__pycache__/sgd.cpython-311.pyc b/.venv/Lib/site-packages/torch/optim/__pycache__/sgd.cpython-311.pyc index 15cd7cee..c06630fc 100644 Binary files a/.venv/Lib/site-packages/torch/optim/__pycache__/sgd.cpython-311.pyc and b/.venv/Lib/site-packages/torch/optim/__pycache__/sgd.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/optim/__pycache__/sparse_adam.cpython-311.pyc b/.venv/Lib/site-packages/torch/optim/__pycache__/sparse_adam.cpython-311.pyc index f7ff92ec..5aa46865 100644 Binary files a/.venv/Lib/site-packages/torch/optim/__pycache__/sparse_adam.cpython-311.pyc and b/.venv/Lib/site-packages/torch/optim/__pycache__/sparse_adam.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/optim/__pycache__/swa_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/optim/__pycache__/swa_utils.cpython-311.pyc index 721c262a..56bd0b29 100644 Binary files a/.venv/Lib/site-packages/torch/optim/__pycache__/swa_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/optim/__pycache__/swa_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/optim/_multi_tensor/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/optim/_multi_tensor/__pycache__/__init__.cpython-311.pyc index 21a17c83..2ddac682 100644 Binary files a/.venv/Lib/site-packages/torch/optim/_multi_tensor/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/optim/_multi_tensor/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/package/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/package/__pycache__/__init__.cpython-311.pyc index 3a4d31d2..e0e9cd65 100644 Binary files a/.venv/Lib/site-packages/torch/package/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/package/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/package/__pycache__/_digraph.cpython-311.pyc b/.venv/Lib/site-packages/torch/package/__pycache__/_digraph.cpython-311.pyc index 4d4883e1..ae14633b 100644 Binary files a/.venv/Lib/site-packages/torch/package/__pycache__/_digraph.cpython-311.pyc and b/.venv/Lib/site-packages/torch/package/__pycache__/_digraph.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/package/__pycache__/_directory_reader.cpython-311.pyc b/.venv/Lib/site-packages/torch/package/__pycache__/_directory_reader.cpython-311.pyc index 435cf19b..3095b568 100644 Binary files a/.venv/Lib/site-packages/torch/package/__pycache__/_directory_reader.cpython-311.pyc and b/.venv/Lib/site-packages/torch/package/__pycache__/_directory_reader.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/package/__pycache__/_importlib.cpython-311.pyc b/.venv/Lib/site-packages/torch/package/__pycache__/_importlib.cpython-311.pyc index 6d445865..05e723d7 100644 Binary files a/.venv/Lib/site-packages/torch/package/__pycache__/_importlib.cpython-311.pyc and b/.venv/Lib/site-packages/torch/package/__pycache__/_importlib.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/package/__pycache__/_mangling.cpython-311.pyc b/.venv/Lib/site-packages/torch/package/__pycache__/_mangling.cpython-311.pyc index 9936a0a4..05c42cff 100644 Binary files a/.venv/Lib/site-packages/torch/package/__pycache__/_mangling.cpython-311.pyc and b/.venv/Lib/site-packages/torch/package/__pycache__/_mangling.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/package/__pycache__/_package_pickler.cpython-311.pyc b/.venv/Lib/site-packages/torch/package/__pycache__/_package_pickler.cpython-311.pyc index 8d87e031..53124bc2 100644 Binary files a/.venv/Lib/site-packages/torch/package/__pycache__/_package_pickler.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/package/__pycache__/_package_pickler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/package/__pycache__/_package_unpickler.cpython-311.pyc b/.venv/Lib/site-packages/torch/package/__pycache__/_package_unpickler.cpython-311.pyc index dbfca586..33022446 100644 Binary files a/.venv/Lib/site-packages/torch/package/__pycache__/_package_unpickler.cpython-311.pyc and b/.venv/Lib/site-packages/torch/package/__pycache__/_package_unpickler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/package/__pycache__/_stdlib.cpython-311.pyc b/.venv/Lib/site-packages/torch/package/__pycache__/_stdlib.cpython-311.pyc index 3c454d51..ec79bd5e 100644 Binary files a/.venv/Lib/site-packages/torch/package/__pycache__/_stdlib.cpython-311.pyc and b/.venv/Lib/site-packages/torch/package/__pycache__/_stdlib.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/package/__pycache__/file_structure_representation.cpython-311.pyc b/.venv/Lib/site-packages/torch/package/__pycache__/file_structure_representation.cpython-311.pyc index 6b1c23eb..a08e564f 100644 Binary files a/.venv/Lib/site-packages/torch/package/__pycache__/file_structure_representation.cpython-311.pyc and b/.venv/Lib/site-packages/torch/package/__pycache__/file_structure_representation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/package/__pycache__/find_file_dependencies.cpython-311.pyc b/.venv/Lib/site-packages/torch/package/__pycache__/find_file_dependencies.cpython-311.pyc index a78f6435..c140ed5d 100644 Binary files a/.venv/Lib/site-packages/torch/package/__pycache__/find_file_dependencies.cpython-311.pyc and b/.venv/Lib/site-packages/torch/package/__pycache__/find_file_dependencies.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/package/__pycache__/glob_group.cpython-311.pyc b/.venv/Lib/site-packages/torch/package/__pycache__/glob_group.cpython-311.pyc index b437c2b0..de8c7a2e 100644 Binary files 
a/.venv/Lib/site-packages/torch/package/__pycache__/glob_group.cpython-311.pyc and b/.venv/Lib/site-packages/torch/package/__pycache__/glob_group.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/package/__pycache__/importer.cpython-311.pyc b/.venv/Lib/site-packages/torch/package/__pycache__/importer.cpython-311.pyc index 04b9cfb3..f354bbbd 100644 Binary files a/.venv/Lib/site-packages/torch/package/__pycache__/importer.cpython-311.pyc and b/.venv/Lib/site-packages/torch/package/__pycache__/importer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/package/__pycache__/package_exporter.cpython-311.pyc b/.venv/Lib/site-packages/torch/package/__pycache__/package_exporter.cpython-311.pyc index 033e8501..c9135bbc 100644 Binary files a/.venv/Lib/site-packages/torch/package/__pycache__/package_exporter.cpython-311.pyc and b/.venv/Lib/site-packages/torch/package/__pycache__/package_exporter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/package/__pycache__/package_importer.cpython-311.pyc b/.venv/Lib/site-packages/torch/package/__pycache__/package_importer.cpython-311.pyc index 7d21bbe7..43049a7f 100644 Binary files a/.venv/Lib/site-packages/torch/package/__pycache__/package_importer.cpython-311.pyc and b/.venv/Lib/site-packages/torch/package/__pycache__/package_importer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/package/analyze/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/package/analyze/__pycache__/__init__.cpython-311.pyc index 9840f6e3..a2307bbe 100644 Binary files a/.venv/Lib/site-packages/torch/package/analyze/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/package/analyze/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/package/analyze/__pycache__/find_first_use_of_broken_modules.cpython-311.pyc b/.venv/Lib/site-packages/torch/package/analyze/__pycache__/find_first_use_of_broken_modules.cpython-311.pyc index 
459e04e1..b4d00fdf 100644 Binary files a/.venv/Lib/site-packages/torch/package/analyze/__pycache__/find_first_use_of_broken_modules.cpython-311.pyc and b/.venv/Lib/site-packages/torch/package/analyze/__pycache__/find_first_use_of_broken_modules.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/package/analyze/__pycache__/is_from_package.cpython-311.pyc b/.venv/Lib/site-packages/torch/package/analyze/__pycache__/is_from_package.cpython-311.pyc index f0aecb27..a214c737 100644 Binary files a/.venv/Lib/site-packages/torch/package/analyze/__pycache__/is_from_package.cpython-311.pyc and b/.venv/Lib/site-packages/torch/package/analyze/__pycache__/is_from_package.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/package/analyze/__pycache__/trace_dependencies.cpython-311.pyc b/.venv/Lib/site-packages/torch/package/analyze/__pycache__/trace_dependencies.cpython-311.pyc index 30f65d40..ab1d3992 100644 Binary files a/.venv/Lib/site-packages/torch/package/analyze/__pycache__/trace_dependencies.cpython-311.pyc and b/.venv/Lib/site-packages/torch/package/analyze/__pycache__/trace_dependencies.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/profiler/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/profiler/__pycache__/__init__.cpython-311.pyc index e3f87b84..62871185 100644 Binary files a/.venv/Lib/site-packages/torch/profiler/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/profiler/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/profiler/__pycache__/_memory_profiler.cpython-311.pyc b/.venv/Lib/site-packages/torch/profiler/__pycache__/_memory_profiler.cpython-311.pyc index 8fba8998..c9eb1f09 100644 Binary files a/.venv/Lib/site-packages/torch/profiler/__pycache__/_memory_profiler.cpython-311.pyc and b/.venv/Lib/site-packages/torch/profiler/__pycache__/_memory_profiler.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/profiler/__pycache__/_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/profiler/__pycache__/_utils.cpython-311.pyc index 4bd409e1..09d8159a 100644 Binary files a/.venv/Lib/site-packages/torch/profiler/__pycache__/_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/profiler/__pycache__/_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/profiler/__pycache__/itt.cpython-311.pyc b/.venv/Lib/site-packages/torch/profiler/__pycache__/itt.cpython-311.pyc index dc1434f7..49830f6f 100644 Binary files a/.venv/Lib/site-packages/torch/profiler/__pycache__/itt.cpython-311.pyc and b/.venv/Lib/site-packages/torch/profiler/__pycache__/itt.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/profiler/__pycache__/profiler.cpython-311.pyc b/.venv/Lib/site-packages/torch/profiler/__pycache__/profiler.cpython-311.pyc index 960bf8a0..0c5d9652 100644 Binary files a/.venv/Lib/site-packages/torch/profiler/__pycache__/profiler.cpython-311.pyc and b/.venv/Lib/site-packages/torch/profiler/__pycache__/profiler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/quantization/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/quantization/__pycache__/__init__.cpython-311.pyc index 74f96d7f..67a4fc48 100644 Binary files a/.venv/Lib/site-packages/torch/quantization/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/quantization/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/quantization/__pycache__/fake_quantize.cpython-311.pyc b/.venv/Lib/site-packages/torch/quantization/__pycache__/fake_quantize.cpython-311.pyc index 0db0e046..514ddfdd 100644 Binary files a/.venv/Lib/site-packages/torch/quantization/__pycache__/fake_quantize.cpython-311.pyc and b/.venv/Lib/site-packages/torch/quantization/__pycache__/fake_quantize.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/quantization/__pycache__/fuse_modules.cpython-311.pyc b/.venv/Lib/site-packages/torch/quantization/__pycache__/fuse_modules.cpython-311.pyc index 37ef0196..15aa2671 100644 Binary files a/.venv/Lib/site-packages/torch/quantization/__pycache__/fuse_modules.cpython-311.pyc and b/.venv/Lib/site-packages/torch/quantization/__pycache__/fuse_modules.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/quantization/__pycache__/fuser_method_mappings.cpython-311.pyc b/.venv/Lib/site-packages/torch/quantization/__pycache__/fuser_method_mappings.cpython-311.pyc index 01224d2d..d7ee298e 100644 Binary files a/.venv/Lib/site-packages/torch/quantization/__pycache__/fuser_method_mappings.cpython-311.pyc and b/.venv/Lib/site-packages/torch/quantization/__pycache__/fuser_method_mappings.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/quantization/__pycache__/observer.cpython-311.pyc b/.venv/Lib/site-packages/torch/quantization/__pycache__/observer.cpython-311.pyc index 48a3f260..40eb5a5a 100644 Binary files a/.venv/Lib/site-packages/torch/quantization/__pycache__/observer.cpython-311.pyc and b/.venv/Lib/site-packages/torch/quantization/__pycache__/observer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/quantization/__pycache__/qconfig.cpython-311.pyc b/.venv/Lib/site-packages/torch/quantization/__pycache__/qconfig.cpython-311.pyc index 7cbaea5a..36558100 100644 Binary files a/.venv/Lib/site-packages/torch/quantization/__pycache__/qconfig.cpython-311.pyc and b/.venv/Lib/site-packages/torch/quantization/__pycache__/qconfig.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/quantization/__pycache__/quant_type.cpython-311.pyc b/.venv/Lib/site-packages/torch/quantization/__pycache__/quant_type.cpython-311.pyc index e601241f..dcfdccbd 100644 Binary files a/.venv/Lib/site-packages/torch/quantization/__pycache__/quant_type.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/quantization/__pycache__/quant_type.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/quantization/__pycache__/quantization_mappings.cpython-311.pyc b/.venv/Lib/site-packages/torch/quantization/__pycache__/quantization_mappings.cpython-311.pyc index 0fc74489..ae2118a1 100644 Binary files a/.venv/Lib/site-packages/torch/quantization/__pycache__/quantization_mappings.cpython-311.pyc and b/.venv/Lib/site-packages/torch/quantization/__pycache__/quantization_mappings.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/quantization/__pycache__/quantize.cpython-311.pyc b/.venv/Lib/site-packages/torch/quantization/__pycache__/quantize.cpython-311.pyc index d4db1ef0..b8a4d057 100644 Binary files a/.venv/Lib/site-packages/torch/quantization/__pycache__/quantize.cpython-311.pyc and b/.venv/Lib/site-packages/torch/quantization/__pycache__/quantize.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/quantization/__pycache__/quantize_jit.cpython-311.pyc b/.venv/Lib/site-packages/torch/quantization/__pycache__/quantize_jit.cpython-311.pyc index 6f06a7c8..386b3e5e 100644 Binary files a/.venv/Lib/site-packages/torch/quantization/__pycache__/quantize_jit.cpython-311.pyc and b/.venv/Lib/site-packages/torch/quantization/__pycache__/quantize_jit.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/quantization/__pycache__/stubs.cpython-311.pyc b/.venv/Lib/site-packages/torch/quantization/__pycache__/stubs.cpython-311.pyc index a5df14b5..dd25b8be 100644 Binary files a/.venv/Lib/site-packages/torch/quantization/__pycache__/stubs.cpython-311.pyc and b/.venv/Lib/site-packages/torch/quantization/__pycache__/stubs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/signal/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/signal/__pycache__/__init__.cpython-311.pyc index f4ea94a9..9d1b9cea 100644 Binary files 
a/.venv/Lib/site-packages/torch/signal/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/signal/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/signal/windows/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/signal/windows/__pycache__/__init__.cpython-311.pyc index c0f8c644..afdfe685 100644 Binary files a/.venv/Lib/site-packages/torch/signal/windows/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/signal/windows/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/signal/windows/__pycache__/windows.cpython-311.pyc b/.venv/Lib/site-packages/torch/signal/windows/__pycache__/windows.cpython-311.pyc index a56d9ca5..b573d362 100644 Binary files a/.venv/Lib/site-packages/torch/signal/windows/__pycache__/windows.cpython-311.pyc and b/.venv/Lib/site-packages/torch/signal/windows/__pycache__/windows.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/sparse/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/sparse/__pycache__/__init__.cpython-311.pyc index c31fb075..1f55e32e 100644 Binary files a/.venv/Lib/site-packages/torch/sparse/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/sparse/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/sparse/__pycache__/_semi_structured_conversions.cpython-311.pyc b/.venv/Lib/site-packages/torch/sparse/__pycache__/_semi_structured_conversions.cpython-311.pyc index fe1d6bf5..d2db69fa 100644 Binary files a/.venv/Lib/site-packages/torch/sparse/__pycache__/_semi_structured_conversions.cpython-311.pyc and b/.venv/Lib/site-packages/torch/sparse/__pycache__/_semi_structured_conversions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/sparse/__pycache__/_semi_structured_ops.cpython-311.pyc b/.venv/Lib/site-packages/torch/sparse/__pycache__/_semi_structured_ops.cpython-311.pyc index 8fa294fb..406b3378 
100644 Binary files a/.venv/Lib/site-packages/torch/sparse/__pycache__/_semi_structured_ops.cpython-311.pyc and b/.venv/Lib/site-packages/torch/sparse/__pycache__/_semi_structured_ops.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/sparse/__pycache__/semi_structured.cpython-311.pyc b/.venv/Lib/site-packages/torch/sparse/__pycache__/semi_structured.cpython-311.pyc index 2f81b866..34f3c075 100644 Binary files a/.venv/Lib/site-packages/torch/sparse/__pycache__/semi_structured.cpython-311.pyc and b/.venv/Lib/site-packages/torch/sparse/__pycache__/semi_structured.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/special/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/special/__pycache__/__init__.cpython-311.pyc index 73b5d316..d2aedeb0 100644 Binary files a/.venv/Lib/site-packages/torch/special/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/special/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/testing/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/testing/__pycache__/__init__.cpython-311.pyc index ee63c946..ab2211a7 100644 Binary files a/.venv/Lib/site-packages/torch/testing/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/testing/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/testing/__pycache__/_comparison.cpython-311.pyc b/.venv/Lib/site-packages/torch/testing/__pycache__/_comparison.cpython-311.pyc index d009f6b6..1da4bddf 100644 Binary files a/.venv/Lib/site-packages/torch/testing/__pycache__/_comparison.cpython-311.pyc and b/.venv/Lib/site-packages/torch/testing/__pycache__/_comparison.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/testing/__pycache__/_creation.cpython-311.pyc b/.venv/Lib/site-packages/torch/testing/__pycache__/_creation.cpython-311.pyc index fc718c46..2b08251d 100644 Binary files 
a/.venv/Lib/site-packages/torch/testing/__pycache__/_creation.cpython-311.pyc and b/.venv/Lib/site-packages/torch/testing/__pycache__/_creation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/testing/_internal/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/testing/_internal/__pycache__/__init__.cpython-311.pyc index 945ce788..92659704 100644 Binary files a/.venv/Lib/site-packages/torch/testing/_internal/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/testing/_internal/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/testing/_internal/__pycache__/logging_tensor.cpython-311.pyc b/.venv/Lib/site-packages/torch/testing/_internal/__pycache__/logging_tensor.cpython-311.pyc index fff901f3..4d968bea 100644 Binary files a/.venv/Lib/site-packages/torch/testing/_internal/__pycache__/logging_tensor.cpython-311.pyc and b/.venv/Lib/site-packages/torch/testing/_internal/__pycache__/logging_tensor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/__init__.cpython-311.pyc index 27a6be71..5d574ed1 100644 Binary files a/.venv/Lib/site-packages/torch/utils/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/__pycache__/_config_module.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/_config_module.cpython-311.pyc index 39bb38b2..353a741c 100644 Binary files a/.venv/Lib/site-packages/torch/utils/__pycache__/_config_module.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/__pycache__/_config_module.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/__pycache__/_content_store.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/_content_store.cpython-311.pyc index 84d77ffb..74929fcb 100644 Binary files 
a/.venv/Lib/site-packages/torch/utils/__pycache__/_content_store.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/__pycache__/_content_store.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/__pycache__/_contextlib.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/_contextlib.cpython-311.pyc index cad756ae..ce5e346d 100644 Binary files a/.venv/Lib/site-packages/torch/utils/__pycache__/_contextlib.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/__pycache__/_contextlib.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/__pycache__/_cxx_pytree.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/_cxx_pytree.cpython-311.pyc index bc7b26a7..f0464f2d 100644 Binary files a/.venv/Lib/site-packages/torch/utils/__pycache__/_cxx_pytree.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/__pycache__/_cxx_pytree.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/__pycache__/_device.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/_device.cpython-311.pyc index cdd4b26d..d0561e35 100644 Binary files a/.venv/Lib/site-packages/torch/utils/__pycache__/_device.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/__pycache__/_device.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/__pycache__/_foreach_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/_foreach_utils.cpython-311.pyc index e57d6a69..d42013e1 100644 Binary files a/.venv/Lib/site-packages/torch/utils/__pycache__/_foreach_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/__pycache__/_foreach_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/__pycache__/_import_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/_import_utils.cpython-311.pyc index c12cc39c..7b71b65a 100644 Binary files a/.venv/Lib/site-packages/torch/utils/__pycache__/_import_utils.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/utils/__pycache__/_import_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/__pycache__/_mode_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/_mode_utils.cpython-311.pyc index 212f2fac..b78df48c 100644 Binary files a/.venv/Lib/site-packages/torch/utils/__pycache__/_mode_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/__pycache__/_mode_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/__pycache__/_python_dispatch.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/_python_dispatch.cpython-311.pyc index e7ddde53..33530a77 100644 Binary files a/.venv/Lib/site-packages/torch/utils/__pycache__/_python_dispatch.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/__pycache__/_python_dispatch.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/__pycache__/_pytree.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/_pytree.cpython-311.pyc index 01057f6a..892285da 100644 Binary files a/.venv/Lib/site-packages/torch/utils/__pycache__/_pytree.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/__pycache__/_pytree.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/__pycache__/_stats.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/_stats.cpython-311.pyc index 60820423..92c66e8a 100644 Binary files a/.venv/Lib/site-packages/torch/utils/__pycache__/_stats.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/__pycache__/_stats.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/__pycache__/_traceback.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/_traceback.cpython-311.pyc index b7f2f672..9ebd5255 100644 Binary files a/.venv/Lib/site-packages/torch/utils/__pycache__/_traceback.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/__pycache__/_traceback.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/utils/__pycache__/_typing_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/_typing_utils.cpython-311.pyc index 09861642..3b298e42 100644 Binary files a/.venv/Lib/site-packages/torch/utils/__pycache__/_typing_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/__pycache__/_typing_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/__pycache__/backend_registration.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/backend_registration.cpython-311.pyc index f6713f5a..016a9705 100644 Binary files a/.venv/Lib/site-packages/torch/utils/__pycache__/backend_registration.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/__pycache__/backend_registration.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/__pycache__/checkpoint.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/checkpoint.cpython-311.pyc index 976bce43..5232717a 100644 Binary files a/.venv/Lib/site-packages/torch/utils/__pycache__/checkpoint.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/__pycache__/checkpoint.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/__pycache__/collect_env.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/collect_env.cpython-311.pyc index d906a302..769cc39c 100644 Binary files a/.venv/Lib/site-packages/torch/utils/__pycache__/collect_env.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/__pycache__/collect_env.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/__pycache__/cpp_backtrace.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/cpp_backtrace.cpython-311.pyc index 32657bd7..21a9c0df 100644 Binary files a/.venv/Lib/site-packages/torch/utils/__pycache__/cpp_backtrace.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/__pycache__/cpp_backtrace.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/utils/__pycache__/deterministic.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/deterministic.cpython-311.pyc index 7f6c86a0..27854986 100644 Binary files a/.venv/Lib/site-packages/torch/utils/__pycache__/deterministic.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/__pycache__/deterministic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/__pycache__/dlpack.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/dlpack.cpython-311.pyc index afca3e4d..d1b77d6d 100644 Binary files a/.venv/Lib/site-packages/torch/utils/__pycache__/dlpack.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/__pycache__/dlpack.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/__pycache__/hooks.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/hooks.cpython-311.pyc index b2507512..5d0e9629 100644 Binary files a/.venv/Lib/site-packages/torch/utils/__pycache__/hooks.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/__pycache__/hooks.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/__pycache__/throughput_benchmark.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/throughput_benchmark.cpython-311.pyc index d82b6601..d77411a8 100644 Binary files a/.venv/Lib/site-packages/torch/utils/__pycache__/throughput_benchmark.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/__pycache__/throughput_benchmark.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/__pycache__/weak.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/__pycache__/weak.cpython-311.pyc index 91878a38..2d7b574c 100644 Binary files a/.venv/Lib/site-packages/torch/utils/__pycache__/weak.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/__pycache__/weak.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/__init__.cpython-311.pyc index 34f33d50..4088fd03 100644 Binary files a/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/functions.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/functions.cpython-311.pyc index 7ea353dc..de7af010 100644 Binary files a/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/functions.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/functions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/interp.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/interp.cpython-311.pyc index 5ce683b4..b61b7469 100644 Binary files a/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/interp.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/interp.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/reference.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/reference.cpython-311.pyc index 72e73c78..fd60780b 100644 Binary files a/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/reference.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/reference.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/singleton_int.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/singleton_int.cpython-311.pyc index 0953efc0..990231fe 100644 Binary files a/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/singleton_int.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/singleton_int.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/solve.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/solve.cpython-311.pyc index c0612e3f..f3bf5f59 100644 Binary files a/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/solve.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/solve.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/value_ranges.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/value_ranges.cpython-311.pyc index bc371032..07935659 100644 Binary files a/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/value_ranges.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/_sympy/__pycache__/value_ranges.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/backcompat/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/backcompat/__pycache__/__init__.cpython-311.pyc index 8c2616ff..55a9d0d3 100644 Binary files a/.venv/Lib/site-packages/torch/utils/backcompat/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/backcompat/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/__pycache__/__init__.cpython-311.pyc index 824e041e..478a2f87 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/__pycache__/dataloader.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/__pycache__/dataloader.cpython-311.pyc index 69aad7f1..0363703e 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/__pycache__/dataloader.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/__pycache__/dataloader.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/__pycache__/dataset.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/utils/data/__pycache__/dataset.cpython-311.pyc index 8e9cd691..7a62f2ef 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/__pycache__/dataset.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/__pycache__/dataset.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/__pycache__/distributed.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/__pycache__/distributed.cpython-311.pyc index e132856f..111492f2 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/__pycache__/distributed.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/__pycache__/distributed.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/__pycache__/graph.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/__pycache__/graph.cpython-311.pyc index 2d0d8091..ad9ff176 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/__pycache__/graph.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/__pycache__/graph.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/__pycache__/graph_settings.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/__pycache__/graph_settings.cpython-311.pyc index 9892bccc..db997926 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/__pycache__/graph_settings.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/__pycache__/graph_settings.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/__pycache__/sampler.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/__pycache__/sampler.cpython-311.pyc index 6b9feff2..a5ddce20 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/__pycache__/sampler.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/__pycache__/sampler.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/__init__.cpython-311.pyc index fe6b4d77..c5f936b7 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/collate.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/collate.cpython-311.pyc index a623738d..2a1a967f 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/collate.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/collate.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/fetch.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/fetch.cpython-311.pyc index 62e8b49e..35cdee3f 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/fetch.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/fetch.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/pin_memory.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/pin_memory.cpython-311.pyc index da890e76..09d28f74 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/pin_memory.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/pin_memory.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/signal_handling.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/signal_handling.cpython-311.pyc index 3e853bd2..3c62f665 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/signal_handling.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/signal_handling.cpython-311.pyc differ 
diff --git a/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/worker.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/worker.cpython-311.pyc index 77c6a4fb..1489e8a6 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/worker.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/_utils/__pycache__/worker.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/__pycache__/__init__.cpython-311.pyc index 8d5ff539..14e726b7 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/__pycache__/_decorator.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/__pycache__/_decorator.cpython-311.pyc index bf8b122d..4fd06920 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/__pycache__/_decorator.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/__pycache__/_decorator.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/__pycache__/_hook_iterator.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/__pycache__/_hook_iterator.cpython-311.pyc index 5d8253a3..36cdb07f 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/__pycache__/_hook_iterator.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/__pycache__/_hook_iterator.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/__pycache__/_typing.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/__pycache__/_typing.cpython-311.pyc index f9a2487d..0c6b64db 100644 Binary files 
a/.venv/Lib/site-packages/torch/utils/data/datapipes/__pycache__/_typing.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/__pycache__/_typing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/__pycache__/datapipe.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/__pycache__/datapipe.cpython-311.pyc index c3d42647..1ca8dbd1 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/__pycache__/datapipe.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/__pycache__/datapipe.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/dataframe/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/dataframe/__pycache__/__init__.cpython-311.pyc index e8d358e8..1746bc03 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/dataframe/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/dataframe/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/dataframe/__pycache__/dataframe_wrapper.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/dataframe/__pycache__/dataframe_wrapper.cpython-311.pyc index d4fd2796..10682a7b 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/dataframe/__pycache__/dataframe_wrapper.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/dataframe/__pycache__/dataframe_wrapper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/dataframe/__pycache__/dataframes.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/dataframe/__pycache__/dataframes.cpython-311.pyc index b1bfc3d7..2fb1009c 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/dataframe/__pycache__/dataframes.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/utils/data/datapipes/dataframe/__pycache__/dataframes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/dataframe/__pycache__/datapipes.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/dataframe/__pycache__/datapipes.cpython-311.pyc index 18805b1f..3562d95b 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/dataframe/__pycache__/datapipes.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/dataframe/__pycache__/datapipes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/dataframe/__pycache__/structures.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/dataframe/__pycache__/structures.cpython-311.pyc index 03047df1..da5bf0cb 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/dataframe/__pycache__/structures.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/dataframe/__pycache__/structures.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/__init__.cpython-311.pyc index 8f610ac4..ecde8b09 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/callable.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/callable.cpython-311.pyc index 55fbd2bb..7a35d53b 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/callable.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/callable.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/combinatorics.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/combinatorics.cpython-311.pyc index fccb73b2..fa709fa0 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/combinatorics.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/combinatorics.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/combining.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/combining.cpython-311.pyc index eaa4a0f7..071c7513 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/combining.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/combining.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/filelister.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/filelister.cpython-311.pyc index 7d41dabf..e05060bf 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/filelister.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/filelister.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/fileopener.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/fileopener.cpython-311.pyc index 9b28ac4e..8b6734dc 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/fileopener.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/fileopener.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/grouping.cpython-311.pyc 
b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/grouping.cpython-311.pyc index 08d92607..1af03aa3 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/grouping.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/grouping.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/routeddecoder.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/routeddecoder.cpython-311.pyc index 70f706f3..d3187b10 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/routeddecoder.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/routeddecoder.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/selecting.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/selecting.cpython-311.pyc index 67351fcd..511deedf 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/selecting.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/selecting.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/sharding.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/sharding.cpython-311.pyc index ad37fb4c..7866e61f 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/sharding.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/sharding.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/streamreader.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/streamreader.cpython-311.pyc index 9b06f246..02f304fa 100644 Binary files 
a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/streamreader.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/streamreader.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/utils.cpython-311.pyc index cb4f4599..99b519f9 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/iter/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/__init__.cpython-311.pyc index 86ee4e76..3ba253e4 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/callable.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/callable.cpython-311.pyc index 723a6a4d..46645747 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/callable.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/callable.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/combinatorics.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/combinatorics.cpython-311.pyc index 242d2505..b20ad2af 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/combinatorics.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/combinatorics.cpython-311.pyc differ diff 
--git a/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/combining.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/combining.cpython-311.pyc index 7a676dcf..4c7d9687 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/combining.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/combining.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/grouping.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/grouping.cpython-311.pyc index 302e0896..34aecef3 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/grouping.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/grouping.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/utils.cpython-311.pyc index c5425171..f5e4185a 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/map/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/utils/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/utils/__pycache__/__init__.cpython-311.pyc index f15b80e2..33b14805 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/utils/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/utils/__pycache__/common.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/utils/__pycache__/common.cpython-311.pyc index d808cbdf..f0201031 100644 
Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/utils/__pycache__/common.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/utils/__pycache__/common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/utils/data/datapipes/utils/__pycache__/decoder.cpython-311.pyc b/.venv/Lib/site-packages/torch/utils/data/datapipes/utils/__pycache__/decoder.cpython-311.pyc index 2fdb59b9..19635ed8 100644 Binary files a/.venv/Lib/site-packages/torch/utils/data/datapipes/utils/__pycache__/decoder.cpython-311.pyc and b/.venv/Lib/site-packages/torch/utils/data/datapipes/utils/__pycache__/decoder.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/xpu/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torch/xpu/__pycache__/__init__.cpython-311.pyc index 36ee8833..7425f190 100644 Binary files a/.venv/Lib/site-packages/torch/xpu/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torch/xpu/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/xpu/__pycache__/_utils.cpython-311.pyc b/.venv/Lib/site-packages/torch/xpu/__pycache__/_utils.cpython-311.pyc index 69cf5013..aa4016c2 100644 Binary files a/.venv/Lib/site-packages/torch/xpu/__pycache__/_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torch/xpu/__pycache__/_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/xpu/__pycache__/random.cpython-311.pyc b/.venv/Lib/site-packages/torch/xpu/__pycache__/random.cpython-311.pyc index 7fd5a0a7..ce678e7d 100644 Binary files a/.venv/Lib/site-packages/torch/xpu/__pycache__/random.cpython-311.pyc and b/.venv/Lib/site-packages/torch/xpu/__pycache__/random.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torch/xpu/__pycache__/streams.cpython-311.pyc b/.venv/Lib/site-packages/torch/xpu/__pycache__/streams.cpython-311.pyc index b7e30d3d..f39a55e2 100644 Binary files a/.venv/Lib/site-packages/torch/xpu/__pycache__/streams.cpython-311.pyc and 
b/.venv/Lib/site-packages/torch/xpu/__pycache__/streams.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/__pycache__/__init__.cpython-311.pyc index 3c054371..7ab2df4e 100644 Binary files a/.venv/Lib/site-packages/torchaudio/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/__pycache__/kaldi_io.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/__pycache__/kaldi_io.cpython-311.pyc index e9eab662..edf696ce 100644 Binary files a/.venv/Lib/site-packages/torchaudio/__pycache__/kaldi_io.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/__pycache__/kaldi_io.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/__pycache__/version.cpython-311.pyc index b7a62cc9..3b740b84 100644 Binary files a/.venv/Lib/site-packages/torchaudio/__pycache__/version.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/__init__.cpython-311.pyc index 80f66244..e8bd6fa6 100644 Binary files a/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/backend.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/backend.cpython-311.pyc index 28bd93ea..aa5672a6 100644 Binary files a/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/backend.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/backend.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/common.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/common.cpython-311.pyc index de5dc31c..98751d95 100644 Binary files a/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/common.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/ffmpeg.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/ffmpeg.cpython-311.pyc index 15137a65..ffcfd2fe 100644 Binary files a/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/ffmpeg.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/ffmpeg.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/soundfile.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/soundfile.cpython-311.pyc index ce763aeb..19d2f8f3 100644 Binary files a/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/soundfile.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/soundfile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/soundfile_backend.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/soundfile_backend.cpython-311.pyc index 10a270fd..38f460cc 100644 Binary files a/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/soundfile_backend.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/soundfile_backend.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/sox.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/sox.cpython-311.pyc index 8aa4dea0..e66799c2 100644 Binary files a/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/sox.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/sox.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/utils.cpython-311.pyc index c9faf820..294f3733 100644 Binary files a/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/_backend/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/_extension/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/_extension/__pycache__/__init__.cpython-311.pyc index d02682fb..4c349162 100644 Binary files a/.venv/Lib/site-packages/torchaudio/_extension/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/_extension/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/_extension/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/_extension/__pycache__/utils.cpython-311.pyc index 92fa9aa6..7f518bb7 100644 Binary files a/.venv/Lib/site-packages/torchaudio/_extension/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/_extension/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/_internal/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/_internal/__pycache__/__init__.cpython-311.pyc index 872c4d14..a5bc38a9 100644 Binary files a/.venv/Lib/site-packages/torchaudio/_internal/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/_internal/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/_internal/__pycache__/module_utils.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/_internal/__pycache__/module_utils.cpython-311.pyc index 5961f506..3ff36f2b 100644 Binary files a/.venv/Lib/site-packages/torchaudio/_internal/__pycache__/module_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/_internal/__pycache__/module_utils.cpython-311.pyc differ 
diff --git a/.venv/Lib/site-packages/torchaudio/backend/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/backend/__pycache__/__init__.cpython-311.pyc index b6ba834f..2422bad9 100644 Binary files a/.venv/Lib/site-packages/torchaudio/backend/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/backend/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/backend/__pycache__/common.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/backend/__pycache__/common.cpython-311.pyc index 3996bb1a..051c545a 100644 Binary files a/.venv/Lib/site-packages/torchaudio/backend/__pycache__/common.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/backend/__pycache__/common.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/backend/__pycache__/no_backend.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/backend/__pycache__/no_backend.cpython-311.pyc index 0e88a067..d4afa146 100644 Binary files a/.venv/Lib/site-packages/torchaudio/backend/__pycache__/no_backend.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/backend/__pycache__/no_backend.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/backend/__pycache__/soundfile_backend.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/backend/__pycache__/soundfile_backend.cpython-311.pyc index 2d538ffb..ddf4338c 100644 Binary files a/.venv/Lib/site-packages/torchaudio/backend/__pycache__/soundfile_backend.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/backend/__pycache__/soundfile_backend.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/backend/__pycache__/sox_io_backend.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/backend/__pycache__/sox_io_backend.cpython-311.pyc index 6d40aa22..1809ee91 100644 Binary files a/.venv/Lib/site-packages/torchaudio/backend/__pycache__/sox_io_backend.cpython-311.pyc and 
b/.venv/Lib/site-packages/torchaudio/backend/__pycache__/sox_io_backend.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/compliance/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/compliance/__pycache__/__init__.cpython-311.pyc index 91a6c306..1d075d20 100644 Binary files a/.venv/Lib/site-packages/torchaudio/compliance/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/compliance/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/compliance/__pycache__/kaldi.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/compliance/__pycache__/kaldi.cpython-311.pyc index 1d57a3cb..d2f0e80d 100644 Binary files a/.venv/Lib/site-packages/torchaudio/compliance/__pycache__/kaldi.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/compliance/__pycache__/kaldi.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/__init__.cpython-311.pyc index 5c3ed344..1ceca345 100644 Binary files a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/cmuarctic.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/cmuarctic.cpython-311.pyc index 442b9cd9..d1c2308e 100644 Binary files a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/cmuarctic.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/cmuarctic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/cmudict.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/cmudict.cpython-311.pyc index 4d11224d..f62eee2d 100644 Binary files 
a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/cmudict.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/cmudict.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/commonvoice.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/commonvoice.cpython-311.pyc index 9b546eb5..520c72ac 100644 Binary files a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/commonvoice.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/commonvoice.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/dr_vctk.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/dr_vctk.cpython-311.pyc index 678fd8c3..f41b1100 100644 Binary files a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/dr_vctk.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/dr_vctk.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/fluentcommands.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/fluentcommands.cpython-311.pyc index 76930735..f0f7c53a 100644 Binary files a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/fluentcommands.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/fluentcommands.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/gtzan.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/gtzan.cpython-311.pyc index 6c2b518b..8373f06d 100644 Binary files a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/gtzan.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/gtzan.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/iemocap.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/iemocap.cpython-311.pyc index 96705ea9..48b9e22d 100644 
Binary files a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/iemocap.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/iemocap.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/librilight_limited.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/librilight_limited.cpython-311.pyc index c30339b5..ac8d6755 100644 Binary files a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/librilight_limited.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/librilight_limited.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/librimix.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/librimix.cpython-311.pyc index c73354e5..bf2cd115 100644 Binary files a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/librimix.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/librimix.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/librispeech.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/librispeech.cpython-311.pyc index a2bba78c..29e68e27 100644 Binary files a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/librispeech.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/librispeech.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/librispeech_biasing.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/librispeech_biasing.cpython-311.pyc index dfa816a8..4204076a 100644 Binary files a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/librispeech_biasing.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/librispeech_biasing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/libritts.cpython-311.pyc 
b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/libritts.cpython-311.pyc index d446793c..b6904364 100644 Binary files a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/libritts.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/libritts.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/ljspeech.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/ljspeech.cpython-311.pyc index c6f3f0e8..8c86ff0c 100644 Binary files a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/ljspeech.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/ljspeech.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/musdb_hq.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/musdb_hq.cpython-311.pyc index 025ffef9..db82c5f2 100644 Binary files a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/musdb_hq.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/musdb_hq.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/quesst14.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/quesst14.cpython-311.pyc index de45f647..05fdb1f8 100644 Binary files a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/quesst14.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/quesst14.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/snips.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/snips.cpython-311.pyc index 2334b7db..785b6743 100644 Binary files a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/snips.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/snips.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/speechcommands.cpython-311.pyc 
b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/speechcommands.cpython-311.pyc index 1d568a03..60085ca2 100644 Binary files a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/speechcommands.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/speechcommands.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/tedlium.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/tedlium.cpython-311.pyc index 208e4989..db31bfbc 100644 Binary files a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/tedlium.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/tedlium.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/utils.cpython-311.pyc index b8c471e2..7fba42f5 100644 Binary files a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/vctk.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/vctk.cpython-311.pyc index 9f595089..1a692732 100644 Binary files a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/vctk.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/vctk.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/voxceleb1.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/voxceleb1.cpython-311.pyc index eb0099a3..c3de906a 100644 Binary files a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/voxceleb1.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/voxceleb1.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/yesno.cpython-311.pyc 
b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/yesno.cpython-311.pyc index e7dd28bc..525186df 100644 Binary files a/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/yesno.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/datasets/__pycache__/yesno.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/functional/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/functional/__pycache__/__init__.cpython-311.pyc index bdb469e9..0a600e33 100644 Binary files a/.venv/Lib/site-packages/torchaudio/functional/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/functional/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/functional/__pycache__/_alignment.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/functional/__pycache__/_alignment.cpython-311.pyc index 9676cfd5..02751254 100644 Binary files a/.venv/Lib/site-packages/torchaudio/functional/__pycache__/_alignment.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/functional/__pycache__/_alignment.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/functional/__pycache__/filtering.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/functional/__pycache__/filtering.cpython-311.pyc index 79ae7460..61635642 100644 Binary files a/.venv/Lib/site-packages/torchaudio/functional/__pycache__/filtering.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/functional/__pycache__/filtering.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/functional/__pycache__/functional.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/functional/__pycache__/functional.cpython-311.pyc index 42c66e57..fb4d33dc 100644 Binary files a/.venv/Lib/site-packages/torchaudio/functional/__pycache__/functional.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/functional/__pycache__/functional.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torchaudio/io/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/io/__pycache__/__init__.cpython-311.pyc index 72efc522..9ad1fae0 100644 Binary files a/.venv/Lib/site-packages/torchaudio/io/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/io/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/io/__pycache__/_effector.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/io/__pycache__/_effector.cpython-311.pyc index b9e16f5b..9084ee22 100644 Binary files a/.venv/Lib/site-packages/torchaudio/io/__pycache__/_effector.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/io/__pycache__/_effector.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/io/__pycache__/_playback.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/io/__pycache__/_playback.cpython-311.pyc index b7ce19e1..5736a27e 100644 Binary files a/.venv/Lib/site-packages/torchaudio/io/__pycache__/_playback.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/io/__pycache__/_playback.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/lib/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/lib/__pycache__/__init__.cpython-311.pyc index 4d77ca0b..9c4bc2b0 100644 Binary files a/.venv/Lib/site-packages/torchaudio/lib/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/lib/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/models/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/models/__pycache__/__init__.cpython-311.pyc index 51a5abfa..2df4d5fd 100644 Binary files a/.venv/Lib/site-packages/torchaudio/models/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/models/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/models/__pycache__/_hdemucs.cpython-311.pyc 
b/.venv/Lib/site-packages/torchaudio/models/__pycache__/_hdemucs.cpython-311.pyc index 2175c507..1069a749 100644 Binary files a/.venv/Lib/site-packages/torchaudio/models/__pycache__/_hdemucs.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/models/__pycache__/_hdemucs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/models/__pycache__/conformer.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/models/__pycache__/conformer.cpython-311.pyc index 9faa7577..ee7ebdb7 100644 Binary files a/.venv/Lib/site-packages/torchaudio/models/__pycache__/conformer.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/models/__pycache__/conformer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/models/__pycache__/conv_tasnet.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/models/__pycache__/conv_tasnet.cpython-311.pyc index 57f888ff..b1e14ce2 100644 Binary files a/.venv/Lib/site-packages/torchaudio/models/__pycache__/conv_tasnet.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/models/__pycache__/conv_tasnet.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/models/__pycache__/deepspeech.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/models/__pycache__/deepspeech.cpython-311.pyc index 17aa700a..14ffc43a 100644 Binary files a/.venv/Lib/site-packages/torchaudio/models/__pycache__/deepspeech.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/models/__pycache__/deepspeech.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/models/__pycache__/emformer.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/models/__pycache__/emformer.cpython-311.pyc index 61ced2b1..e1232da3 100644 Binary files a/.venv/Lib/site-packages/torchaudio/models/__pycache__/emformer.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/models/__pycache__/emformer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/models/__pycache__/rnnt.cpython-311.pyc 
b/.venv/Lib/site-packages/torchaudio/models/__pycache__/rnnt.cpython-311.pyc index 8d438fb8..46416742 100644 Binary files a/.venv/Lib/site-packages/torchaudio/models/__pycache__/rnnt.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/models/__pycache__/rnnt.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/models/__pycache__/rnnt_decoder.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/models/__pycache__/rnnt_decoder.cpython-311.pyc index 53491def..352e79aa 100644 Binary files a/.venv/Lib/site-packages/torchaudio/models/__pycache__/rnnt_decoder.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/models/__pycache__/rnnt_decoder.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/models/__pycache__/tacotron2.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/models/__pycache__/tacotron2.cpython-311.pyc index 421a2c90..2e13e2d8 100644 Binary files a/.venv/Lib/site-packages/torchaudio/models/__pycache__/tacotron2.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/models/__pycache__/tacotron2.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/models/__pycache__/wav2letter.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/models/__pycache__/wav2letter.cpython-311.pyc index 6afe0659..0bebabd9 100644 Binary files a/.venv/Lib/site-packages/torchaudio/models/__pycache__/wav2letter.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/models/__pycache__/wav2letter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/models/__pycache__/wavernn.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/models/__pycache__/wavernn.cpython-311.pyc index ffab7649..cbd4ac53 100644 Binary files a/.venv/Lib/site-packages/torchaudio/models/__pycache__/wavernn.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/models/__pycache__/wavernn.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/models/squim/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/torchaudio/models/squim/__pycache__/__init__.cpython-311.pyc index b4035efa..8a4186fc 100644 Binary files a/.venv/Lib/site-packages/torchaudio/models/squim/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/models/squim/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/models/squim/__pycache__/objective.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/models/squim/__pycache__/objective.cpython-311.pyc index 62fa3953..4fa0caa0 100644 Binary files a/.venv/Lib/site-packages/torchaudio/models/squim/__pycache__/objective.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/models/squim/__pycache__/objective.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/models/squim/__pycache__/subjective.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/models/squim/__pycache__/subjective.cpython-311.pyc index 9e989696..0de44e46 100644 Binary files a/.venv/Lib/site-packages/torchaudio/models/squim/__pycache__/subjective.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/models/squim/__pycache__/subjective.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/models/wav2vec2/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/models/wav2vec2/__pycache__/__init__.cpython-311.pyc index 798101e3..a71c18f5 100644 Binary files a/.venv/Lib/site-packages/torchaudio/models/wav2vec2/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/models/wav2vec2/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/models/wav2vec2/__pycache__/components.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/models/wav2vec2/__pycache__/components.cpython-311.pyc index 6095c556..22aff5cd 100644 Binary files a/.venv/Lib/site-packages/torchaudio/models/wav2vec2/__pycache__/components.cpython-311.pyc and 
b/.venv/Lib/site-packages/torchaudio/models/wav2vec2/__pycache__/components.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/models/wav2vec2/__pycache__/model.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/models/wav2vec2/__pycache__/model.cpython-311.pyc index ec49c270..4efe0c4a 100644 Binary files a/.venv/Lib/site-packages/torchaudio/models/wav2vec2/__pycache__/model.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/models/wav2vec2/__pycache__/model.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/models/wav2vec2/__pycache__/wavlm_attention.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/models/wav2vec2/__pycache__/wavlm_attention.cpython-311.pyc index 0eca27f5..6135bbad 100644 Binary files a/.venv/Lib/site-packages/torchaudio/models/wav2vec2/__pycache__/wavlm_attention.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/models/wav2vec2/__pycache__/wavlm_attention.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/models/wav2vec2/utils/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/models/wav2vec2/utils/__pycache__/__init__.cpython-311.pyc index 20ecccf1..e725d2d1 100644 Binary files a/.venv/Lib/site-packages/torchaudio/models/wav2vec2/utils/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/models/wav2vec2/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/models/wav2vec2/utils/__pycache__/import_fairseq.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/models/wav2vec2/utils/__pycache__/import_fairseq.cpython-311.pyc index 6f4ef09d..68d7d085 100644 Binary files a/.venv/Lib/site-packages/torchaudio/models/wav2vec2/utils/__pycache__/import_fairseq.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/models/wav2vec2/utils/__pycache__/import_fairseq.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torchaudio/models/wav2vec2/utils/__pycache__/import_huggingface.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/models/wav2vec2/utils/__pycache__/import_huggingface.cpython-311.pyc index 19792202..6bcbad4b 100644 Binary files a/.venv/Lib/site-packages/torchaudio/models/wav2vec2/utils/__pycache__/import_huggingface.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/models/wav2vec2/utils/__pycache__/import_huggingface.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/pipelines/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/pipelines/__pycache__/__init__.cpython-311.pyc index 78a137f0..5012ac78 100644 Binary files a/.venv/Lib/site-packages/torchaudio/pipelines/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/pipelines/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/pipelines/__pycache__/_source_separation_pipeline.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/pipelines/__pycache__/_source_separation_pipeline.cpython-311.pyc index 0cd3d95c..827263e4 100644 Binary files a/.venv/Lib/site-packages/torchaudio/pipelines/__pycache__/_source_separation_pipeline.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/pipelines/__pycache__/_source_separation_pipeline.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/pipelines/__pycache__/_squim_pipeline.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/pipelines/__pycache__/_squim_pipeline.cpython-311.pyc index 176ac722..0c919339 100644 Binary files a/.venv/Lib/site-packages/torchaudio/pipelines/__pycache__/_squim_pipeline.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/pipelines/__pycache__/_squim_pipeline.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/pipelines/__pycache__/rnnt_pipeline.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/pipelines/__pycache__/rnnt_pipeline.cpython-311.pyc index 
719f057b..77d18c50 100644 Binary files a/.venv/Lib/site-packages/torchaudio/pipelines/__pycache__/rnnt_pipeline.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/pipelines/__pycache__/rnnt_pipeline.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/pipelines/_tts/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/pipelines/_tts/__pycache__/__init__.cpython-311.pyc index a0aed91d..c952a2aa 100644 Binary files a/.venv/Lib/site-packages/torchaudio/pipelines/_tts/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/pipelines/_tts/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/pipelines/_tts/__pycache__/impl.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/pipelines/_tts/__pycache__/impl.cpython-311.pyc index 3749a56a..aa359d5b 100644 Binary files a/.venv/Lib/site-packages/torchaudio/pipelines/_tts/__pycache__/impl.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/pipelines/_tts/__pycache__/impl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/pipelines/_tts/__pycache__/interface.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/pipelines/_tts/__pycache__/interface.cpython-311.pyc index 31a1db2e..503b5d74 100644 Binary files a/.venv/Lib/site-packages/torchaudio/pipelines/_tts/__pycache__/interface.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/pipelines/_tts/__pycache__/interface.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/pipelines/_tts/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/pipelines/_tts/__pycache__/utils.cpython-311.pyc index b163b489..d1af0a9e 100644 Binary files a/.venv/Lib/site-packages/torchaudio/pipelines/_tts/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/pipelines/_tts/__pycache__/utils.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torchaudio/pipelines/_wav2vec2/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/pipelines/_wav2vec2/__pycache__/__init__.cpython-311.pyc index 1c47c27e..14ec92cb 100644 Binary files a/.venv/Lib/site-packages/torchaudio/pipelines/_wav2vec2/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/pipelines/_wav2vec2/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/pipelines/_wav2vec2/__pycache__/aligner.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/pipelines/_wav2vec2/__pycache__/aligner.cpython-311.pyc index d9aa6e3c..d3bfea82 100644 Binary files a/.venv/Lib/site-packages/torchaudio/pipelines/_wav2vec2/__pycache__/aligner.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/pipelines/_wav2vec2/__pycache__/aligner.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/pipelines/_wav2vec2/__pycache__/impl.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/pipelines/_wav2vec2/__pycache__/impl.cpython-311.pyc index 249ff629..e78cbcd0 100644 Binary files a/.venv/Lib/site-packages/torchaudio/pipelines/_wav2vec2/__pycache__/impl.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/pipelines/_wav2vec2/__pycache__/impl.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/pipelines/_wav2vec2/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/pipelines/_wav2vec2/__pycache__/utils.cpython-311.pyc index 889d5b17..0ada7bdd 100644 Binary files a/.venv/Lib/site-packages/torchaudio/pipelines/_wav2vec2/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/pipelines/_wav2vec2/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/sox_effects/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/sox_effects/__pycache__/__init__.cpython-311.pyc index bcae4536..c35aad1a 100644 Binary files 
a/.venv/Lib/site-packages/torchaudio/sox_effects/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/sox_effects/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/sox_effects/__pycache__/sox_effects.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/sox_effects/__pycache__/sox_effects.cpython-311.pyc index 962832c5..8633d3f2 100644 Binary files a/.venv/Lib/site-packages/torchaudio/sox_effects/__pycache__/sox_effects.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/sox_effects/__pycache__/sox_effects.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/transforms/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/transforms/__pycache__/__init__.cpython-311.pyc index 8ff82db1..eaabfb4a 100644 Binary files a/.venv/Lib/site-packages/torchaudio/transforms/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/transforms/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/transforms/__pycache__/_multi_channel.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/transforms/__pycache__/_multi_channel.cpython-311.pyc index 3c655438..cab9b760 100644 Binary files a/.venv/Lib/site-packages/torchaudio/transforms/__pycache__/_multi_channel.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/transforms/__pycache__/_multi_channel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/transforms/__pycache__/_transforms.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/transforms/__pycache__/_transforms.cpython-311.pyc index a6bf24c9..3acc01c2 100644 Binary files a/.venv/Lib/site-packages/torchaudio/transforms/__pycache__/_transforms.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/transforms/__pycache__/_transforms.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/utils/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/torchaudio/utils/__pycache__/__init__.cpython-311.pyc index 8c32605c..edc9299a 100644 Binary files a/.venv/Lib/site-packages/torchaudio/utils/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/utils/__pycache__/download.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/utils/__pycache__/download.cpython-311.pyc index 7cf5a22e..cf96307d 100644 Binary files a/.venv/Lib/site-packages/torchaudio/utils/__pycache__/download.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/utils/__pycache__/download.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchaudio/utils/__pycache__/sox_utils.cpython-311.pyc b/.venv/Lib/site-packages/torchaudio/utils/__pycache__/sox_utils.cpython-311.pyc index 6d519980..759245b9 100644 Binary files a/.venv/Lib/site-packages/torchaudio/utils/__pycache__/sox_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torchaudio/utils/__pycache__/sox_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchgen/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torchgen/__pycache__/__init__.cpython-311.pyc index 888bb5de..effcc795 100644 Binary files a/.venv/Lib/site-packages/torchgen/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torchgen/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchgen/__pycache__/code_template.cpython-311.pyc b/.venv/Lib/site-packages/torchgen/__pycache__/code_template.cpython-311.pyc index d20aa20c..adbe84b9 100644 Binary files a/.venv/Lib/site-packages/torchgen/__pycache__/code_template.cpython-311.pyc and b/.venv/Lib/site-packages/torchgen/__pycache__/code_template.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchgen/__pycache__/model.cpython-311.pyc b/.venv/Lib/site-packages/torchgen/__pycache__/model.cpython-311.pyc index 19d5849a..085eacff 100644 Binary files 
a/.venv/Lib/site-packages/torchgen/__pycache__/model.cpython-311.pyc and b/.venv/Lib/site-packages/torchgen/__pycache__/model.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torchgen/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/torchgen/__pycache__/utils.cpython-311.pyc index 00b8754b..3e1ced7d 100644 Binary files a/.venv/Lib/site-packages/torchgen/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torchgen/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torio/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torio/__pycache__/__init__.cpython-311.pyc index 11eb220d..53635dee 100644 Binary files a/.venv/Lib/site-packages/torio/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torio/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torio/_extension/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torio/_extension/__pycache__/__init__.cpython-311.pyc index 2dc439aa..7e0dacc1 100644 Binary files a/.venv/Lib/site-packages/torio/_extension/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torio/_extension/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torio/_extension/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/torio/_extension/__pycache__/utils.cpython-311.pyc index a368162e..3bd9fb53 100644 Binary files a/.venv/Lib/site-packages/torio/_extension/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/torio/_extension/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torio/io/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torio/io/__pycache__/__init__.cpython-311.pyc index 603b2c6a..6782de61 100644 Binary files a/.venv/Lib/site-packages/torio/io/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torio/io/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/torio/io/__pycache__/_streaming_media_decoder.cpython-311.pyc b/.venv/Lib/site-packages/torio/io/__pycache__/_streaming_media_decoder.cpython-311.pyc index d2031d5b..55df382b 100644 Binary files a/.venv/Lib/site-packages/torio/io/__pycache__/_streaming_media_decoder.cpython-311.pyc and b/.venv/Lib/site-packages/torio/io/__pycache__/_streaming_media_decoder.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torio/io/__pycache__/_streaming_media_encoder.cpython-311.pyc b/.venv/Lib/site-packages/torio/io/__pycache__/_streaming_media_encoder.cpython-311.pyc index a37d2c8a..14851c58 100644 Binary files a/.venv/Lib/site-packages/torio/io/__pycache__/_streaming_media_encoder.cpython-311.pyc and b/.venv/Lib/site-packages/torio/io/__pycache__/_streaming_media_encoder.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torio/lib/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torio/lib/__pycache__/__init__.cpython-311.pyc index 23bed328..7b0f646c 100644 Binary files a/.venv/Lib/site-packages/torio/lib/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torio/lib/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torio/utils/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/torio/utils/__pycache__/__init__.cpython-311.pyc index 5ff2b758..0c66dc1b 100644 Binary files a/.venv/Lib/site-packages/torio/utils/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/torio/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/torio/utils/__pycache__/ffmpeg_utils.cpython-311.pyc b/.venv/Lib/site-packages/torio/utils/__pycache__/ffmpeg_utils.cpython-311.pyc index aa247050..9a1e4edf 100644 Binary files a/.venv/Lib/site-packages/torio/utils/__pycache__/ffmpeg_utils.cpython-311.pyc and b/.venv/Lib/site-packages/torio/utils/__pycache__/ffmpeg_utils.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/tqdm/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/tqdm/__pycache__/__init__.cpython-311.pyc index bc05b6a7..c1b4505c 100644 Binary files a/.venv/Lib/site-packages/tqdm/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/tqdm/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tqdm/__pycache__/_dist_ver.cpython-311.pyc b/.venv/Lib/site-packages/tqdm/__pycache__/_dist_ver.cpython-311.pyc index 6eeb50cc..f9a61c90 100644 Binary files a/.venv/Lib/site-packages/tqdm/__pycache__/_dist_ver.cpython-311.pyc and b/.venv/Lib/site-packages/tqdm/__pycache__/_dist_ver.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tqdm/__pycache__/_monitor.cpython-311.pyc b/.venv/Lib/site-packages/tqdm/__pycache__/_monitor.cpython-311.pyc index 6d3d79fd..9b1626c3 100644 Binary files a/.venv/Lib/site-packages/tqdm/__pycache__/_monitor.cpython-311.pyc and b/.venv/Lib/site-packages/tqdm/__pycache__/_monitor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tqdm/__pycache__/_tqdm_pandas.cpython-311.pyc b/.venv/Lib/site-packages/tqdm/__pycache__/_tqdm_pandas.cpython-311.pyc index 4bc1bac6..9714ee5a 100644 Binary files a/.venv/Lib/site-packages/tqdm/__pycache__/_tqdm_pandas.cpython-311.pyc and b/.venv/Lib/site-packages/tqdm/__pycache__/_tqdm_pandas.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tqdm/__pycache__/asyncio.cpython-311.pyc b/.venv/Lib/site-packages/tqdm/__pycache__/asyncio.cpython-311.pyc index 518b4261..448f555c 100644 Binary files a/.venv/Lib/site-packages/tqdm/__pycache__/asyncio.cpython-311.pyc and b/.venv/Lib/site-packages/tqdm/__pycache__/asyncio.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tqdm/__pycache__/auto.cpython-311.pyc b/.venv/Lib/site-packages/tqdm/__pycache__/auto.cpython-311.pyc index b4b04b38..fd95c44e 100644 Binary files a/.venv/Lib/site-packages/tqdm/__pycache__/auto.cpython-311.pyc and 
b/.venv/Lib/site-packages/tqdm/__pycache__/auto.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tqdm/__pycache__/autonotebook.cpython-311.pyc b/.venv/Lib/site-packages/tqdm/__pycache__/autonotebook.cpython-311.pyc index 0ec8c4df..1a5c77fb 100644 Binary files a/.venv/Lib/site-packages/tqdm/__pycache__/autonotebook.cpython-311.pyc and b/.venv/Lib/site-packages/tqdm/__pycache__/autonotebook.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tqdm/__pycache__/cli.cpython-311.pyc b/.venv/Lib/site-packages/tqdm/__pycache__/cli.cpython-311.pyc index 233b0b59..f2b46ec1 100644 Binary files a/.venv/Lib/site-packages/tqdm/__pycache__/cli.cpython-311.pyc and b/.venv/Lib/site-packages/tqdm/__pycache__/cli.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tqdm/__pycache__/gui.cpython-311.pyc b/.venv/Lib/site-packages/tqdm/__pycache__/gui.cpython-311.pyc index 7b1c0098..5ba40ed9 100644 Binary files a/.venv/Lib/site-packages/tqdm/__pycache__/gui.cpython-311.pyc and b/.venv/Lib/site-packages/tqdm/__pycache__/gui.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tqdm/__pycache__/std.cpython-311.pyc b/.venv/Lib/site-packages/tqdm/__pycache__/std.cpython-311.pyc index e717bec6..3a71cd83 100644 Binary files a/.venv/Lib/site-packages/tqdm/__pycache__/std.cpython-311.pyc and b/.venv/Lib/site-packages/tqdm/__pycache__/std.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tqdm/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/tqdm/__pycache__/utils.cpython-311.pyc index 22bb6f90..2d28c956 100644 Binary files a/.venv/Lib/site-packages/tqdm/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/tqdm/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tqdm/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/tqdm/__pycache__/version.cpython-311.pyc index bb502fc6..62537918 100644 Binary files a/.venv/Lib/site-packages/tqdm/__pycache__/version.cpython-311.pyc and 
b/.venv/Lib/site-packages/tqdm/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tqdm/contrib/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/tqdm/contrib/__pycache__/__init__.cpython-311.pyc index 9ec0d1bb..ab69cdd2 100644 Binary files a/.venv/Lib/site-packages/tqdm/contrib/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/tqdm/contrib/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tqdm/contrib/__pycache__/concurrent.cpython-311.pyc b/.venv/Lib/site-packages/tqdm/contrib/__pycache__/concurrent.cpython-311.pyc index b0faa8e2..2fadc703 100644 Binary files a/.venv/Lib/site-packages/tqdm/contrib/__pycache__/concurrent.cpython-311.pyc and b/.venv/Lib/site-packages/tqdm/contrib/__pycache__/concurrent.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/trainer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/trainer/__pycache__/__init__.cpython-311.pyc index 41ccb06b..2979fd54 100644 Binary files a/.venv/Lib/site-packages/trainer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/trainer/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/trainer/__pycache__/analytics.cpython-311.pyc b/.venv/Lib/site-packages/trainer/__pycache__/analytics.cpython-311.pyc index 1c93cfde..70aae5f1 100644 Binary files a/.venv/Lib/site-packages/trainer/__pycache__/analytics.cpython-311.pyc and b/.venv/Lib/site-packages/trainer/__pycache__/analytics.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/trainer/__pycache__/callbacks.cpython-311.pyc b/.venv/Lib/site-packages/trainer/__pycache__/callbacks.cpython-311.pyc index 9c5bbbb5..9d26d250 100644 Binary files a/.venv/Lib/site-packages/trainer/__pycache__/callbacks.cpython-311.pyc and b/.venv/Lib/site-packages/trainer/__pycache__/callbacks.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/trainer/__pycache__/generic_utils.cpython-311.pyc 
b/.venv/Lib/site-packages/trainer/__pycache__/generic_utils.cpython-311.pyc index 67019e08..f5e28c40 100644 Binary files a/.venv/Lib/site-packages/trainer/__pycache__/generic_utils.cpython-311.pyc and b/.venv/Lib/site-packages/trainer/__pycache__/generic_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/trainer/__pycache__/io.cpython-311.pyc b/.venv/Lib/site-packages/trainer/__pycache__/io.cpython-311.pyc index 829856b9..2f86a35f 100644 Binary files a/.venv/Lib/site-packages/trainer/__pycache__/io.cpython-311.pyc and b/.venv/Lib/site-packages/trainer/__pycache__/io.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/trainer/__pycache__/logger.cpython-311.pyc b/.venv/Lib/site-packages/trainer/__pycache__/logger.cpython-311.pyc index b2faf6d7..783a116f 100644 Binary files a/.venv/Lib/site-packages/trainer/__pycache__/logger.cpython-311.pyc and b/.venv/Lib/site-packages/trainer/__pycache__/logger.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/trainer/__pycache__/model.cpython-311.pyc b/.venv/Lib/site-packages/trainer/__pycache__/model.cpython-311.pyc index 59f1d6b8..e28ee326 100644 Binary files a/.venv/Lib/site-packages/trainer/__pycache__/model.cpython-311.pyc and b/.venv/Lib/site-packages/trainer/__pycache__/model.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/trainer/__pycache__/torch.cpython-311.pyc b/.venv/Lib/site-packages/trainer/__pycache__/torch.cpython-311.pyc index ab64b1b2..adc74793 100644 Binary files a/.venv/Lib/site-packages/trainer/__pycache__/torch.cpython-311.pyc and b/.venv/Lib/site-packages/trainer/__pycache__/torch.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/trainer/__pycache__/trainer.cpython-311.pyc b/.venv/Lib/site-packages/trainer/__pycache__/trainer.cpython-311.pyc index db9e0f2a..40bf1132 100644 Binary files a/.venv/Lib/site-packages/trainer/__pycache__/trainer.cpython-311.pyc and b/.venv/Lib/site-packages/trainer/__pycache__/trainer.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/trainer/__pycache__/trainer_utils.cpython-311.pyc b/.venv/Lib/site-packages/trainer/__pycache__/trainer_utils.cpython-311.pyc index 6fdd9a0b..388d2cf3 100644 Binary files a/.venv/Lib/site-packages/trainer/__pycache__/trainer_utils.cpython-311.pyc and b/.venv/Lib/site-packages/trainer/__pycache__/trainer_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/trainer/logging/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/trainer/logging/__pycache__/__init__.cpython-311.pyc index 43a0e284..c09d2910 100644 Binary files a/.venv/Lib/site-packages/trainer/logging/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/trainer/logging/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/trainer/logging/__pycache__/base_dash_logger.cpython-311.pyc b/.venv/Lib/site-packages/trainer/logging/__pycache__/base_dash_logger.cpython-311.pyc index 07d534b3..05eed958 100644 Binary files a/.venv/Lib/site-packages/trainer/logging/__pycache__/base_dash_logger.cpython-311.pyc and b/.venv/Lib/site-packages/trainer/logging/__pycache__/base_dash_logger.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/trainer/logging/__pycache__/console_logger.cpython-311.pyc b/.venv/Lib/site-packages/trainer/logging/__pycache__/console_logger.cpython-311.pyc index 339c2e01..9b20b98a 100644 Binary files a/.venv/Lib/site-packages/trainer/logging/__pycache__/console_logger.cpython-311.pyc and b/.venv/Lib/site-packages/trainer/logging/__pycache__/console_logger.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/trainer/logging/__pycache__/dummy_logger.cpython-311.pyc b/.venv/Lib/site-packages/trainer/logging/__pycache__/dummy_logger.cpython-311.pyc index 0f82a523..eb765d75 100644 Binary files a/.venv/Lib/site-packages/trainer/logging/__pycache__/dummy_logger.cpython-311.pyc and b/.venv/Lib/site-packages/trainer/logging/__pycache__/dummy_logger.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/trainer/utils/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/trainer/utils/__pycache__/__init__.cpython-311.pyc index 2419d2be..eaf12ed6 100644 Binary files a/.venv/Lib/site-packages/trainer/utils/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/trainer/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/trainer/utils/__pycache__/cpu_memory.cpython-311.pyc b/.venv/Lib/site-packages/trainer/utils/__pycache__/cpu_memory.cpython-311.pyc index 434fc0b5..1eb2aa19 100644 Binary files a/.venv/Lib/site-packages/trainer/utils/__pycache__/cpu_memory.cpython-311.pyc and b/.venv/Lib/site-packages/trainer/utils/__pycache__/cpu_memory.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/trainer/utils/__pycache__/cuda_memory.cpython-311.pyc b/.venv/Lib/site-packages/trainer/utils/__pycache__/cuda_memory.cpython-311.pyc index 51dc2bf4..3a2abf49 100644 Binary files a/.venv/Lib/site-packages/trainer/utils/__pycache__/cuda_memory.cpython-311.pyc and b/.venv/Lib/site-packages/trainer/utils/__pycache__/cuda_memory.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/trainer/utils/__pycache__/distributed.cpython-311.pyc b/.venv/Lib/site-packages/trainer/utils/__pycache__/distributed.cpython-311.pyc index 13c2e5e8..e0b25df0 100644 Binary files a/.venv/Lib/site-packages/trainer/utils/__pycache__/distributed.cpython-311.pyc and b/.venv/Lib/site-packages/trainer/utils/__pycache__/distributed.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/__pycache__/__init__.cpython-311.pyc index 8162043d..2d50f49d 100644 Binary files a/.venv/Lib/site-packages/transformers/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/__pycache__/activations.cpython-311.pyc 
b/.venv/Lib/site-packages/transformers/__pycache__/activations.cpython-311.pyc index 41de0315..baea7588 100644 Binary files a/.venv/Lib/site-packages/transformers/__pycache__/activations.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/__pycache__/activations.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/__pycache__/cache_utils.cpython-311.pyc b/.venv/Lib/site-packages/transformers/__pycache__/cache_utils.cpython-311.pyc index 315d9932..c40ce039 100644 Binary files a/.venv/Lib/site-packages/transformers/__pycache__/cache_utils.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/__pycache__/cache_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/__pycache__/configuration_utils.cpython-311.pyc b/.venv/Lib/site-packages/transformers/__pycache__/configuration_utils.cpython-311.pyc index bee70209..f2ea4425 100644 Binary files a/.venv/Lib/site-packages/transformers/__pycache__/configuration_utils.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/__pycache__/configuration_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/__pycache__/convert_slow_tokenizer.cpython-311.pyc b/.venv/Lib/site-packages/transformers/__pycache__/convert_slow_tokenizer.cpython-311.pyc index 94172c02..6f6fe7c6 100644 Binary files a/.venv/Lib/site-packages/transformers/__pycache__/convert_slow_tokenizer.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/__pycache__/convert_slow_tokenizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/__pycache__/dependency_versions_check.cpython-311.pyc b/.venv/Lib/site-packages/transformers/__pycache__/dependency_versions_check.cpython-311.pyc index 522a20c6..d35e76d8 100644 Binary files a/.venv/Lib/site-packages/transformers/__pycache__/dependency_versions_check.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/__pycache__/dependency_versions_check.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/__pycache__/dependency_versions_table.cpython-311.pyc b/.venv/Lib/site-packages/transformers/__pycache__/dependency_versions_table.cpython-311.pyc index d03690dc..bdb17f87 100644 Binary files a/.venv/Lib/site-packages/transformers/__pycache__/dependency_versions_table.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/__pycache__/dependency_versions_table.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/__pycache__/dynamic_module_utils.cpython-311.pyc b/.venv/Lib/site-packages/transformers/__pycache__/dynamic_module_utils.cpython-311.pyc index d6828631..fb5dac90 100644 Binary files a/.venv/Lib/site-packages/transformers/__pycache__/dynamic_module_utils.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/__pycache__/dynamic_module_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/__pycache__/file_utils.cpython-311.pyc b/.venv/Lib/site-packages/transformers/__pycache__/file_utils.cpython-311.pyc index 08c98621..f9f2e7b1 100644 Binary files a/.venv/Lib/site-packages/transformers/__pycache__/file_utils.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/__pycache__/file_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/__pycache__/modeling_outputs.cpython-311.pyc b/.venv/Lib/site-packages/transformers/__pycache__/modeling_outputs.cpython-311.pyc index a24dd9a7..3b3f3729 100644 Binary files a/.venv/Lib/site-packages/transformers/__pycache__/modeling_outputs.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/__pycache__/modeling_outputs.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/__pycache__/modeling_utils.cpython-311.pyc b/.venv/Lib/site-packages/transformers/__pycache__/modeling_utils.cpython-311.pyc index a0e1470f..d2d66168 100644 Binary files a/.venv/Lib/site-packages/transformers/__pycache__/modeling_utils.cpython-311.pyc and 
b/.venv/Lib/site-packages/transformers/__pycache__/modeling_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/__pycache__/pytorch_utils.cpython-311.pyc b/.venv/Lib/site-packages/transformers/__pycache__/pytorch_utils.cpython-311.pyc index dbe1c62a..b8faf2ab 100644 Binary files a/.venv/Lib/site-packages/transformers/__pycache__/pytorch_utils.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/__pycache__/pytorch_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/__pycache__/safetensors_conversion.cpython-311.pyc b/.venv/Lib/site-packages/transformers/__pycache__/safetensors_conversion.cpython-311.pyc index fe1faa78..51381fc5 100644 Binary files a/.venv/Lib/site-packages/transformers/__pycache__/safetensors_conversion.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/__pycache__/safetensors_conversion.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/__pycache__/tokenization_utils.cpython-311.pyc b/.venv/Lib/site-packages/transformers/__pycache__/tokenization_utils.cpython-311.pyc index 94a9a1c5..cd1d1569 100644 Binary files a/.venv/Lib/site-packages/transformers/__pycache__/tokenization_utils.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/__pycache__/tokenization_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/__pycache__/tokenization_utils_base.cpython-311.pyc b/.venv/Lib/site-packages/transformers/__pycache__/tokenization_utils_base.cpython-311.pyc index 2f2ccb17..f0d74e04 100644 Binary files a/.venv/Lib/site-packages/transformers/__pycache__/tokenization_utils_base.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/__pycache__/tokenization_utils_base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/__pycache__/tokenization_utils_fast.cpython-311.pyc b/.venv/Lib/site-packages/transformers/__pycache__/tokenization_utils_fast.cpython-311.pyc index 0f347467..6bbfc417 100644 Binary files 
a/.venv/Lib/site-packages/transformers/__pycache__/tokenization_utils_fast.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/__pycache__/tokenization_utils_fast.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/generation/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/generation/__pycache__/__init__.cpython-311.pyc index 471aa22f..22e9448d 100644 Binary files a/.venv/Lib/site-packages/transformers/generation/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/generation/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/generation/__pycache__/beam_constraints.cpython-311.pyc b/.venv/Lib/site-packages/transformers/generation/__pycache__/beam_constraints.cpython-311.pyc index d2f849b4..04d9d745 100644 Binary files a/.venv/Lib/site-packages/transformers/generation/__pycache__/beam_constraints.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/generation/__pycache__/beam_constraints.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/generation/__pycache__/beam_search.cpython-311.pyc b/.venv/Lib/site-packages/transformers/generation/__pycache__/beam_search.cpython-311.pyc index b186479e..f016e588 100644 Binary files a/.venv/Lib/site-packages/transformers/generation/__pycache__/beam_search.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/generation/__pycache__/beam_search.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/generation/__pycache__/candidate_generator.cpython-311.pyc b/.venv/Lib/site-packages/transformers/generation/__pycache__/candidate_generator.cpython-311.pyc index ce83880c..32279023 100644 Binary files a/.venv/Lib/site-packages/transformers/generation/__pycache__/candidate_generator.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/generation/__pycache__/candidate_generator.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/generation/__pycache__/configuration_utils.cpython-311.pyc b/.venv/Lib/site-packages/transformers/generation/__pycache__/configuration_utils.cpython-311.pyc index bf0acba3..f9f9121e 100644 Binary files a/.venv/Lib/site-packages/transformers/generation/__pycache__/configuration_utils.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/generation/__pycache__/configuration_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/generation/__pycache__/logits_process.cpython-311.pyc b/.venv/Lib/site-packages/transformers/generation/__pycache__/logits_process.cpython-311.pyc index 52c8b3a9..78e7bbf9 100644 Binary files a/.venv/Lib/site-packages/transformers/generation/__pycache__/logits_process.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/generation/__pycache__/logits_process.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/generation/__pycache__/stopping_criteria.cpython-311.pyc b/.venv/Lib/site-packages/transformers/generation/__pycache__/stopping_criteria.cpython-311.pyc index f53278be..93a4b231 100644 Binary files a/.venv/Lib/site-packages/transformers/generation/__pycache__/stopping_criteria.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/generation/__pycache__/stopping_criteria.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/generation/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/transformers/generation/__pycache__/utils.cpython-311.pyc index 7f31b8ba..4e1cc266 100644 Binary files a/.venv/Lib/site-packages/transformers/generation/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/generation/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/integrations/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/integrations/__pycache__/__init__.cpython-311.pyc index ed7dd158..931fd383 100644 Binary files 
a/.venv/Lib/site-packages/transformers/integrations/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/integrations/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/integrations/__pycache__/aqlm.cpython-311.pyc b/.venv/Lib/site-packages/transformers/integrations/__pycache__/aqlm.cpython-311.pyc index 41f5f2f3..6438a594 100644 Binary files a/.venv/Lib/site-packages/transformers/integrations/__pycache__/aqlm.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/integrations/__pycache__/aqlm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/integrations/__pycache__/deepspeed.cpython-311.pyc b/.venv/Lib/site-packages/transformers/integrations/__pycache__/deepspeed.cpython-311.pyc index fe61065c..5a48d5a5 100644 Binary files a/.venv/Lib/site-packages/transformers/integrations/__pycache__/deepspeed.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/integrations/__pycache__/deepspeed.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/integrations/__pycache__/peft.cpython-311.pyc b/.venv/Lib/site-packages/transformers/integrations/__pycache__/peft.cpython-311.pyc index 769a75a0..a07d8e8b 100644 Binary files a/.venv/Lib/site-packages/transformers/integrations/__pycache__/peft.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/integrations/__pycache__/peft.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/__pycache__/__init__.cpython-311.pyc index 8a70244a..a83b1eaa 100644 Binary files a/.venv/Lib/site-packages/transformers/models/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/albert/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/transformers/models/albert/__pycache__/__init__.cpython-311.pyc index 25df0613..870a8ea8 100644 Binary files a/.venv/Lib/site-packages/transformers/models/albert/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/albert/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/align/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/align/__pycache__/__init__.cpython-311.pyc index a116d916..2867d8fb 100644 Binary files a/.venv/Lib/site-packages/transformers/models/align/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/align/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/altclip/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/altclip/__pycache__/__init__.cpython-311.pyc index da43a677..f0311aa6 100644 Binary files a/.venv/Lib/site-packages/transformers/models/altclip/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/altclip/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/audio_spectrogram_transformer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/audio_spectrogram_transformer/__pycache__/__init__.cpython-311.pyc index 0794f63e..d4dfb358 100644 Binary files a/.venv/Lib/site-packages/transformers/models/audio_spectrogram_transformer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/audio_spectrogram_transformer/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/auto/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/auto/__pycache__/__init__.cpython-311.pyc index b53608cf..97227852 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/auto/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/auto/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/auto/__pycache__/auto_factory.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/auto/__pycache__/auto_factory.cpython-311.pyc index 136c1f6a..a69f337b 100644 Binary files a/.venv/Lib/site-packages/transformers/models/auto/__pycache__/auto_factory.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/auto/__pycache__/auto_factory.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/auto/__pycache__/configuration_auto.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/auto/__pycache__/configuration_auto.cpython-311.pyc index 15b743eb..984b3c75 100644 Binary files a/.venv/Lib/site-packages/transformers/models/auto/__pycache__/configuration_auto.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/auto/__pycache__/configuration_auto.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/auto/__pycache__/modeling_auto.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/auto/__pycache__/modeling_auto.cpython-311.pyc index 47f796c9..cd02d048 100644 Binary files a/.venv/Lib/site-packages/transformers/models/auto/__pycache__/modeling_auto.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/auto/__pycache__/modeling_auto.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/autoformer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/autoformer/__pycache__/__init__.cpython-311.pyc index 1f85a218..b20b968a 100644 Binary files a/.venv/Lib/site-packages/transformers/models/autoformer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/autoformer/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/bark/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/bark/__pycache__/__init__.cpython-311.pyc index 5b98529e..32604dbe 100644 Binary files a/.venv/Lib/site-packages/transformers/models/bark/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/bark/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/bart/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/bart/__pycache__/__init__.cpython-311.pyc index 258bb8a3..9db0471e 100644 Binary files a/.venv/Lib/site-packages/transformers/models/bart/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/bart/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/barthez/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/barthez/__pycache__/__init__.cpython-311.pyc index 4a3c6ee0..f0b9aaee 100644 Binary files a/.venv/Lib/site-packages/transformers/models/barthez/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/barthez/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/bartpho/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/bartpho/__pycache__/__init__.cpython-311.pyc index 3118fb7d..6551a186 100644 Binary files a/.venv/Lib/site-packages/transformers/models/bartpho/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/bartpho/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/beit/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/beit/__pycache__/__init__.cpython-311.pyc index 948a3dee..ed685814 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/beit/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/beit/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/bert/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/bert/__pycache__/__init__.cpython-311.pyc index e483752f..e7a7b226 100644 Binary files a/.venv/Lib/site-packages/transformers/models/bert/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/bert/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/bert_generation/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/bert_generation/__pycache__/__init__.cpython-311.pyc index 805ff721..339e4daf 100644 Binary files a/.venv/Lib/site-packages/transformers/models/bert_generation/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/bert_generation/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/bert_japanese/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/bert_japanese/__pycache__/__init__.cpython-311.pyc index 488d4473..d2f33f41 100644 Binary files a/.venv/Lib/site-packages/transformers/models/bert_japanese/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/bert_japanese/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/bertweet/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/bertweet/__pycache__/__init__.cpython-311.pyc index 05a2a018..aa4876fa 100644 Binary files a/.venv/Lib/site-packages/transformers/models/bertweet/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/bertweet/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/big_bird/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/big_bird/__pycache__/__init__.cpython-311.pyc index 1f748956..ac2a941c 100644 Binary files a/.venv/Lib/site-packages/transformers/models/big_bird/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/big_bird/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/bigbird_pegasus/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/bigbird_pegasus/__pycache__/__init__.cpython-311.pyc index 116c63fc..79fc3981 100644 Binary files a/.venv/Lib/site-packages/transformers/models/bigbird_pegasus/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/bigbird_pegasus/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/biogpt/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/biogpt/__pycache__/__init__.cpython-311.pyc index 3719f24c..4e9e418e 100644 Binary files a/.venv/Lib/site-packages/transformers/models/biogpt/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/biogpt/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/bit/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/bit/__pycache__/__init__.cpython-311.pyc index 8b25d88b..903d659f 100644 Binary files a/.venv/Lib/site-packages/transformers/models/bit/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/bit/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/blenderbot/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/blenderbot/__pycache__/__init__.cpython-311.pyc index 025b1cef..7e6b4e16 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/blenderbot/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/blenderbot/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/blenderbot_small/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/blenderbot_small/__pycache__/__init__.cpython-311.pyc index 49ce3e62..e86a23d5 100644 Binary files a/.venv/Lib/site-packages/transformers/models/blenderbot_small/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/blenderbot_small/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/blip/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/blip/__pycache__/__init__.cpython-311.pyc index 60211b58..fbbe0e32 100644 Binary files a/.venv/Lib/site-packages/transformers/models/blip/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/blip/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/blip_2/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/blip_2/__pycache__/__init__.cpython-311.pyc index 22048480..d96ea41e 100644 Binary files a/.venv/Lib/site-packages/transformers/models/blip_2/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/blip_2/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/bloom/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/bloom/__pycache__/__init__.cpython-311.pyc index adef9849..738c6acf 100644 Binary files a/.venv/Lib/site-packages/transformers/models/bloom/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/bloom/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/bridgetower/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/bridgetower/__pycache__/__init__.cpython-311.pyc index 2047207b..9e5c3348 100644 Binary files a/.venv/Lib/site-packages/transformers/models/bridgetower/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/bridgetower/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/bros/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/bros/__pycache__/__init__.cpython-311.pyc index f53521a6..db56404d 100644 Binary files a/.venv/Lib/site-packages/transformers/models/bros/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/bros/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/byt5/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/byt5/__pycache__/__init__.cpython-311.pyc index 0570aec4..9585035b 100644 Binary files a/.venv/Lib/site-packages/transformers/models/byt5/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/byt5/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/camembert/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/camembert/__pycache__/__init__.cpython-311.pyc index b2d0a8ba..ac093f94 100644 Binary files a/.venv/Lib/site-packages/transformers/models/camembert/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/camembert/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/canine/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/canine/__pycache__/__init__.cpython-311.pyc index 7f323e9d..3a695f90 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/canine/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/canine/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/chinese_clip/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/chinese_clip/__pycache__/__init__.cpython-311.pyc index ba790fa2..c9f49ce7 100644 Binary files a/.venv/Lib/site-packages/transformers/models/chinese_clip/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/chinese_clip/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/clap/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/clap/__pycache__/__init__.cpython-311.pyc index bbf11be0..cff7f512 100644 Binary files a/.venv/Lib/site-packages/transformers/models/clap/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/clap/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/clip/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/clip/__pycache__/__init__.cpython-311.pyc index c62ad68b..368b3007 100644 Binary files a/.venv/Lib/site-packages/transformers/models/clip/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/clip/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/clipseg/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/clipseg/__pycache__/__init__.cpython-311.pyc index 164d7aa3..f59fd188 100644 Binary files a/.venv/Lib/site-packages/transformers/models/clipseg/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/clipseg/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/clvp/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/clvp/__pycache__/__init__.cpython-311.pyc index dd7fbeae..0fa7ddf3 100644 Binary files a/.venv/Lib/site-packages/transformers/models/clvp/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/clvp/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/code_llama/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/code_llama/__pycache__/__init__.cpython-311.pyc index b516d017..65b9bb0c 100644 Binary files a/.venv/Lib/site-packages/transformers/models/code_llama/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/code_llama/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/codegen/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/codegen/__pycache__/__init__.cpython-311.pyc index af4626f1..bf1a9bbf 100644 Binary files a/.venv/Lib/site-packages/transformers/models/codegen/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/codegen/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/cohere/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/cohere/__pycache__/__init__.cpython-311.pyc index ccec70f5..ff962b21 100644 Binary files a/.venv/Lib/site-packages/transformers/models/cohere/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/cohere/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/conditional_detr/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/conditional_detr/__pycache__/__init__.cpython-311.pyc index f3e81223..53bc9cc2 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/conditional_detr/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/conditional_detr/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/convbert/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/convbert/__pycache__/__init__.cpython-311.pyc index 569c53a2..1d40a402 100644 Binary files a/.venv/Lib/site-packages/transformers/models/convbert/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/convbert/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/convnext/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/convnext/__pycache__/__init__.cpython-311.pyc index a8cc54d0..88a4e33d 100644 Binary files a/.venv/Lib/site-packages/transformers/models/convnext/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/convnext/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/convnextv2/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/convnextv2/__pycache__/__init__.cpython-311.pyc index bc300433..58c25cf0 100644 Binary files a/.venv/Lib/site-packages/transformers/models/convnextv2/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/convnextv2/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/cpm/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/cpm/__pycache__/__init__.cpython-311.pyc index b3ac8b84..fd256d1a 100644 Binary files a/.venv/Lib/site-packages/transformers/models/cpm/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/cpm/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/cpmant/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/cpmant/__pycache__/__init__.cpython-311.pyc index 704e6b7c..d86c832f 100644 Binary files a/.venv/Lib/site-packages/transformers/models/cpmant/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/cpmant/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/ctrl/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/ctrl/__pycache__/__init__.cpython-311.pyc index b7172476..b6c4604f 100644 Binary files a/.venv/Lib/site-packages/transformers/models/ctrl/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/ctrl/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/cvt/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/cvt/__pycache__/__init__.cpython-311.pyc index bf04c945..3acc4a47 100644 Binary files a/.venv/Lib/site-packages/transformers/models/cvt/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/cvt/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/data2vec/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/data2vec/__pycache__/__init__.cpython-311.pyc index cf587d5c..ee7fbb99 100644 Binary files a/.venv/Lib/site-packages/transformers/models/data2vec/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/data2vec/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/dbrx/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/dbrx/__pycache__/__init__.cpython-311.pyc index 61c91c31..dabf4650 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/dbrx/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/dbrx/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/deberta/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/deberta/__pycache__/__init__.cpython-311.pyc index 1eb56bdb..dc45b0c7 100644 Binary files a/.venv/Lib/site-packages/transformers/models/deberta/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/deberta/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/deberta_v2/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/deberta_v2/__pycache__/__init__.cpython-311.pyc index c2870c24..c2812ffd 100644 Binary files a/.venv/Lib/site-packages/transformers/models/deberta_v2/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/deberta_v2/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/decision_transformer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/decision_transformer/__pycache__/__init__.cpython-311.pyc index 46a6d137..bce05b83 100644 Binary files a/.venv/Lib/site-packages/transformers/models/decision_transformer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/decision_transformer/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/deformable_detr/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/deformable_detr/__pycache__/__init__.cpython-311.pyc index 1383c123..80a59f33 100644 Binary files a/.venv/Lib/site-packages/transformers/models/deformable_detr/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/deformable_detr/__pycache__/__init__.cpython-311.pyc 
differ diff --git a/.venv/Lib/site-packages/transformers/models/deit/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/deit/__pycache__/__init__.cpython-311.pyc index 38ee7277..e37c80ca 100644 Binary files a/.venv/Lib/site-packages/transformers/models/deit/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/deit/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/deprecated/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/deprecated/__pycache__/__init__.cpython-311.pyc index c8cdb5d5..e1b44bb0 100644 Binary files a/.venv/Lib/site-packages/transformers/models/deprecated/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/deprecated/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/deprecated/__pycache__/_archive_maps.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/deprecated/__pycache__/_archive_maps.cpython-311.pyc index fd992d83..4a4c0275 100644 Binary files a/.venv/Lib/site-packages/transformers/models/deprecated/__pycache__/_archive_maps.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/deprecated/__pycache__/_archive_maps.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/depth_anything/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/depth_anything/__pycache__/__init__.cpython-311.pyc index 804b92b5..8f79a017 100644 Binary files a/.venv/Lib/site-packages/transformers/models/depth_anything/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/depth_anything/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/deta/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/deta/__pycache__/__init__.cpython-311.pyc index 
f5b8a7b8..8f3a0766 100644 Binary files a/.venv/Lib/site-packages/transformers/models/deta/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/deta/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/detr/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/detr/__pycache__/__init__.cpython-311.pyc index 4221496e..795b69ff 100644 Binary files a/.venv/Lib/site-packages/transformers/models/detr/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/detr/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/dialogpt/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/dialogpt/__pycache__/__init__.cpython-311.pyc index 98a46e7f..02909d70 100644 Binary files a/.venv/Lib/site-packages/transformers/models/dialogpt/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/dialogpt/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/dinat/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/dinat/__pycache__/__init__.cpython-311.pyc index 27c99230..5c5a45c2 100644 Binary files a/.venv/Lib/site-packages/transformers/models/dinat/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/dinat/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/dinov2/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/dinov2/__pycache__/__init__.cpython-311.pyc index 3d660a94..ef29547c 100644 Binary files a/.venv/Lib/site-packages/transformers/models/dinov2/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/dinov2/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/distilbert/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/distilbert/__pycache__/__init__.cpython-311.pyc index e249e10d..52866d6e 100644 Binary files a/.venv/Lib/site-packages/transformers/models/distilbert/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/distilbert/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/dit/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/dit/__pycache__/__init__.cpython-311.pyc index 711c6f41..8df29a21 100644 Binary files a/.venv/Lib/site-packages/transformers/models/dit/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/dit/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/donut/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/donut/__pycache__/__init__.cpython-311.pyc index ec5d6c86..27b56a2c 100644 Binary files a/.venv/Lib/site-packages/transformers/models/donut/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/donut/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/dpr/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/dpr/__pycache__/__init__.cpython-311.pyc index 7e0cb0c1..6f06cb86 100644 Binary files a/.venv/Lib/site-packages/transformers/models/dpr/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/dpr/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/dpt/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/dpt/__pycache__/__init__.cpython-311.pyc index 082b99bc..154d901e 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/dpt/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/dpt/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/efficientformer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/efficientformer/__pycache__/__init__.cpython-311.pyc index 3c9134a9..dc8e290f 100644 Binary files a/.venv/Lib/site-packages/transformers/models/efficientformer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/efficientformer/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/efficientnet/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/efficientnet/__pycache__/__init__.cpython-311.pyc index 8cdd2654..41adaa5e 100644 Binary files a/.venv/Lib/site-packages/transformers/models/efficientnet/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/efficientnet/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/electra/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/electra/__pycache__/__init__.cpython-311.pyc index 3e4d55fb..6687f873 100644 Binary files a/.venv/Lib/site-packages/transformers/models/electra/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/electra/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/encodec/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/encodec/__pycache__/__init__.cpython-311.pyc index 43a0b92b..6d05c8f3 100644 Binary files a/.venv/Lib/site-packages/transformers/models/encodec/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/encodec/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/encoder_decoder/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/encoder_decoder/__pycache__/__init__.cpython-311.pyc index d9797aa4..13cf8e31 100644 Binary files a/.venv/Lib/site-packages/transformers/models/encoder_decoder/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/encoder_decoder/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/ernie/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/ernie/__pycache__/__init__.cpython-311.pyc index 3863fbb3..f34a483f 100644 Binary files a/.venv/Lib/site-packages/transformers/models/ernie/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/ernie/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/ernie_m/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/ernie_m/__pycache__/__init__.cpython-311.pyc index f61428f0..19424ada 100644 Binary files a/.venv/Lib/site-packages/transformers/models/ernie_m/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/ernie_m/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/esm/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/esm/__pycache__/__init__.cpython-311.pyc index 63c2b8c0..3009380a 100644 Binary files a/.venv/Lib/site-packages/transformers/models/esm/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/esm/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/falcon/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/falcon/__pycache__/__init__.cpython-311.pyc index 4ae032fe..97376657 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/falcon/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/falcon/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/fastspeech2_conformer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/fastspeech2_conformer/__pycache__/__init__.cpython-311.pyc index 02356055..2b4d4db6 100644 Binary files a/.venv/Lib/site-packages/transformers/models/fastspeech2_conformer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/fastspeech2_conformer/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/flaubert/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/flaubert/__pycache__/__init__.cpython-311.pyc index 3e261788..4c1f6ee7 100644 Binary files a/.venv/Lib/site-packages/transformers/models/flaubert/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/flaubert/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/flava/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/flava/__pycache__/__init__.cpython-311.pyc index b52a2dd9..0fcdf2d5 100644 Binary files a/.venv/Lib/site-packages/transformers/models/flava/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/flava/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/fnet/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/fnet/__pycache__/__init__.cpython-311.pyc index c53f5491..c934850d 100644 Binary files a/.venv/Lib/site-packages/transformers/models/fnet/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/fnet/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/focalnet/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/focalnet/__pycache__/__init__.cpython-311.pyc index a13230a1..6e956c83 100644 Binary files a/.venv/Lib/site-packages/transformers/models/focalnet/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/focalnet/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/fsmt/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/fsmt/__pycache__/__init__.cpython-311.pyc index 1e786d46..c91bbff5 100644 Binary files a/.venv/Lib/site-packages/transformers/models/fsmt/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/fsmt/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/funnel/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/funnel/__pycache__/__init__.cpython-311.pyc index b5f17279..4bf92aee 100644 Binary files a/.venv/Lib/site-packages/transformers/models/funnel/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/funnel/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/fuyu/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/fuyu/__pycache__/__init__.cpython-311.pyc index 84da1473..d038f7ce 100644 Binary files a/.venv/Lib/site-packages/transformers/models/fuyu/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/fuyu/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/gemma/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/gemma/__pycache__/__init__.cpython-311.pyc index 6923478b..8c780aca 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/gemma/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/gemma/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/git/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/git/__pycache__/__init__.cpython-311.pyc index c4e5ae17..ac737faa 100644 Binary files a/.venv/Lib/site-packages/transformers/models/git/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/git/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/glpn/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/glpn/__pycache__/__init__.cpython-311.pyc index 00fefae1..1804168e 100644 Binary files a/.venv/Lib/site-packages/transformers/models/glpn/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/glpn/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/gpt2/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/gpt2/__pycache__/__init__.cpython-311.pyc index 554d19a5..e0df4c91 100644 Binary files a/.venv/Lib/site-packages/transformers/models/gpt2/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/gpt2/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/gpt2/__pycache__/configuration_gpt2.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/gpt2/__pycache__/configuration_gpt2.cpython-311.pyc index 44723600..00c47188 100644 Binary files a/.venv/Lib/site-packages/transformers/models/gpt2/__pycache__/configuration_gpt2.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/gpt2/__pycache__/configuration_gpt2.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/gpt2/__pycache__/modeling_gpt2.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/gpt2/__pycache__/modeling_gpt2.cpython-311.pyc index f4ce969e..baea15b4 100644 Binary files a/.venv/Lib/site-packages/transformers/models/gpt2/__pycache__/modeling_gpt2.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/gpt2/__pycache__/modeling_gpt2.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/gpt_bigcode/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/gpt_bigcode/__pycache__/__init__.cpython-311.pyc index da58ad34..319d6905 100644 Binary files a/.venv/Lib/site-packages/transformers/models/gpt_bigcode/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/gpt_bigcode/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/gpt_neo/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/gpt_neo/__pycache__/__init__.cpython-311.pyc index c78688b6..0dd50414 100644 Binary files a/.venv/Lib/site-packages/transformers/models/gpt_neo/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/gpt_neo/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/gpt_neox/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/gpt_neox/__pycache__/__init__.cpython-311.pyc index a1d55f2f..555804c2 100644 Binary files a/.venv/Lib/site-packages/transformers/models/gpt_neox/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/gpt_neox/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/gpt_neox_japanese/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/gpt_neox_japanese/__pycache__/__init__.cpython-311.pyc index 4eaa9a79..e64351fb 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/gpt_neox_japanese/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/gpt_neox_japanese/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/gpt_sw3/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/gpt_sw3/__pycache__/__init__.cpython-311.pyc index 930acf4c..cf2c809e 100644 Binary files a/.venv/Lib/site-packages/transformers/models/gpt_sw3/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/gpt_sw3/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/gptj/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/gptj/__pycache__/__init__.cpython-311.pyc index 86d8e8b1..6ed7630b 100644 Binary files a/.venv/Lib/site-packages/transformers/models/gptj/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/gptj/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/gptsan_japanese/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/gptsan_japanese/__pycache__/__init__.cpython-311.pyc index a3ed4c9e..b7c94516 100644 Binary files a/.venv/Lib/site-packages/transformers/models/gptsan_japanese/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/gptsan_japanese/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/graphormer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/graphormer/__pycache__/__init__.cpython-311.pyc index ccf41d3a..a0111c40 100644 Binary files a/.venv/Lib/site-packages/transformers/models/graphormer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/graphormer/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/grounding_dino/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/grounding_dino/__pycache__/__init__.cpython-311.pyc index 68b75a19..5b7580b0 100644 Binary files a/.venv/Lib/site-packages/transformers/models/grounding_dino/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/grounding_dino/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/groupvit/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/groupvit/__pycache__/__init__.cpython-311.pyc index bafdb7eb..5435ea6b 100644 Binary files a/.venv/Lib/site-packages/transformers/models/groupvit/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/groupvit/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/herbert/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/herbert/__pycache__/__init__.cpython-311.pyc index fc62c605..b5407e44 100644 Binary files a/.venv/Lib/site-packages/transformers/models/herbert/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/herbert/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/hubert/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/hubert/__pycache__/__init__.cpython-311.pyc index 18fb5017..f70c4086 100644 Binary files a/.venv/Lib/site-packages/transformers/models/hubert/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/hubert/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/ibert/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/ibert/__pycache__/__init__.cpython-311.pyc index 1abeb98b..f4c08457 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/ibert/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/ibert/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/idefics/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/idefics/__pycache__/__init__.cpython-311.pyc index 8e416b09..32aa2e1f 100644 Binary files a/.venv/Lib/site-packages/transformers/models/idefics/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/idefics/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/idefics2/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/idefics2/__pycache__/__init__.cpython-311.pyc index 20b27461..33b16ecc 100644 Binary files a/.venv/Lib/site-packages/transformers/models/idefics2/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/idefics2/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/imagegpt/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/imagegpt/__pycache__/__init__.cpython-311.pyc index 2806a1bb..6c75a2cf 100644 Binary files a/.venv/Lib/site-packages/transformers/models/imagegpt/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/imagegpt/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/informer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/informer/__pycache__/__init__.cpython-311.pyc index 8854d8d6..5ed78518 100644 Binary files a/.venv/Lib/site-packages/transformers/models/informer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/informer/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/instructblip/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/instructblip/__pycache__/__init__.cpython-311.pyc index f418a80e..22d61945 100644 Binary files a/.venv/Lib/site-packages/transformers/models/instructblip/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/instructblip/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/jamba/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/jamba/__pycache__/__init__.cpython-311.pyc index 54c4532e..06543d69 100644 Binary files a/.venv/Lib/site-packages/transformers/models/jamba/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/jamba/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/jukebox/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/jukebox/__pycache__/__init__.cpython-311.pyc index 32baefd0..9859320b 100644 Binary files a/.venv/Lib/site-packages/transformers/models/jukebox/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/jukebox/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/kosmos2/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/kosmos2/__pycache__/__init__.cpython-311.pyc index 6a1ae0d1..40b90c27 100644 Binary files a/.venv/Lib/site-packages/transformers/models/kosmos2/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/kosmos2/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/layoutlm/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/layoutlm/__pycache__/__init__.cpython-311.pyc index b495d501..96ee0198 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/layoutlm/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/layoutlm/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/layoutlmv2/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/layoutlmv2/__pycache__/__init__.cpython-311.pyc index 8aa13bef..4d8f7042 100644 Binary files a/.venv/Lib/site-packages/transformers/models/layoutlmv2/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/layoutlmv2/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/layoutlmv3/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/layoutlmv3/__pycache__/__init__.cpython-311.pyc index 082cb9cd..37ddbe1f 100644 Binary files a/.venv/Lib/site-packages/transformers/models/layoutlmv3/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/layoutlmv3/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/layoutxlm/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/layoutxlm/__pycache__/__init__.cpython-311.pyc index 9b80fdb8..f12980f5 100644 Binary files a/.venv/Lib/site-packages/transformers/models/layoutxlm/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/layoutxlm/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/led/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/led/__pycache__/__init__.cpython-311.pyc index e7c278ad..b19ece91 100644 Binary files a/.venv/Lib/site-packages/transformers/models/led/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/led/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/levit/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/levit/__pycache__/__init__.cpython-311.pyc index 68f737b1..8dffc921 100644 Binary files a/.venv/Lib/site-packages/transformers/models/levit/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/levit/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/lilt/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/lilt/__pycache__/__init__.cpython-311.pyc index f91aa2ce..61eb17ec 100644 Binary files a/.venv/Lib/site-packages/transformers/models/lilt/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/lilt/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/llama/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/llama/__pycache__/__init__.cpython-311.pyc index 1df3e435..31fafa5b 100644 Binary files a/.venv/Lib/site-packages/transformers/models/llama/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/llama/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/llava/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/llava/__pycache__/__init__.cpython-311.pyc index f5226e7a..dd54e59d 100644 Binary files a/.venv/Lib/site-packages/transformers/models/llava/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/llava/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/llava_next/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/llava_next/__pycache__/__init__.cpython-311.pyc index d1a22a0d..40224efc 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/llava_next/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/llava_next/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/longformer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/longformer/__pycache__/__init__.cpython-311.pyc index 2f6fc2b9..b9e19911 100644 Binary files a/.venv/Lib/site-packages/transformers/models/longformer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/longformer/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/longt5/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/longt5/__pycache__/__init__.cpython-311.pyc index bbf34757..d9afefdc 100644 Binary files a/.venv/Lib/site-packages/transformers/models/longt5/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/longt5/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/luke/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/luke/__pycache__/__init__.cpython-311.pyc index 42a769bc..8389751c 100644 Binary files a/.venv/Lib/site-packages/transformers/models/luke/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/luke/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/lxmert/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/lxmert/__pycache__/__init__.cpython-311.pyc index d640fdee..61198b4c 100644 Binary files a/.venv/Lib/site-packages/transformers/models/lxmert/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/lxmert/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/m2m_100/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/m2m_100/__pycache__/__init__.cpython-311.pyc index 60df89c1..3f1db32d 100644 Binary files a/.venv/Lib/site-packages/transformers/models/m2m_100/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/m2m_100/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/mamba/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/mamba/__pycache__/__init__.cpython-311.pyc index 6a68980e..1a88dae2 100644 Binary files a/.venv/Lib/site-packages/transformers/models/mamba/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/mamba/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/marian/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/marian/__pycache__/__init__.cpython-311.pyc index cd2593a2..e3628f79 100644 Binary files a/.venv/Lib/site-packages/transformers/models/marian/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/marian/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/markuplm/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/markuplm/__pycache__/__init__.cpython-311.pyc index 3872e9f0..184a2b17 100644 Binary files a/.venv/Lib/site-packages/transformers/models/markuplm/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/markuplm/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/mask2former/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/mask2former/__pycache__/__init__.cpython-311.pyc index f18180e5..253d01c4 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/mask2former/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/mask2former/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/maskformer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/maskformer/__pycache__/__init__.cpython-311.pyc index 485b75c0..612f2fc7 100644 Binary files a/.venv/Lib/site-packages/transformers/models/maskformer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/maskformer/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/mbart/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/mbart/__pycache__/__init__.cpython-311.pyc index fbeee8dd..898bcae3 100644 Binary files a/.venv/Lib/site-packages/transformers/models/mbart/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/mbart/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/mbart50/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/mbart50/__pycache__/__init__.cpython-311.pyc index 14991117..5d321d43 100644 Binary files a/.venv/Lib/site-packages/transformers/models/mbart50/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/mbart50/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/mega/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/mega/__pycache__/__init__.cpython-311.pyc index 9429e176..945bcb8c 100644 Binary files a/.venv/Lib/site-packages/transformers/models/mega/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/mega/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/megatron_bert/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/megatron_bert/__pycache__/__init__.cpython-311.pyc index c1922ee8..c6f141e8 100644 Binary files a/.venv/Lib/site-packages/transformers/models/megatron_bert/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/megatron_bert/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/megatron_gpt2/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/megatron_gpt2/__pycache__/__init__.cpython-311.pyc index 24677c26..5698f191 100644 Binary files a/.venv/Lib/site-packages/transformers/models/megatron_gpt2/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/megatron_gpt2/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/mgp_str/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/mgp_str/__pycache__/__init__.cpython-311.pyc index 7639461e..51e58661 100644 Binary files a/.venv/Lib/site-packages/transformers/models/mgp_str/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/mgp_str/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/mistral/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/mistral/__pycache__/__init__.cpython-311.pyc index 4ef03332..45bba3ce 100644 Binary files a/.venv/Lib/site-packages/transformers/models/mistral/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/mistral/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/mixtral/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/mixtral/__pycache__/__init__.cpython-311.pyc index bb3fd1b8..db6f8921 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/mixtral/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/mixtral/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/mluke/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/mluke/__pycache__/__init__.cpython-311.pyc index fdd967b7..8a2d6ec3 100644 Binary files a/.venv/Lib/site-packages/transformers/models/mluke/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/mluke/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/mobilebert/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/mobilebert/__pycache__/__init__.cpython-311.pyc index c097c68d..a8ad0bed 100644 Binary files a/.venv/Lib/site-packages/transformers/models/mobilebert/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/mobilebert/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/mobilenet_v1/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/mobilenet_v1/__pycache__/__init__.cpython-311.pyc index 2ced15c9..fe5ae56f 100644 Binary files a/.venv/Lib/site-packages/transformers/models/mobilenet_v1/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/mobilenet_v1/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/mobilenet_v2/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/mobilenet_v2/__pycache__/__init__.cpython-311.pyc index ffbdab60..7c7c4f89 100644 Binary files a/.venv/Lib/site-packages/transformers/models/mobilenet_v2/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/mobilenet_v2/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/mobilevit/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/mobilevit/__pycache__/__init__.cpython-311.pyc index 72c84a3e..fb1e19c1 100644 Binary files a/.venv/Lib/site-packages/transformers/models/mobilevit/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/mobilevit/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/mobilevitv2/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/mobilevitv2/__pycache__/__init__.cpython-311.pyc index 076cdc8b..f61a9511 100644 Binary files a/.venv/Lib/site-packages/transformers/models/mobilevitv2/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/mobilevitv2/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/mpnet/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/mpnet/__pycache__/__init__.cpython-311.pyc index 751f21ef..9106ce13 100644 Binary files a/.venv/Lib/site-packages/transformers/models/mpnet/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/mpnet/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/mpt/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/mpt/__pycache__/__init__.cpython-311.pyc index 91924052..29712271 100644 Binary files a/.venv/Lib/site-packages/transformers/models/mpt/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/mpt/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/mra/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/mra/__pycache__/__init__.cpython-311.pyc index 2f72cb90..97574fbd 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/mra/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/mra/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/mt5/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/mt5/__pycache__/__init__.cpython-311.pyc index 3ccc39d5..a0ed677a 100644 Binary files a/.venv/Lib/site-packages/transformers/models/mt5/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/mt5/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/musicgen/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/musicgen/__pycache__/__init__.cpython-311.pyc index 2c059b69..8c1e622e 100644 Binary files a/.venv/Lib/site-packages/transformers/models/musicgen/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/musicgen/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/musicgen_melody/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/musicgen_melody/__pycache__/__init__.cpython-311.pyc index 93d5cf44..73f10d97 100644 Binary files a/.venv/Lib/site-packages/transformers/models/musicgen_melody/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/musicgen_melody/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/mvp/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/mvp/__pycache__/__init__.cpython-311.pyc index 078dea14..b2af992c 100644 Binary files a/.venv/Lib/site-packages/transformers/models/mvp/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/mvp/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/nat/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/nat/__pycache__/__init__.cpython-311.pyc index 8a1cd036..40224c23 100644 Binary files a/.venv/Lib/site-packages/transformers/models/nat/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/nat/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/nezha/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/nezha/__pycache__/__init__.cpython-311.pyc index 831b8e6b..7852b380 100644 Binary files a/.venv/Lib/site-packages/transformers/models/nezha/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/nezha/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/nllb/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/nllb/__pycache__/__init__.cpython-311.pyc index 613326ae..2124ed80 100644 Binary files a/.venv/Lib/site-packages/transformers/models/nllb/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/nllb/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/nllb_moe/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/nllb_moe/__pycache__/__init__.cpython-311.pyc index c1e11681..6ca76ea2 100644 Binary files a/.venv/Lib/site-packages/transformers/models/nllb_moe/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/nllb_moe/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/nougat/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/nougat/__pycache__/__init__.cpython-311.pyc index 4308f128..4cd3e8d3 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/nougat/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/nougat/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/nystromformer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/nystromformer/__pycache__/__init__.cpython-311.pyc index 6fb426a3..e6551f3c 100644 Binary files a/.venv/Lib/site-packages/transformers/models/nystromformer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/nystromformer/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/olmo/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/olmo/__pycache__/__init__.cpython-311.pyc index 91c2c298..3b6cc2ae 100644 Binary files a/.venv/Lib/site-packages/transformers/models/olmo/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/olmo/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/oneformer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/oneformer/__pycache__/__init__.cpython-311.pyc index b1f4fade..889ca3e5 100644 Binary files a/.venv/Lib/site-packages/transformers/models/oneformer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/oneformer/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/openai/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/openai/__pycache__/__init__.cpython-311.pyc index 82ac9d24..7747c892 100644 Binary files a/.venv/Lib/site-packages/transformers/models/openai/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/openai/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/opt/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/opt/__pycache__/__init__.cpython-311.pyc index 40253b3d..2c0be51e 100644 Binary files a/.venv/Lib/site-packages/transformers/models/opt/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/opt/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/owlv2/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/owlv2/__pycache__/__init__.cpython-311.pyc index d04e2322..8df15e60 100644 Binary files a/.venv/Lib/site-packages/transformers/models/owlv2/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/owlv2/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/owlvit/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/owlvit/__pycache__/__init__.cpython-311.pyc index 03fb4f7d..943d5681 100644 Binary files a/.venv/Lib/site-packages/transformers/models/owlvit/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/owlvit/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/patchtsmixer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/patchtsmixer/__pycache__/__init__.cpython-311.pyc index 356d5e45..c3e6f870 100644 Binary files a/.venv/Lib/site-packages/transformers/models/patchtsmixer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/patchtsmixer/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/patchtst/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/patchtst/__pycache__/__init__.cpython-311.pyc index 5f7ecfe7..9b9bb293 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/patchtst/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/patchtst/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/pegasus/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/pegasus/__pycache__/__init__.cpython-311.pyc index 822b96dd..79c57f3e 100644 Binary files a/.venv/Lib/site-packages/transformers/models/pegasus/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/pegasus/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/pegasus_x/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/pegasus_x/__pycache__/__init__.cpython-311.pyc index e0f475bc..ae8650ab 100644 Binary files a/.venv/Lib/site-packages/transformers/models/pegasus_x/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/pegasus_x/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/perceiver/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/perceiver/__pycache__/__init__.cpython-311.pyc index 32771ce4..1c77dd01 100644 Binary files a/.venv/Lib/site-packages/transformers/models/perceiver/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/perceiver/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/persimmon/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/persimmon/__pycache__/__init__.cpython-311.pyc index f5a3b981..1eb27264 100644 Binary files a/.venv/Lib/site-packages/transformers/models/persimmon/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/persimmon/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/phi/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/phi/__pycache__/__init__.cpython-311.pyc index e8391dec..0c31d607 100644 Binary files a/.venv/Lib/site-packages/transformers/models/phi/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/phi/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/phobert/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/phobert/__pycache__/__init__.cpython-311.pyc index 85a1de4d..4e4a49b0 100644 Binary files a/.venv/Lib/site-packages/transformers/models/phobert/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/phobert/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/pix2struct/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/pix2struct/__pycache__/__init__.cpython-311.pyc index 40744cca..d482add9 100644 Binary files a/.venv/Lib/site-packages/transformers/models/pix2struct/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/pix2struct/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/plbart/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/plbart/__pycache__/__init__.cpython-311.pyc index 89fbf4b6..a4f8d7f6 100644 Binary files a/.venv/Lib/site-packages/transformers/models/plbart/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/plbart/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/poolformer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/poolformer/__pycache__/__init__.cpython-311.pyc index 80bae3f8..8737200a 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/poolformer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/poolformer/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/pop2piano/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/pop2piano/__pycache__/__init__.cpython-311.pyc index c7087fad..907f3b88 100644 Binary files a/.venv/Lib/site-packages/transformers/models/pop2piano/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/pop2piano/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/prophetnet/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/prophetnet/__pycache__/__init__.cpython-311.pyc index 7903cb18..427656cd 100644 Binary files a/.venv/Lib/site-packages/transformers/models/prophetnet/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/prophetnet/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/pvt/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/pvt/__pycache__/__init__.cpython-311.pyc index c8d8df33..65393c04 100644 Binary files a/.venv/Lib/site-packages/transformers/models/pvt/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/pvt/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/pvt_v2/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/pvt_v2/__pycache__/__init__.cpython-311.pyc index b93614a3..fd90fccb 100644 Binary files a/.venv/Lib/site-packages/transformers/models/pvt_v2/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/pvt_v2/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/qdqbert/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/qdqbert/__pycache__/__init__.cpython-311.pyc index c69b9b33..2885c821 100644 Binary files a/.venv/Lib/site-packages/transformers/models/qdqbert/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/qdqbert/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/qwen2/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/qwen2/__pycache__/__init__.cpython-311.pyc index b0e81710..19001d7c 100644 Binary files a/.venv/Lib/site-packages/transformers/models/qwen2/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/qwen2/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/qwen2_moe/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/qwen2_moe/__pycache__/__init__.cpython-311.pyc index 2d0c9132..5cb00cdc 100644 Binary files a/.venv/Lib/site-packages/transformers/models/qwen2_moe/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/qwen2_moe/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/rag/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/rag/__pycache__/__init__.cpython-311.pyc index 6d357f47..d5e2504f 100644 Binary files a/.venv/Lib/site-packages/transformers/models/rag/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/rag/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/realm/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/realm/__pycache__/__init__.cpython-311.pyc index 80134e93..e2e979f0 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/realm/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/realm/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/recurrent_gemma/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/recurrent_gemma/__pycache__/__init__.cpython-311.pyc index ec4600c0..92e0c92e 100644 Binary files a/.venv/Lib/site-packages/transformers/models/recurrent_gemma/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/recurrent_gemma/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/reformer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/reformer/__pycache__/__init__.cpython-311.pyc index 409ebf7d..8137da34 100644 Binary files a/.venv/Lib/site-packages/transformers/models/reformer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/reformer/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/regnet/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/regnet/__pycache__/__init__.cpython-311.pyc index 5584b70b..bae00409 100644 Binary files a/.venv/Lib/site-packages/transformers/models/regnet/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/regnet/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/rembert/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/rembert/__pycache__/__init__.cpython-311.pyc index 3ac98005..e8c071ec 100644 Binary files a/.venv/Lib/site-packages/transformers/models/rembert/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/rembert/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/resnet/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/resnet/__pycache__/__init__.cpython-311.pyc index 9800c389..736bb9b1 100644 Binary files a/.venv/Lib/site-packages/transformers/models/resnet/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/resnet/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/roberta/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/roberta/__pycache__/__init__.cpython-311.pyc index 19d433a5..159a5947 100644 Binary files a/.venv/Lib/site-packages/transformers/models/roberta/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/roberta/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/roberta_prelayernorm/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/roberta_prelayernorm/__pycache__/__init__.cpython-311.pyc index 203ccab5..79594937 100644 Binary files a/.venv/Lib/site-packages/transformers/models/roberta_prelayernorm/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/roberta_prelayernorm/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/roc_bert/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/roc_bert/__pycache__/__init__.cpython-311.pyc index 8aed204d..4cced1af 100644 Binary files a/.venv/Lib/site-packages/transformers/models/roc_bert/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/roc_bert/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/roformer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/roformer/__pycache__/__init__.cpython-311.pyc index 11bfd0fe..c2df40a6 100644 Binary 
files a/.venv/Lib/site-packages/transformers/models/roformer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/roformer/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/rwkv/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/rwkv/__pycache__/__init__.cpython-311.pyc index a6b679ae..4cce8b61 100644 Binary files a/.venv/Lib/site-packages/transformers/models/rwkv/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/rwkv/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/sam/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/sam/__pycache__/__init__.cpython-311.pyc index 976d3473..81f6464a 100644 Binary files a/.venv/Lib/site-packages/transformers/models/sam/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/sam/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/seamless_m4t/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/seamless_m4t/__pycache__/__init__.cpython-311.pyc index 97780ff5..74b466cd 100644 Binary files a/.venv/Lib/site-packages/transformers/models/seamless_m4t/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/seamless_m4t/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/seamless_m4t_v2/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/seamless_m4t_v2/__pycache__/__init__.cpython-311.pyc index 604bb7de..57cfa700 100644 Binary files a/.venv/Lib/site-packages/transformers/models/seamless_m4t_v2/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/seamless_m4t_v2/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/segformer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/segformer/__pycache__/__init__.cpython-311.pyc index 7ee426bb..d97f032e 100644 Binary files a/.venv/Lib/site-packages/transformers/models/segformer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/segformer/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/seggpt/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/seggpt/__pycache__/__init__.cpython-311.pyc index 832452ab..7062da6c 100644 Binary files a/.venv/Lib/site-packages/transformers/models/seggpt/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/seggpt/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/sew/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/sew/__pycache__/__init__.cpython-311.pyc index d6675052..f2dfa098 100644 Binary files a/.venv/Lib/site-packages/transformers/models/sew/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/sew/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/sew_d/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/sew_d/__pycache__/__init__.cpython-311.pyc index aad03165..e08e2bfc 100644 Binary files a/.venv/Lib/site-packages/transformers/models/sew_d/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/sew_d/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/siglip/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/siglip/__pycache__/__init__.cpython-311.pyc index 5d1f22f9..6f7ad554 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/siglip/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/siglip/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/speech_encoder_decoder/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/speech_encoder_decoder/__pycache__/__init__.cpython-311.pyc index cbb822a2..94039968 100644 Binary files a/.venv/Lib/site-packages/transformers/models/speech_encoder_decoder/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/speech_encoder_decoder/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/speech_to_text/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/speech_to_text/__pycache__/__init__.cpython-311.pyc index 7f462cbb..e26b0e65 100644 Binary files a/.venv/Lib/site-packages/transformers/models/speech_to_text/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/speech_to_text/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/speech_to_text_2/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/speech_to_text_2/__pycache__/__init__.cpython-311.pyc index 9a211cc6..3a264fa1 100644 Binary files a/.venv/Lib/site-packages/transformers/models/speech_to_text_2/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/speech_to_text_2/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/speecht5/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/speecht5/__pycache__/__init__.cpython-311.pyc index 10c2a79f..a59c3e02 100644 Binary files a/.venv/Lib/site-packages/transformers/models/speecht5/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/transformers/models/speecht5/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/splinter/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/splinter/__pycache__/__init__.cpython-311.pyc index 232da8a1..8ef79415 100644 Binary files a/.venv/Lib/site-packages/transformers/models/splinter/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/splinter/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/squeezebert/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/squeezebert/__pycache__/__init__.cpython-311.pyc index 6b2e1159..4268765b 100644 Binary files a/.venv/Lib/site-packages/transformers/models/squeezebert/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/squeezebert/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/stablelm/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/stablelm/__pycache__/__init__.cpython-311.pyc index d0f7bf82..b4eca9c3 100644 Binary files a/.venv/Lib/site-packages/transformers/models/stablelm/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/stablelm/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/starcoder2/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/starcoder2/__pycache__/__init__.cpython-311.pyc index cdfb980b..0fb281ae 100644 Binary files a/.venv/Lib/site-packages/transformers/models/starcoder2/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/starcoder2/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/superpoint/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/transformers/models/superpoint/__pycache__/__init__.cpython-311.pyc index c220e8cd..cf45ca99 100644 Binary files a/.venv/Lib/site-packages/transformers/models/superpoint/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/superpoint/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/swiftformer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/swiftformer/__pycache__/__init__.cpython-311.pyc index 0d0bd268..10f082e0 100644 Binary files a/.venv/Lib/site-packages/transformers/models/swiftformer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/swiftformer/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/swin/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/swin/__pycache__/__init__.cpython-311.pyc index 834c5305..6e9fab5f 100644 Binary files a/.venv/Lib/site-packages/transformers/models/swin/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/swin/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/swin2sr/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/swin2sr/__pycache__/__init__.cpython-311.pyc index b37ba26a..104e0618 100644 Binary files a/.venv/Lib/site-packages/transformers/models/swin2sr/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/swin2sr/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/swinv2/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/swinv2/__pycache__/__init__.cpython-311.pyc index 197867a4..7a0aa3bb 100644 Binary files a/.venv/Lib/site-packages/transformers/models/swinv2/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/transformers/models/swinv2/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/switch_transformers/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/switch_transformers/__pycache__/__init__.cpython-311.pyc index 1fa54d52..5c3bb0e0 100644 Binary files a/.venv/Lib/site-packages/transformers/models/switch_transformers/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/switch_transformers/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/t5/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/t5/__pycache__/__init__.cpython-311.pyc index ac7c9c49..9404fd82 100644 Binary files a/.venv/Lib/site-packages/transformers/models/t5/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/t5/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/t5/__pycache__/tokenization_t5_fast.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/t5/__pycache__/tokenization_t5_fast.cpython-311.pyc index 1fb951f4..a90138a1 100644 Binary files a/.venv/Lib/site-packages/transformers/models/t5/__pycache__/tokenization_t5_fast.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/t5/__pycache__/tokenization_t5_fast.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/table_transformer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/table_transformer/__pycache__/__init__.cpython-311.pyc index 90c0a3db..d8c6b69d 100644 Binary files a/.venv/Lib/site-packages/transformers/models/table_transformer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/table_transformer/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/tapas/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/tapas/__pycache__/__init__.cpython-311.pyc index 32f5428b..2b2ce021 100644 Binary files a/.venv/Lib/site-packages/transformers/models/tapas/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/tapas/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/time_series_transformer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/time_series_transformer/__pycache__/__init__.cpython-311.pyc index e16cecb7..edcba09b 100644 Binary files a/.venv/Lib/site-packages/transformers/models/time_series_transformer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/time_series_transformer/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/timesformer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/timesformer/__pycache__/__init__.cpython-311.pyc index fb5c1268..8962bb09 100644 Binary files a/.venv/Lib/site-packages/transformers/models/timesformer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/timesformer/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/timm_backbone/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/timm_backbone/__pycache__/__init__.cpython-311.pyc index 17e31048..ce651706 100644 Binary files a/.venv/Lib/site-packages/transformers/models/timm_backbone/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/timm_backbone/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/trocr/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/trocr/__pycache__/__init__.cpython-311.pyc 
index 5bc360ff..18aed207 100644 Binary files a/.venv/Lib/site-packages/transformers/models/trocr/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/trocr/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/tvlt/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/tvlt/__pycache__/__init__.cpython-311.pyc index aab1aa65..9e23be10 100644 Binary files a/.venv/Lib/site-packages/transformers/models/tvlt/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/tvlt/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/tvp/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/tvp/__pycache__/__init__.cpython-311.pyc index 06b4e8bb..f4f854c0 100644 Binary files a/.venv/Lib/site-packages/transformers/models/tvp/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/tvp/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/udop/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/udop/__pycache__/__init__.cpython-311.pyc index 5d635193..b8f8a1be 100644 Binary files a/.venv/Lib/site-packages/transformers/models/udop/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/udop/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/umt5/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/umt5/__pycache__/__init__.cpython-311.pyc index 9cf8227e..2dc32b90 100644 Binary files a/.venv/Lib/site-packages/transformers/models/umt5/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/umt5/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/unispeech/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/unispeech/__pycache__/__init__.cpython-311.pyc index a27d4567..c1adb63e 100644 Binary files a/.venv/Lib/site-packages/transformers/models/unispeech/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/unispeech/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/unispeech_sat/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/unispeech_sat/__pycache__/__init__.cpython-311.pyc index 77c6da73..c3463959 100644 Binary files a/.venv/Lib/site-packages/transformers/models/unispeech_sat/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/unispeech_sat/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/univnet/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/univnet/__pycache__/__init__.cpython-311.pyc index ca441bd8..d373b3ad 100644 Binary files a/.venv/Lib/site-packages/transformers/models/univnet/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/univnet/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/upernet/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/upernet/__pycache__/__init__.cpython-311.pyc index a041221a..f99ca6c4 100644 Binary files a/.venv/Lib/site-packages/transformers/models/upernet/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/upernet/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/videomae/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/videomae/__pycache__/__init__.cpython-311.pyc index ef6cfd3c..ced6aa80 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/videomae/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/videomae/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/vilt/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/vilt/__pycache__/__init__.cpython-311.pyc index bb439763..e8370982 100644 Binary files a/.venv/Lib/site-packages/transformers/models/vilt/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/vilt/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/vipllava/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/vipllava/__pycache__/__init__.cpython-311.pyc index 7c358cb6..e4bc579c 100644 Binary files a/.venv/Lib/site-packages/transformers/models/vipllava/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/vipllava/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/vision_encoder_decoder/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/vision_encoder_decoder/__pycache__/__init__.cpython-311.pyc index c8e47f8a..103ed8a7 100644 Binary files a/.venv/Lib/site-packages/transformers/models/vision_encoder_decoder/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/vision_encoder_decoder/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/vision_text_dual_encoder/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/vision_text_dual_encoder/__pycache__/__init__.cpython-311.pyc index 5a3eb102..b2f939ef 100644 Binary files a/.venv/Lib/site-packages/transformers/models/vision_text_dual_encoder/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/transformers/models/vision_text_dual_encoder/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/visual_bert/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/visual_bert/__pycache__/__init__.cpython-311.pyc index 13732eff..c2ef1739 100644 Binary files a/.venv/Lib/site-packages/transformers/models/visual_bert/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/visual_bert/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/vit/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/vit/__pycache__/__init__.cpython-311.pyc index 6fe91faf..c7e388ab 100644 Binary files a/.venv/Lib/site-packages/transformers/models/vit/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/vit/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/vit_hybrid/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/vit_hybrid/__pycache__/__init__.cpython-311.pyc index fed3927e..20faeb40 100644 Binary files a/.venv/Lib/site-packages/transformers/models/vit_hybrid/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/vit_hybrid/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/vit_mae/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/vit_mae/__pycache__/__init__.cpython-311.pyc index fd08b141..eee7e56c 100644 Binary files a/.venv/Lib/site-packages/transformers/models/vit_mae/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/vit_mae/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/vit_msn/__pycache__/__init__.cpython-311.pyc 
b/.venv/Lib/site-packages/transformers/models/vit_msn/__pycache__/__init__.cpython-311.pyc index df5a31ab..1d2bff04 100644 Binary files a/.venv/Lib/site-packages/transformers/models/vit_msn/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/vit_msn/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/vitdet/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/vitdet/__pycache__/__init__.cpython-311.pyc index 88682abe..f12adbc7 100644 Binary files a/.venv/Lib/site-packages/transformers/models/vitdet/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/vitdet/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/vitmatte/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/vitmatte/__pycache__/__init__.cpython-311.pyc index b451120b..e23bbc9e 100644 Binary files a/.venv/Lib/site-packages/transformers/models/vitmatte/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/vitmatte/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/vits/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/vits/__pycache__/__init__.cpython-311.pyc index d31916a3..ee27f8a5 100644 Binary files a/.venv/Lib/site-packages/transformers/models/vits/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/vits/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/vivit/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/vivit/__pycache__/__init__.cpython-311.pyc index 9e6ec9e1..808a42c5 100644 Binary files a/.venv/Lib/site-packages/transformers/models/vivit/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/transformers/models/vivit/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/wav2vec2/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/wav2vec2/__pycache__/__init__.cpython-311.pyc index a8eb7276..347e2d46 100644 Binary files a/.venv/Lib/site-packages/transformers/models/wav2vec2/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/wav2vec2/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/wav2vec2_bert/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/wav2vec2_bert/__pycache__/__init__.cpython-311.pyc index a3eb73ca..2f661e83 100644 Binary files a/.venv/Lib/site-packages/transformers/models/wav2vec2_bert/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/wav2vec2_bert/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/wav2vec2_conformer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/wav2vec2_conformer/__pycache__/__init__.cpython-311.pyc index 4a591fde..1fb15744 100644 Binary files a/.venv/Lib/site-packages/transformers/models/wav2vec2_conformer/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/wav2vec2_conformer/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/wav2vec2_phoneme/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/wav2vec2_phoneme/__pycache__/__init__.cpython-311.pyc index 4ae6675e..d5dea99e 100644 Binary files a/.venv/Lib/site-packages/transformers/models/wav2vec2_phoneme/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/wav2vec2_phoneme/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/wav2vec2_with_lm/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/wav2vec2_with_lm/__pycache__/__init__.cpython-311.pyc index a839e13e..faea4755 100644 Binary files a/.venv/Lib/site-packages/transformers/models/wav2vec2_with_lm/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/wav2vec2_with_lm/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/wavlm/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/wavlm/__pycache__/__init__.cpython-311.pyc index b523fa00..be351421 100644 Binary files a/.venv/Lib/site-packages/transformers/models/wavlm/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/wavlm/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/whisper/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/whisper/__pycache__/__init__.cpython-311.pyc index a1eb3339..b43cf35a 100644 Binary files a/.venv/Lib/site-packages/transformers/models/whisper/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/whisper/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/x_clip/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/x_clip/__pycache__/__init__.cpython-311.pyc index d63e7f23..ef45967b 100644 Binary files a/.venv/Lib/site-packages/transformers/models/x_clip/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/x_clip/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/xglm/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/xglm/__pycache__/__init__.cpython-311.pyc index 7b95ac7f..38f1856e 100644 Binary files 
a/.venv/Lib/site-packages/transformers/models/xglm/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/xglm/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/xlm/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/xlm/__pycache__/__init__.cpython-311.pyc index 9dced247..f8cd3a41 100644 Binary files a/.venv/Lib/site-packages/transformers/models/xlm/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/xlm/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/xlm_prophetnet/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/xlm_prophetnet/__pycache__/__init__.cpython-311.pyc index d0bd50e9..13bf9278 100644 Binary files a/.venv/Lib/site-packages/transformers/models/xlm_prophetnet/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/xlm_prophetnet/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/xlm_roberta/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/xlm_roberta/__pycache__/__init__.cpython-311.pyc index 47f1e665..556fd02c 100644 Binary files a/.venv/Lib/site-packages/transformers/models/xlm_roberta/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/xlm_roberta/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/xlm_roberta_xl/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/xlm_roberta_xl/__pycache__/__init__.cpython-311.pyc index bf53283f..a3c509e9 100644 Binary files a/.venv/Lib/site-packages/transformers/models/xlm_roberta_xl/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/xlm_roberta_xl/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/models/xlnet/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/xlnet/__pycache__/__init__.cpython-311.pyc index 48c2cff4..3177360c 100644 Binary files a/.venv/Lib/site-packages/transformers/models/xlnet/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/xlnet/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/xmod/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/xmod/__pycache__/__init__.cpython-311.pyc index 634d8e9d..e97f9c8a 100644 Binary files a/.venv/Lib/site-packages/transformers/models/xmod/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/xmod/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/yolos/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/yolos/__pycache__/__init__.cpython-311.pyc index 21736e63..3356ecd4 100644 Binary files a/.venv/Lib/site-packages/transformers/models/yolos/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/yolos/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/models/yoso/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/models/yoso/__pycache__/__init__.cpython-311.pyc index 88b5914d..189b49a8 100644 Binary files a/.venv/Lib/site-packages/transformers/models/yoso/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/models/yoso/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/onnx/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/onnx/__pycache__/__init__.cpython-311.pyc index 4353dace..c4196f4d 100644 Binary files a/.venv/Lib/site-packages/transformers/onnx/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/transformers/onnx/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/onnx/__pycache__/config.cpython-311.pyc b/.venv/Lib/site-packages/transformers/onnx/__pycache__/config.cpython-311.pyc index 56fb0e6a..538da290 100644 Binary files a/.venv/Lib/site-packages/transformers/onnx/__pycache__/config.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/onnx/__pycache__/config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/onnx/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/transformers/onnx/__pycache__/utils.cpython-311.pyc index b40cc578..4359a1ac 100644 Binary files a/.venv/Lib/site-packages/transformers/onnx/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/onnx/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/quantizers/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/quantizers/__pycache__/__init__.cpython-311.pyc index dd31d6f2..2258e188 100644 Binary files a/.venv/Lib/site-packages/transformers/quantizers/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/quantizers/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/quantizers/__pycache__/auto.cpython-311.pyc b/.venv/Lib/site-packages/transformers/quantizers/__pycache__/auto.cpython-311.pyc index 2dffad0d..7738dc0d 100644 Binary files a/.venv/Lib/site-packages/transformers/quantizers/__pycache__/auto.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/quantizers/__pycache__/auto.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/quantizers/__pycache__/base.cpython-311.pyc b/.venv/Lib/site-packages/transformers/quantizers/__pycache__/base.cpython-311.pyc index 35a82264..968c7f6e 100644 Binary files a/.venv/Lib/site-packages/transformers/quantizers/__pycache__/base.cpython-311.pyc and 
b/.venv/Lib/site-packages/transformers/quantizers/__pycache__/base.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_aqlm.cpython-311.pyc b/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_aqlm.cpython-311.pyc index 72d08dc1..bab50122 100644 Binary files a/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_aqlm.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_aqlm.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_awq.cpython-311.pyc b/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_awq.cpython-311.pyc index 4bb24508..e0b7f11b 100644 Binary files a/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_awq.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_awq.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_bnb_4bit.cpython-311.pyc b/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_bnb_4bit.cpython-311.pyc index a34747ac..f482a21d 100644 Binary files a/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_bnb_4bit.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_bnb_4bit.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_bnb_8bit.cpython-311.pyc b/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_bnb_8bit.cpython-311.pyc index c98cceaa..8386dc46 100644 Binary files a/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_bnb_8bit.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_bnb_8bit.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_gptq.cpython-311.pyc 
b/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_gptq.cpython-311.pyc index aefff5e3..51639bcf 100644 Binary files a/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_gptq.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_gptq.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_quanto.cpython-311.pyc b/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_quanto.cpython-311.pyc index e3ad441a..22c51b9a 100644 Binary files a/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_quanto.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizer_quanto.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizers_utils.cpython-311.pyc b/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizers_utils.cpython-311.pyc index 02835b88..ccf55f48 100644 Binary files a/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizers_utils.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/quantizers/__pycache__/quantizers_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/utils/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/transformers/utils/__pycache__/__init__.cpython-311.pyc index cb9024b8..2d150c09 100644 Binary files a/.venv/Lib/site-packages/transformers/utils/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/utils/__pycache__/backbone_utils.cpython-311.pyc b/.venv/Lib/site-packages/transformers/utils/__pycache__/backbone_utils.cpython-311.pyc index 126aabf4..66e9cae6 100644 Binary files a/.venv/Lib/site-packages/transformers/utils/__pycache__/backbone_utils.cpython-311.pyc and 
b/.venv/Lib/site-packages/transformers/utils/__pycache__/backbone_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/utils/__pycache__/constants.cpython-311.pyc b/.venv/Lib/site-packages/transformers/utils/__pycache__/constants.cpython-311.pyc index 303907c8..f0615da0 100644 Binary files a/.venv/Lib/site-packages/transformers/utils/__pycache__/constants.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/utils/__pycache__/constants.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/utils/__pycache__/doc.cpython-311.pyc b/.venv/Lib/site-packages/transformers/utils/__pycache__/doc.cpython-311.pyc index b68c8189..97c2c0d8 100644 Binary files a/.venv/Lib/site-packages/transformers/utils/__pycache__/doc.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/utils/__pycache__/doc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_essentia_and_librosa_and_pretty_midi_and_scipy_and_torch_objects.cpython-311.pyc b/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_essentia_and_librosa_and_pretty_midi_and_scipy_and_torch_objects.cpython-311.pyc index 52c400ad..e751daa1 100644 Binary files a/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_essentia_and_librosa_and_pretty_midi_and_scipy_and_torch_objects.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_essentia_and_librosa_and_pretty_midi_and_scipy_and_torch_objects.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_flax_objects.cpython-311.pyc b/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_flax_objects.cpython-311.pyc index 53fe4cec..978a2ed5 100644 Binary files a/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_flax_objects.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_flax_objects.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_keras_nlp_objects.cpython-311.pyc b/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_keras_nlp_objects.cpython-311.pyc index dd192a40..be515d1c 100644 Binary files a/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_keras_nlp_objects.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_keras_nlp_objects.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_sentencepiece_and_tokenizers_objects.cpython-311.pyc b/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_sentencepiece_and_tokenizers_objects.cpython-311.pyc index f8163ac1..f74eb22e 100644 Binary files a/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_sentencepiece_and_tokenizers_objects.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_sentencepiece_and_tokenizers_objects.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_sentencepiece_objects.cpython-311.pyc b/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_sentencepiece_objects.cpython-311.pyc index bc90e2d0..a5ea0051 100644 Binary files a/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_sentencepiece_objects.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_sentencepiece_objects.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_tensorflow_text_objects.cpython-311.pyc b/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_tensorflow_text_objects.cpython-311.pyc index 21c80564..060d5b3c 100644 Binary files a/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_tensorflow_text_objects.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_tensorflow_text_objects.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_tf_objects.cpython-311.pyc b/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_tf_objects.cpython-311.pyc index b7803f9b..c1bec3bf 100644 Binary files a/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_tf_objects.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/utils/__pycache__/dummy_tf_objects.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/utils/__pycache__/generic.cpython-311.pyc b/.venv/Lib/site-packages/transformers/utils/__pycache__/generic.cpython-311.pyc index 36d5ec36..5d0dd986 100644 Binary files a/.venv/Lib/site-packages/transformers/utils/__pycache__/generic.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/utils/__pycache__/generic.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/utils/__pycache__/hub.cpython-311.pyc b/.venv/Lib/site-packages/transformers/utils/__pycache__/hub.cpython-311.pyc index f69f68bf..7acd47f1 100644 Binary files a/.venv/Lib/site-packages/transformers/utils/__pycache__/hub.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/utils/__pycache__/hub.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/utils/__pycache__/import_utils.cpython-311.pyc b/.venv/Lib/site-packages/transformers/utils/__pycache__/import_utils.cpython-311.pyc index 38d6d501..85f26b7a 100644 Binary files a/.venv/Lib/site-packages/transformers/utils/__pycache__/import_utils.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/utils/__pycache__/import_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/utils/__pycache__/logging.cpython-311.pyc b/.venv/Lib/site-packages/transformers/utils/__pycache__/logging.cpython-311.pyc index 9c69daee..fd2dcfde 100644 Binary files a/.venv/Lib/site-packages/transformers/utils/__pycache__/logging.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/utils/__pycache__/logging.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/transformers/utils/__pycache__/model_parallel_utils.cpython-311.pyc b/.venv/Lib/site-packages/transformers/utils/__pycache__/model_parallel_utils.cpython-311.pyc index e4554a75..cd38abd5 100644 Binary files a/.venv/Lib/site-packages/transformers/utils/__pycache__/model_parallel_utils.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/utils/__pycache__/model_parallel_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/utils/__pycache__/peft_utils.cpython-311.pyc b/.venv/Lib/site-packages/transformers/utils/__pycache__/peft_utils.cpython-311.pyc index 9f1336c7..7e438bd9 100644 Binary files a/.venv/Lib/site-packages/transformers/utils/__pycache__/peft_utils.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/utils/__pycache__/peft_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/utils/__pycache__/quantization_config.cpython-311.pyc b/.venv/Lib/site-packages/transformers/utils/__pycache__/quantization_config.cpython-311.pyc index 8efe63d2..cb648ccf 100644 Binary files a/.venv/Lib/site-packages/transformers/utils/__pycache__/quantization_config.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/utils/__pycache__/quantization_config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/transformers/utils/__pycache__/versions.cpython-311.pyc b/.venv/Lib/site-packages/transformers/utils/__pycache__/versions.cpython-311.pyc index fd907d4a..f9a9c5a6 100644 Binary files a/.venv/Lib/site-packages/transformers/utils/__pycache__/versions.cpython-311.pyc and b/.venv/Lib/site-packages/transformers/utils/__pycache__/versions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/typeguard/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/typeguard/__pycache__/__init__.cpython-311.pyc index dbcd9564..f6ef1271 100644 Binary files a/.venv/Lib/site-packages/typeguard/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/typeguard/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/typeguard/__pycache__/_checkers.cpython-311.pyc b/.venv/Lib/site-packages/typeguard/__pycache__/_checkers.cpython-311.pyc index cd7de04a..cad9c0a0 100644 Binary files a/.venv/Lib/site-packages/typeguard/__pycache__/_checkers.cpython-311.pyc and b/.venv/Lib/site-packages/typeguard/__pycache__/_checkers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/typeguard/__pycache__/_config.cpython-311.pyc b/.venv/Lib/site-packages/typeguard/__pycache__/_config.cpython-311.pyc index 380824a3..162f9efb 100644 Binary files a/.venv/Lib/site-packages/typeguard/__pycache__/_config.cpython-311.pyc and b/.venv/Lib/site-packages/typeguard/__pycache__/_config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/typeguard/__pycache__/_decorators.cpython-311.pyc b/.venv/Lib/site-packages/typeguard/__pycache__/_decorators.cpython-311.pyc index d041fd90..1daf1de5 100644 Binary files a/.venv/Lib/site-packages/typeguard/__pycache__/_decorators.cpython-311.pyc and b/.venv/Lib/site-packages/typeguard/__pycache__/_decorators.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/typeguard/__pycache__/_exceptions.cpython-311.pyc b/.venv/Lib/site-packages/typeguard/__pycache__/_exceptions.cpython-311.pyc index 4f2b55ce..1d97d408 100644 Binary files a/.venv/Lib/site-packages/typeguard/__pycache__/_exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/typeguard/__pycache__/_exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/typeguard/__pycache__/_functions.cpython-311.pyc b/.venv/Lib/site-packages/typeguard/__pycache__/_functions.cpython-311.pyc index af47f7c8..50ba63ad 100644 Binary files a/.venv/Lib/site-packages/typeguard/__pycache__/_functions.cpython-311.pyc and b/.venv/Lib/site-packages/typeguard/__pycache__/_functions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/typeguard/__pycache__/_importhook.cpython-311.pyc 
b/.venv/Lib/site-packages/typeguard/__pycache__/_importhook.cpython-311.pyc index 479ed109..d6813278 100644 Binary files a/.venv/Lib/site-packages/typeguard/__pycache__/_importhook.cpython-311.pyc and b/.venv/Lib/site-packages/typeguard/__pycache__/_importhook.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/typeguard/__pycache__/_memo.cpython-311.pyc b/.venv/Lib/site-packages/typeguard/__pycache__/_memo.cpython-311.pyc index 3e0b0ce2..931cc2cb 100644 Binary files a/.venv/Lib/site-packages/typeguard/__pycache__/_memo.cpython-311.pyc and b/.venv/Lib/site-packages/typeguard/__pycache__/_memo.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/typeguard/__pycache__/_suppression.cpython-311.pyc b/.venv/Lib/site-packages/typeguard/__pycache__/_suppression.cpython-311.pyc index a86f8649..f66f1420 100644 Binary files a/.venv/Lib/site-packages/typeguard/__pycache__/_suppression.cpython-311.pyc and b/.venv/Lib/site-packages/typeguard/__pycache__/_suppression.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/typeguard/__pycache__/_transformer.cpython-311.pyc b/.venv/Lib/site-packages/typeguard/__pycache__/_transformer.cpython-311.pyc index 459ec296..5bb1d85e 100644 Binary files a/.venv/Lib/site-packages/typeguard/__pycache__/_transformer.cpython-311.pyc and b/.venv/Lib/site-packages/typeguard/__pycache__/_transformer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/typeguard/__pycache__/_utils.cpython-311.pyc b/.venv/Lib/site-packages/typeguard/__pycache__/_utils.cpython-311.pyc index 3a2da8b9..6eadd132 100644 Binary files a/.venv/Lib/site-packages/typeguard/__pycache__/_utils.cpython-311.pyc and b/.venv/Lib/site-packages/typeguard/__pycache__/_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/typer/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/typer/__pycache__/__init__.cpython-311.pyc index 230b61c8..bf0a22e2 100644 Binary files a/.venv/Lib/site-packages/typer/__pycache__/__init__.cpython-311.pyc and 
b/.venv/Lib/site-packages/typer/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/typer/__pycache__/_compat_utils.cpython-311.pyc b/.venv/Lib/site-packages/typer/__pycache__/_compat_utils.cpython-311.pyc index 38367d30..98065fee 100644 Binary files a/.venv/Lib/site-packages/typer/__pycache__/_compat_utils.cpython-311.pyc and b/.venv/Lib/site-packages/typer/__pycache__/_compat_utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/typer/__pycache__/_completion_shared.cpython-311.pyc b/.venv/Lib/site-packages/typer/__pycache__/_completion_shared.cpython-311.pyc index c0c30870..d6c695d0 100644 Binary files a/.venv/Lib/site-packages/typer/__pycache__/_completion_shared.cpython-311.pyc and b/.venv/Lib/site-packages/typer/__pycache__/_completion_shared.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/typer/__pycache__/_typing.cpython-311.pyc b/.venv/Lib/site-packages/typer/__pycache__/_typing.cpython-311.pyc index 34a5459f..ab37599f 100644 Binary files a/.venv/Lib/site-packages/typer/__pycache__/_typing.cpython-311.pyc and b/.venv/Lib/site-packages/typer/__pycache__/_typing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/typer/__pycache__/colors.cpython-311.pyc b/.venv/Lib/site-packages/typer/__pycache__/colors.cpython-311.pyc index cde9ccfe..ea0c9944 100644 Binary files a/.venv/Lib/site-packages/typer/__pycache__/colors.cpython-311.pyc and b/.venv/Lib/site-packages/typer/__pycache__/colors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/typer/__pycache__/completion.cpython-311.pyc b/.venv/Lib/site-packages/typer/__pycache__/completion.cpython-311.pyc index 836d8104..ef641e4f 100644 Binary files a/.venv/Lib/site-packages/typer/__pycache__/completion.cpython-311.pyc and b/.venv/Lib/site-packages/typer/__pycache__/completion.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/typer/__pycache__/core.cpython-311.pyc b/.venv/Lib/site-packages/typer/__pycache__/core.cpython-311.pyc index 
1c11fb0d..04c7a7d3 100644 Binary files a/.venv/Lib/site-packages/typer/__pycache__/core.cpython-311.pyc and b/.venv/Lib/site-packages/typer/__pycache__/core.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/typer/__pycache__/main.cpython-311.pyc b/.venv/Lib/site-packages/typer/__pycache__/main.cpython-311.pyc index 73e83d6f..35c6288f 100644 Binary files a/.venv/Lib/site-packages/typer/__pycache__/main.cpython-311.pyc and b/.venv/Lib/site-packages/typer/__pycache__/main.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/typer/__pycache__/models.cpython-311.pyc b/.venv/Lib/site-packages/typer/__pycache__/models.cpython-311.pyc index 3cb054c5..cecd3880 100644 Binary files a/.venv/Lib/site-packages/typer/__pycache__/models.cpython-311.pyc and b/.venv/Lib/site-packages/typer/__pycache__/models.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/typer/__pycache__/params.cpython-311.pyc b/.venv/Lib/site-packages/typer/__pycache__/params.cpython-311.pyc index 21de567c..858c074d 100644 Binary files a/.venv/Lib/site-packages/typer/__pycache__/params.cpython-311.pyc and b/.venv/Lib/site-packages/typer/__pycache__/params.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/typer/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/typer/__pycache__/utils.cpython-311.pyc index 9e1e72bb..1bd0c141 100644 Binary files a/.venv/Lib/site-packages/typer/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/typer/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tzlocal/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/tzlocal/__pycache__/__init__.cpython-311.pyc index c4d00663..5c3185c4 100644 Binary files a/.venv/Lib/site-packages/tzlocal/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/tzlocal/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tzlocal/__pycache__/utils.cpython-311.pyc 
b/.venv/Lib/site-packages/tzlocal/__pycache__/utils.cpython-311.pyc index e5d6aaf0..4088f17b 100644 Binary files a/.venv/Lib/site-packages/tzlocal/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/tzlocal/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tzlocal/__pycache__/win32.cpython-311.pyc b/.venv/Lib/site-packages/tzlocal/__pycache__/win32.cpython-311.pyc index 4a055f40..5de4d102 100644 Binary files a/.venv/Lib/site-packages/tzlocal/__pycache__/win32.cpython-311.pyc and b/.venv/Lib/site-packages/tzlocal/__pycache__/win32.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/tzlocal/__pycache__/windows_tz.cpython-311.pyc b/.venv/Lib/site-packages/tzlocal/__pycache__/windows_tz.cpython-311.pyc index 9069a38d..bfd91105 100644 Binary files a/.venv/Lib/site-packages/tzlocal/__pycache__/windows_tz.cpython-311.pyc and b/.venv/Lib/site-packages/tzlocal/__pycache__/windows_tz.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/__pycache__/__init__.cpython-311.pyc index 9701a26e..76d1ce70 100644 Binary files a/.venv/Lib/site-packages/urllib3/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/__pycache__/_base_connection.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/__pycache__/_base_connection.cpython-311.pyc index a819096c..4ccae680 100644 Binary files a/.venv/Lib/site-packages/urllib3/__pycache__/_base_connection.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/__pycache__/_base_connection.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/__pycache__/_collections.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/__pycache__/_collections.cpython-311.pyc index 4497f56d..fc983a0f 100644 Binary files a/.venv/Lib/site-packages/urllib3/__pycache__/_collections.cpython-311.pyc and 
b/.venv/Lib/site-packages/urllib3/__pycache__/_collections.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/__pycache__/_request_methods.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/__pycache__/_request_methods.cpython-311.pyc index 5aae1429..4f182c48 100644 Binary files a/.venv/Lib/site-packages/urllib3/__pycache__/_request_methods.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/__pycache__/_request_methods.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/__pycache__/_version.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/__pycache__/_version.cpython-311.pyc index b37df7ba..2bec69fa 100644 Binary files a/.venv/Lib/site-packages/urllib3/__pycache__/_version.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/__pycache__/_version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/__pycache__/connection.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/__pycache__/connection.cpython-311.pyc index a1f3219c..5267c326 100644 Binary files a/.venv/Lib/site-packages/urllib3/__pycache__/connection.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/__pycache__/connection.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/__pycache__/connectionpool.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/__pycache__/connectionpool.cpython-311.pyc index 17ae8b4c..724d9ff1 100644 Binary files a/.venv/Lib/site-packages/urllib3/__pycache__/connectionpool.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/__pycache__/connectionpool.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/__pycache__/exceptions.cpython-311.pyc index 880626c8..cf6a3ef5 100644 Binary files a/.venv/Lib/site-packages/urllib3/__pycache__/exceptions.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/__pycache__/exceptions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/__pycache__/fields.cpython-311.pyc 
b/.venv/Lib/site-packages/urllib3/__pycache__/fields.cpython-311.pyc index bca63c66..47daa756 100644 Binary files a/.venv/Lib/site-packages/urllib3/__pycache__/fields.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/__pycache__/fields.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/__pycache__/filepost.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/__pycache__/filepost.cpython-311.pyc index dd2a97ac..548f2813 100644 Binary files a/.venv/Lib/site-packages/urllib3/__pycache__/filepost.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/__pycache__/filepost.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/__pycache__/poolmanager.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/__pycache__/poolmanager.cpython-311.pyc index 49b63f98..2502eb5e 100644 Binary files a/.venv/Lib/site-packages/urllib3/__pycache__/poolmanager.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/__pycache__/poolmanager.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/__pycache__/response.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/__pycache__/response.cpython-311.pyc index 41cea870..813de05b 100644 Binary files a/.venv/Lib/site-packages/urllib3/__pycache__/response.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/__pycache__/response.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/contrib/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/contrib/__pycache__/__init__.cpython-311.pyc index db53d3bd..17c765a4 100644 Binary files a/.venv/Lib/site-packages/urllib3/contrib/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/contrib/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/contrib/__pycache__/socks.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/contrib/__pycache__/socks.cpython-311.pyc index 840e7f5c..b816166a 100644 Binary files a/.venv/Lib/site-packages/urllib3/contrib/__pycache__/socks.cpython-311.pyc and 
b/.venv/Lib/site-packages/urllib3/contrib/__pycache__/socks.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/util/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/util/__pycache__/__init__.cpython-311.pyc index 090ba267..e8572c6a 100644 Binary files a/.venv/Lib/site-packages/urllib3/util/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/util/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/util/__pycache__/connection.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/util/__pycache__/connection.cpython-311.pyc index eef1634e..930378b0 100644 Binary files a/.venv/Lib/site-packages/urllib3/util/__pycache__/connection.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/util/__pycache__/connection.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/util/__pycache__/proxy.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/util/__pycache__/proxy.cpython-311.pyc index 974a1dc1..44360335 100644 Binary files a/.venv/Lib/site-packages/urllib3/util/__pycache__/proxy.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/util/__pycache__/proxy.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/util/__pycache__/request.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/util/__pycache__/request.cpython-311.pyc index 69313b29..68f1abff 100644 Binary files a/.venv/Lib/site-packages/urllib3/util/__pycache__/request.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/util/__pycache__/request.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/util/__pycache__/response.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/util/__pycache__/response.cpython-311.pyc index c12f6ce1..acfad2ea 100644 Binary files a/.venv/Lib/site-packages/urllib3/util/__pycache__/response.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/util/__pycache__/response.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/urllib3/util/__pycache__/retry.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/util/__pycache__/retry.cpython-311.pyc index 4baba59c..1bf20018 100644 Binary files a/.venv/Lib/site-packages/urllib3/util/__pycache__/retry.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/util/__pycache__/retry.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/util/__pycache__/ssl_.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/util/__pycache__/ssl_.cpython-311.pyc index c27ee421..dcd9b776 100644 Binary files a/.venv/Lib/site-packages/urllib3/util/__pycache__/ssl_.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/util/__pycache__/ssl_.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/util/__pycache__/ssl_match_hostname.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/util/__pycache__/ssl_match_hostname.cpython-311.pyc index 26b05a3f..1c218598 100644 Binary files a/.venv/Lib/site-packages/urllib3/util/__pycache__/ssl_match_hostname.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/util/__pycache__/ssl_match_hostname.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/util/__pycache__/ssltransport.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/util/__pycache__/ssltransport.cpython-311.pyc index 41e01db7..3d0f808f 100644 Binary files a/.venv/Lib/site-packages/urllib3/util/__pycache__/ssltransport.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/util/__pycache__/ssltransport.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/util/__pycache__/timeout.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/util/__pycache__/timeout.cpython-311.pyc index 3835bc6f..9233a997 100644 Binary files a/.venv/Lib/site-packages/urllib3/util/__pycache__/timeout.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/util/__pycache__/timeout.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/util/__pycache__/url.cpython-311.pyc 
b/.venv/Lib/site-packages/urllib3/util/__pycache__/url.cpython-311.pyc index 2d78fe13..4f826b13 100644 Binary files a/.venv/Lib/site-packages/urllib3/util/__pycache__/url.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/util/__pycache__/url.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/util/__pycache__/util.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/util/__pycache__/util.cpython-311.pyc index 312817e1..19bd2584 100644 Binary files a/.venv/Lib/site-packages/urllib3/util/__pycache__/util.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/util/__pycache__/util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/urllib3/util/__pycache__/wait.cpython-311.pyc b/.venv/Lib/site-packages/urllib3/util/__pycache__/wait.cpython-311.pyc index 9113cf61..a72da4e8 100644 Binary files a/.venv/Lib/site-packages/urllib3/util/__pycache__/wait.cpython-311.pyc and b/.venv/Lib/site-packages/urllib3/util/__pycache__/wait.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wasabi/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/wasabi/__pycache__/__init__.cpython-311.pyc index 82a8555f..cdc44e12 100644 Binary files a/.venv/Lib/site-packages/wasabi/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/wasabi/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wasabi/__pycache__/compat.cpython-311.pyc b/.venv/Lib/site-packages/wasabi/__pycache__/compat.cpython-311.pyc index 742bd40e..ce426368 100644 Binary files a/.venv/Lib/site-packages/wasabi/__pycache__/compat.cpython-311.pyc and b/.venv/Lib/site-packages/wasabi/__pycache__/compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wasabi/__pycache__/markdown.cpython-311.pyc b/.venv/Lib/site-packages/wasabi/__pycache__/markdown.cpython-311.pyc index 34a823bb..46dd0b89 100644 Binary files a/.venv/Lib/site-packages/wasabi/__pycache__/markdown.cpython-311.pyc and 
b/.venv/Lib/site-packages/wasabi/__pycache__/markdown.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wasabi/__pycache__/printer.cpython-311.pyc b/.venv/Lib/site-packages/wasabi/__pycache__/printer.cpython-311.pyc index e5c89df9..a9250e22 100644 Binary files a/.venv/Lib/site-packages/wasabi/__pycache__/printer.cpython-311.pyc and b/.venv/Lib/site-packages/wasabi/__pycache__/printer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wasabi/__pycache__/tables.cpython-311.pyc b/.venv/Lib/site-packages/wasabi/__pycache__/tables.cpython-311.pyc index f7ce15ba..d4b0052b 100644 Binary files a/.venv/Lib/site-packages/wasabi/__pycache__/tables.cpython-311.pyc and b/.venv/Lib/site-packages/wasabi/__pycache__/tables.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wasabi/__pycache__/traceback_printer.cpython-311.pyc b/.venv/Lib/site-packages/wasabi/__pycache__/traceback_printer.cpython-311.pyc index 85242c9d..fb8731f2 100644 Binary files a/.venv/Lib/site-packages/wasabi/__pycache__/traceback_printer.cpython-311.pyc and b/.venv/Lib/site-packages/wasabi/__pycache__/traceback_printer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wasabi/__pycache__/util.cpython-311.pyc b/.venv/Lib/site-packages/wasabi/__pycache__/util.cpython-311.pyc index 8f62810b..7c473724 100644 Binary files a/.venv/Lib/site-packages/wasabi/__pycache__/util.cpython-311.pyc and b/.venv/Lib/site-packages/wasabi/__pycache__/util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/weasel/__pycache__/__init__.cpython-311.pyc index 9fc8ca69..57d6f8ed 100644 Binary files a/.venv/Lib/site-packages/weasel/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/__pycache__/about.cpython-311.pyc b/.venv/Lib/site-packages/weasel/__pycache__/about.cpython-311.pyc index 990e009e..7e29f658 100644 
Binary files a/.venv/Lib/site-packages/weasel/__pycache__/about.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/__pycache__/about.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/__pycache__/compat.cpython-311.pyc b/.venv/Lib/site-packages/weasel/__pycache__/compat.cpython-311.pyc index 79c06f23..61a41ddf 100644 Binary files a/.venv/Lib/site-packages/weasel/__pycache__/compat.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/__pycache__/compat.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/__pycache__/errors.cpython-311.pyc b/.venv/Lib/site-packages/weasel/__pycache__/errors.cpython-311.pyc index cb7b995a..c3f350e4 100644 Binary files a/.venv/Lib/site-packages/weasel/__pycache__/errors.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/__pycache__/errors.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/__pycache__/schemas.cpython-311.pyc b/.venv/Lib/site-packages/weasel/__pycache__/schemas.cpython-311.pyc index 2a5d1479..6905d4d6 100644 Binary files a/.venv/Lib/site-packages/weasel/__pycache__/schemas.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/__pycache__/schemas.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/cli/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/weasel/cli/__pycache__/__init__.cpython-311.pyc index 0e66c20a..727f1c31 100644 Binary files a/.venv/Lib/site-packages/weasel/cli/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/cli/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/cli/__pycache__/assets.cpython-311.pyc b/.venv/Lib/site-packages/weasel/cli/__pycache__/assets.cpython-311.pyc index e2dcd06c..debfa523 100644 Binary files a/.venv/Lib/site-packages/weasel/cli/__pycache__/assets.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/cli/__pycache__/assets.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/cli/__pycache__/clone.cpython-311.pyc 
b/.venv/Lib/site-packages/weasel/cli/__pycache__/clone.cpython-311.pyc index f9b13383..2644735e 100644 Binary files a/.venv/Lib/site-packages/weasel/cli/__pycache__/clone.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/cli/__pycache__/clone.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/cli/__pycache__/document.cpython-311.pyc b/.venv/Lib/site-packages/weasel/cli/__pycache__/document.cpython-311.pyc index 3f872657..daa08eed 100644 Binary files a/.venv/Lib/site-packages/weasel/cli/__pycache__/document.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/cli/__pycache__/document.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/cli/__pycache__/dvc.cpython-311.pyc b/.venv/Lib/site-packages/weasel/cli/__pycache__/dvc.cpython-311.pyc index 9cc4c928..9845c823 100644 Binary files a/.venv/Lib/site-packages/weasel/cli/__pycache__/dvc.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/cli/__pycache__/dvc.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/cli/__pycache__/main.cpython-311.pyc b/.venv/Lib/site-packages/weasel/cli/__pycache__/main.cpython-311.pyc index c6f591aa..bf825975 100644 Binary files a/.venv/Lib/site-packages/weasel/cli/__pycache__/main.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/cli/__pycache__/main.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/cli/__pycache__/pull.cpython-311.pyc b/.venv/Lib/site-packages/weasel/cli/__pycache__/pull.cpython-311.pyc index 47e6ab53..72ccb408 100644 Binary files a/.venv/Lib/site-packages/weasel/cli/__pycache__/pull.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/cli/__pycache__/pull.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/cli/__pycache__/push.cpython-311.pyc b/.venv/Lib/site-packages/weasel/cli/__pycache__/push.cpython-311.pyc index 7f7c9e3c..5987a566 100644 Binary files a/.venv/Lib/site-packages/weasel/cli/__pycache__/push.cpython-311.pyc and 
b/.venv/Lib/site-packages/weasel/cli/__pycache__/push.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/cli/__pycache__/remote_storage.cpython-311.pyc b/.venv/Lib/site-packages/weasel/cli/__pycache__/remote_storage.cpython-311.pyc index 8ab4b986..bbad6932 100644 Binary files a/.venv/Lib/site-packages/weasel/cli/__pycache__/remote_storage.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/cli/__pycache__/remote_storage.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/cli/__pycache__/run.cpython-311.pyc b/.venv/Lib/site-packages/weasel/cli/__pycache__/run.cpython-311.pyc index 97d3ed22..b84b39ae 100644 Binary files a/.venv/Lib/site-packages/weasel/cli/__pycache__/run.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/cli/__pycache__/run.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/util/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/weasel/util/__pycache__/__init__.cpython-311.pyc index 450ba215..84bc08be 100644 Binary files a/.venv/Lib/site-packages/weasel/util/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/util/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/util/__pycache__/commands.cpython-311.pyc b/.venv/Lib/site-packages/weasel/util/__pycache__/commands.cpython-311.pyc index 0d429c87..6aa74473 100644 Binary files a/.venv/Lib/site-packages/weasel/util/__pycache__/commands.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/util/__pycache__/commands.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/util/__pycache__/config.cpython-311.pyc b/.venv/Lib/site-packages/weasel/util/__pycache__/config.cpython-311.pyc index 78002cc2..2d23f086 100644 Binary files a/.venv/Lib/site-packages/weasel/util/__pycache__/config.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/util/__pycache__/config.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/util/__pycache__/environment.cpython-311.pyc 
b/.venv/Lib/site-packages/weasel/util/__pycache__/environment.cpython-311.pyc index 6f77d98a..e4c7b227 100644 Binary files a/.venv/Lib/site-packages/weasel/util/__pycache__/environment.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/util/__pycache__/environment.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/util/__pycache__/filesystem.cpython-311.pyc b/.venv/Lib/site-packages/weasel/util/__pycache__/filesystem.cpython-311.pyc index 916a9c66..85dd618c 100644 Binary files a/.venv/Lib/site-packages/weasel/util/__pycache__/filesystem.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/util/__pycache__/filesystem.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/util/__pycache__/frozen.cpython-311.pyc b/.venv/Lib/site-packages/weasel/util/__pycache__/frozen.cpython-311.pyc index 9b5367c3..4c341482 100644 Binary files a/.venv/Lib/site-packages/weasel/util/__pycache__/frozen.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/util/__pycache__/frozen.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/util/__pycache__/git.cpython-311.pyc b/.venv/Lib/site-packages/weasel/util/__pycache__/git.cpython-311.pyc index d64025f0..cad29700 100644 Binary files a/.venv/Lib/site-packages/weasel/util/__pycache__/git.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/util/__pycache__/git.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/util/__pycache__/hashing.cpython-311.pyc b/.venv/Lib/site-packages/weasel/util/__pycache__/hashing.cpython-311.pyc index 34fe2d0f..4d6883ea 100644 Binary files a/.venv/Lib/site-packages/weasel/util/__pycache__/hashing.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/util/__pycache__/hashing.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/util/__pycache__/logging.cpython-311.pyc b/.venv/Lib/site-packages/weasel/util/__pycache__/logging.cpython-311.pyc index aea72e8b..e312a081 100644 Binary files 
a/.venv/Lib/site-packages/weasel/util/__pycache__/logging.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/util/__pycache__/logging.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/util/__pycache__/modules.cpython-311.pyc b/.venv/Lib/site-packages/weasel/util/__pycache__/modules.cpython-311.pyc index c84b890a..1786d3a7 100644 Binary files a/.venv/Lib/site-packages/weasel/util/__pycache__/modules.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/util/__pycache__/modules.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/util/__pycache__/remote.cpython-311.pyc b/.venv/Lib/site-packages/weasel/util/__pycache__/remote.cpython-311.pyc index 28dc8f7b..1ff7c144 100644 Binary files a/.venv/Lib/site-packages/weasel/util/__pycache__/remote.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/util/__pycache__/remote.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/util/__pycache__/validation.cpython-311.pyc b/.venv/Lib/site-packages/weasel/util/__pycache__/validation.cpython-311.pyc index ebfa4d78..f502236d 100644 Binary files a/.venv/Lib/site-packages/weasel/util/__pycache__/validation.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/util/__pycache__/validation.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/weasel/util/__pycache__/versions.cpython-311.pyc b/.venv/Lib/site-packages/weasel/util/__pycache__/versions.cpython-311.pyc index a47d50e5..7576042f 100644 Binary files a/.venv/Lib/site-packages/weasel/util/__pycache__/versions.cpython-311.pyc and b/.venv/Lib/site-packages/weasel/util/__pycache__/versions.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wheel/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/wheel/__pycache__/__init__.cpython-311.pyc index a60a7a9c..a087aee3 100644 Binary files a/.venv/Lib/site-packages/wheel/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/wheel/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/wheel/__pycache__/bdist_wheel.cpython-311.pyc b/.venv/Lib/site-packages/wheel/__pycache__/bdist_wheel.cpython-311.pyc index ec292051..1a607c0a 100644 Binary files a/.venv/Lib/site-packages/wheel/__pycache__/bdist_wheel.cpython-311.pyc and b/.venv/Lib/site-packages/wheel/__pycache__/bdist_wheel.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wheel/__pycache__/macosx_libfile.cpython-311.pyc b/.venv/Lib/site-packages/wheel/__pycache__/macosx_libfile.cpython-311.pyc index dc396ba7..4ee1dc02 100644 Binary files a/.venv/Lib/site-packages/wheel/__pycache__/macosx_libfile.cpython-311.pyc and b/.venv/Lib/site-packages/wheel/__pycache__/macosx_libfile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wheel/__pycache__/metadata.cpython-311.pyc b/.venv/Lib/site-packages/wheel/__pycache__/metadata.cpython-311.pyc index 04f5d1cd..579625b0 100644 Binary files a/.venv/Lib/site-packages/wheel/__pycache__/metadata.cpython-311.pyc and b/.venv/Lib/site-packages/wheel/__pycache__/metadata.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wheel/__pycache__/util.cpython-311.pyc b/.venv/Lib/site-packages/wheel/__pycache__/util.cpython-311.pyc index fac38979..5d204376 100644 Binary files a/.venv/Lib/site-packages/wheel/__pycache__/util.cpython-311.pyc and b/.venv/Lib/site-packages/wheel/__pycache__/util.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wheel/__pycache__/wheelfile.cpython-311.pyc b/.venv/Lib/site-packages/wheel/__pycache__/wheelfile.cpython-311.pyc index c01abbfe..30d6afd2 100644 Binary files a/.venv/Lib/site-packages/wheel/__pycache__/wheelfile.cpython-311.pyc and b/.venv/Lib/site-packages/wheel/__pycache__/wheelfile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wheel/cli/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/wheel/cli/__pycache__/__init__.cpython-311.pyc index 64cba783..47165b8d 100644 Binary files 
a/.venv/Lib/site-packages/wheel/cli/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/wheel/cli/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wheel/vendored/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/wheel/vendored/__pycache__/__init__.cpython-311.pyc index 474b5f68..313fd144 100644 Binary files a/.venv/Lib/site-packages/wheel/vendored/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/wheel/vendored/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/__init__.cpython-311.pyc index 9c62b43f..7c2636f4 100644 Binary files a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_elffile.cpython-311.pyc b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_elffile.cpython-311.pyc index 0bbc9c3e..7b0dc994 100644 Binary files a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_elffile.cpython-311.pyc and b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_elffile.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_manylinux.cpython-311.pyc b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_manylinux.cpython-311.pyc index 66cc8947..1f8b0661 100644 Binary files a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_manylinux.cpython-311.pyc and b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_manylinux.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_musllinux.cpython-311.pyc b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_musllinux.cpython-311.pyc index 
9be4edbf..ad8d4461 100644 Binary files a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_musllinux.cpython-311.pyc and b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_musllinux.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_parser.cpython-311.pyc b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_parser.cpython-311.pyc index 520bee44..25bff36e 100644 Binary files a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_parser.cpython-311.pyc and b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_structures.cpython-311.pyc b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_structures.cpython-311.pyc index 4796c168..0a6d7d4a 100644 Binary files a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_structures.cpython-311.pyc and b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_structures.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_tokenizer.cpython-311.pyc b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_tokenizer.cpython-311.pyc index 5353fff2..4920bd5b 100644 Binary files a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_tokenizer.cpython-311.pyc and b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/_tokenizer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/markers.cpython-311.pyc b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/markers.cpython-311.pyc index a5536b34..46c20b80 100644 Binary files a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/markers.cpython-311.pyc and b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/markers.cpython-311.pyc differ diff --git 
a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/requirements.cpython-311.pyc b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/requirements.cpython-311.pyc index 09cbf51a..6ffd0296 100644 Binary files a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/requirements.cpython-311.pyc and b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/requirements.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/specifiers.cpython-311.pyc b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/specifiers.cpython-311.pyc index 8101b224..ef910a39 100644 Binary files a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/specifiers.cpython-311.pyc and b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/specifiers.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/tags.cpython-311.pyc b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/tags.cpython-311.pyc index a01d96ca..c7e6bf31 100644 Binary files a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/tags.cpython-311.pyc and b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/tags.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/utils.cpython-311.pyc index da506905..c757d7f7 100644 Binary files a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/utils.cpython-311.pyc and b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/version.cpython-311.pyc b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/version.cpython-311.pyc index 01d29005..01361be1 100644 Binary files a/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/version.cpython-311.pyc and 
b/.venv/Lib/site-packages/wheel/vendored/packaging/__pycache__/version.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/yaml/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/yaml/__pycache__/__init__.cpython-311.pyc index 707a949d..d4f171fe 100644 Binary files a/.venv/Lib/site-packages/yaml/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/yaml/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/yaml/__pycache__/composer.cpython-311.pyc b/.venv/Lib/site-packages/yaml/__pycache__/composer.cpython-311.pyc index c845f06c..bd29c28f 100644 Binary files a/.venv/Lib/site-packages/yaml/__pycache__/composer.cpython-311.pyc and b/.venv/Lib/site-packages/yaml/__pycache__/composer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/yaml/__pycache__/constructor.cpython-311.pyc b/.venv/Lib/site-packages/yaml/__pycache__/constructor.cpython-311.pyc index ddb9c0e2..d092b0be 100644 Binary files a/.venv/Lib/site-packages/yaml/__pycache__/constructor.cpython-311.pyc and b/.venv/Lib/site-packages/yaml/__pycache__/constructor.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/yaml/__pycache__/cyaml.cpython-311.pyc b/.venv/Lib/site-packages/yaml/__pycache__/cyaml.cpython-311.pyc index bc571dbb..1d214d45 100644 Binary files a/.venv/Lib/site-packages/yaml/__pycache__/cyaml.cpython-311.pyc and b/.venv/Lib/site-packages/yaml/__pycache__/cyaml.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/yaml/__pycache__/dumper.cpython-311.pyc b/.venv/Lib/site-packages/yaml/__pycache__/dumper.cpython-311.pyc index 283fc91e..09a8d52e 100644 Binary files a/.venv/Lib/site-packages/yaml/__pycache__/dumper.cpython-311.pyc and b/.venv/Lib/site-packages/yaml/__pycache__/dumper.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/yaml/__pycache__/emitter.cpython-311.pyc b/.venv/Lib/site-packages/yaml/__pycache__/emitter.cpython-311.pyc index a6e4dc7f..b93611e8 100644 Binary files 
a/.venv/Lib/site-packages/yaml/__pycache__/emitter.cpython-311.pyc and b/.venv/Lib/site-packages/yaml/__pycache__/emitter.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/yaml/__pycache__/error.cpython-311.pyc b/.venv/Lib/site-packages/yaml/__pycache__/error.cpython-311.pyc index 3229996e..2fe9fa06 100644 Binary files a/.venv/Lib/site-packages/yaml/__pycache__/error.cpython-311.pyc and b/.venv/Lib/site-packages/yaml/__pycache__/error.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/yaml/__pycache__/events.cpython-311.pyc b/.venv/Lib/site-packages/yaml/__pycache__/events.cpython-311.pyc index afaf44d3..28dffa47 100644 Binary files a/.venv/Lib/site-packages/yaml/__pycache__/events.cpython-311.pyc and b/.venv/Lib/site-packages/yaml/__pycache__/events.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/yaml/__pycache__/loader.cpython-311.pyc b/.venv/Lib/site-packages/yaml/__pycache__/loader.cpython-311.pyc index 308d204e..61e86cb0 100644 Binary files a/.venv/Lib/site-packages/yaml/__pycache__/loader.cpython-311.pyc and b/.venv/Lib/site-packages/yaml/__pycache__/loader.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/yaml/__pycache__/nodes.cpython-311.pyc b/.venv/Lib/site-packages/yaml/__pycache__/nodes.cpython-311.pyc index 9cc40364..6a74cfe0 100644 Binary files a/.venv/Lib/site-packages/yaml/__pycache__/nodes.cpython-311.pyc and b/.venv/Lib/site-packages/yaml/__pycache__/nodes.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/yaml/__pycache__/parser.cpython-311.pyc b/.venv/Lib/site-packages/yaml/__pycache__/parser.cpython-311.pyc index 249d10e8..fcf71bad 100644 Binary files a/.venv/Lib/site-packages/yaml/__pycache__/parser.cpython-311.pyc and b/.venv/Lib/site-packages/yaml/__pycache__/parser.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/yaml/__pycache__/reader.cpython-311.pyc b/.venv/Lib/site-packages/yaml/__pycache__/reader.cpython-311.pyc index 83a3cf52..697fa3d3 100644 Binary files 
a/.venv/Lib/site-packages/yaml/__pycache__/reader.cpython-311.pyc and b/.venv/Lib/site-packages/yaml/__pycache__/reader.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/yaml/__pycache__/representer.cpython-311.pyc b/.venv/Lib/site-packages/yaml/__pycache__/representer.cpython-311.pyc index 32a8d90a..8f06474b 100644 Binary files a/.venv/Lib/site-packages/yaml/__pycache__/representer.cpython-311.pyc and b/.venv/Lib/site-packages/yaml/__pycache__/representer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/yaml/__pycache__/resolver.cpython-311.pyc b/.venv/Lib/site-packages/yaml/__pycache__/resolver.cpython-311.pyc index 131117c4..0d52fe52 100644 Binary files a/.venv/Lib/site-packages/yaml/__pycache__/resolver.cpython-311.pyc and b/.venv/Lib/site-packages/yaml/__pycache__/resolver.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/yaml/__pycache__/scanner.cpython-311.pyc b/.venv/Lib/site-packages/yaml/__pycache__/scanner.cpython-311.pyc index ff9a5061..bea386b4 100644 Binary files a/.venv/Lib/site-packages/yaml/__pycache__/scanner.cpython-311.pyc and b/.venv/Lib/site-packages/yaml/__pycache__/scanner.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/yaml/__pycache__/serializer.cpython-311.pyc b/.venv/Lib/site-packages/yaml/__pycache__/serializer.cpython-311.pyc index ee4cda35..573d0955 100644 Binary files a/.venv/Lib/site-packages/yaml/__pycache__/serializer.cpython-311.pyc and b/.venv/Lib/site-packages/yaml/__pycache__/serializer.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/yaml/__pycache__/tokens.cpython-311.pyc b/.venv/Lib/site-packages/yaml/__pycache__/tokens.cpython-311.pyc index 05b0d621..34d2a0bc 100644 Binary files a/.venv/Lib/site-packages/yaml/__pycache__/tokens.cpython-311.pyc and b/.venv/Lib/site-packages/yaml/__pycache__/tokens.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/yarl/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/yarl/__pycache__/__init__.cpython-311.pyc index 
4f059cf2..a67cdc0c 100644 Binary files a/.venv/Lib/site-packages/yarl/__pycache__/__init__.cpython-311.pyc and b/.venv/Lib/site-packages/yarl/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/yarl/__pycache__/_quoting.cpython-311.pyc b/.venv/Lib/site-packages/yarl/__pycache__/_quoting.cpython-311.pyc index fb21dac4..421fcd9b 100644 Binary files a/.venv/Lib/site-packages/yarl/__pycache__/_quoting.cpython-311.pyc and b/.venv/Lib/site-packages/yarl/__pycache__/_quoting.cpython-311.pyc differ diff --git a/.venv/Lib/site-packages/yarl/__pycache__/_url.cpython-311.pyc b/.venv/Lib/site-packages/yarl/__pycache__/_url.cpython-311.pyc index 7db7fda9..16800481 100644 Binary files a/.venv/Lib/site-packages/yarl/__pycache__/_url.cpython-311.pyc and b/.venv/Lib/site-packages/yarl/__pycache__/_url.cpython-311.pyc differ diff --git a/.venv/Scripts/distro.exe b/.venv/Scripts/distro.exe new file mode 100644 index 00000000..999a75e0 Binary files /dev/null and b/.venv/Scripts/distro.exe differ diff --git a/.venv/Scripts/httpx.exe b/.venv/Scripts/httpx.exe new file mode 100644 index 00000000..ac8f2883 Binary files /dev/null and b/.venv/Scripts/httpx.exe differ diff --git a/.venv/Scripts/imageio_download_bin.exe b/.venv/Scripts/imageio_download_bin.exe new file mode 100644 index 00000000..18639a13 Binary files /dev/null and b/.venv/Scripts/imageio_download_bin.exe differ diff --git a/.venv/Scripts/imageio_remove_bin.exe b/.venv/Scripts/imageio_remove_bin.exe new file mode 100644 index 00000000..dce23fea Binary files /dev/null and b/.venv/Scripts/imageio_remove_bin.exe differ diff --git a/.venv/Scripts/openai.exe b/.venv/Scripts/openai.exe new file mode 100644 index 00000000..17a17ed0 Binary files /dev/null and b/.venv/Scripts/openai.exe differ diff --git a/Arabic.mp3 b/Arabic.mp3 new file mode 100644 index 00000000..bfc73d3d Binary files /dev/null and b/Arabic.mp3 differ diff --git a/Arabic1.mp3 b/Arabic1.mp3 new file mode 100644 index 00000000..6cbc8a00 
Binary files /dev/null and b/Arabic1.mp3 differ diff --git a/Arabic2.mp3 b/Arabic2.mp3 new file mode 100644 index 00000000..3e1e36f1 Binary files /dev/null and b/Arabic2.mp3 differ diff --git a/Arabic3.mp3 b/Arabic3.mp3 new file mode 100644 index 00000000..98a5f81d Binary files /dev/null and b/Arabic3.mp3 differ diff --git a/Arabic4.mp3 b/Arabic4.mp3 new file mode 100644 index 00000000..5e9f1921 Binary files /dev/null and b/Arabic4.mp3 differ diff --git a/Arabic5.mp3 b/Arabic5.mp3 new file mode 100644 index 00000000..847ce986 Binary files /dev/null and b/Arabic5.mp3 differ diff --git a/Combined.py b/Combined.py new file mode 100644 index 00000000..6b6f82fd --- /dev/null +++ b/Combined.py @@ -0,0 +1,28 @@ +from moviepy.editor import VideoFileClip, AudioFileClip +import random + +def combine_audio_video(audio_file, video_file, output_file): + # Load the audio and video files + audio = AudioFileClip(audio_file) + video = VideoFileClip(video_file) + + # Get the duration of the audio and video files + audio_duration = audio.duration + video_duration = video.duration + + # Generate a random start time for the video clip + start_time = random.uniform(0, video_duration - audio_duration) + + # Trim the video to match the audio duration and start from the random point + trimmed_video = video.subclip(start_time, start_time + audio_duration) + + # Combine the trimmed video and audio + final_clip = trimmed_video.set_audio(audio) + + # Write the combined clip to a new video file + final_clip.write_videofile(output_file) + + # Close the audio and video files + audio.close() + video.close() + final_clip.close() diff --git a/__pycache__/Combined.cpython-311.pyc b/__pycache__/Combined.cpython-311.pyc new file mode 100644 index 00000000..441c6601 Binary files /dev/null and b/__pycache__/Combined.cpython-311.pyc differ diff --git a/__pycache__/xtts.cpython-311.pyc b/__pycache__/xtts.cpython-311.pyc new file mode 100644 index 00000000..cdac5981 Binary files /dev/null and 
b/__pycache__/xtts.cpython-311.pyc differ diff --git a/combined_video.mp4 b/combined_video.mp4 new file mode 100644 index 00000000..355df94a --- /dev/null +++ b/combined_video.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9738cde63194957609a1b7639c561b30d4c7126473740d641436629b61e9118d +size 125784374 diff --git a/m3rof.ogg b/m3rof.ogg deleted file mode 100644 index 5801624a..00000000 Binary files a/m3rof.ogg and /dev/null differ diff --git a/main.py b/main.py new file mode 100644 index 00000000..3c799629 --- /dev/null +++ b/main.py @@ -0,0 +1,7 @@ +from Combined import combine_audio_video +from xtts import text_to_speech + + +text_to_speech("The Scratching in the Walls It started with faint sounds in the dead of night. A soft scratching that seemed to come from inside the walls of the old house. At first, Sarah dismissed it as merely the settling of the ancient wooden beams. But the noises grew louder and more frequent with each passing evening. Scratch...scratch...scratch... The skittering sounds would come from all around her bedroom - first from one wall, then another, then the ceiling above. Sarah would lie awake, covers pulled tightly around her, trying to pinpoint the origin of the unsettling noises. Her heart would pound in her ears as her mind raced with dreadful possibilities. Rats in the walls, she tried to rationalize. Just rodents scurrying about, nothing more. But the sounds seemed too deliberate, too purposeful to be mere rats foraging. It was almost as if something was trying to work its way through the wooden slats, clawing mercilessly to get inside. One brutally hot summer night, Sarah awoke to the unmistakable sounds of splintering wood. Petrified, she watched in horror as a panel in her closet began to buckle and crack outward. Elongated black appendages seeped through the fractures, followed by rows of wriggling legs and a disgustingly misshapen body. 
It came scrabbling out of the wall, an ungodly hybrid of insect and arachnid, its multi-faceted eyes glittering with primal hunger. The grotesque creature opened a circular maw of serrated mandibles, emitting a deafening screech. Sarah's terrified screams pierced the night as the abomination lurched across the floor towards her bed, a nightmarish hell-spawn of indescribable ferocity...") + +combine_audio_video("output.wav", "original_video.webm", "combined_video.mp4") diff --git a/male.wav b/male.wav index 5048a869..8eceb864 100644 Binary files a/male.wav and b/male.wav differ diff --git a/original_video.webm b/original_video.webm new file mode 100644 index 00000000..5d8bbd7f --- /dev/null +++ b/original_video.webm @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4f7948bf400fd4b9efbdae8068ce6ad2b308563173f946cda2657ddc11622385 +size 238798706 diff --git a/output.wav b/output.wav index d473928f..fb4dd757 100644 Binary files a/output.wav and b/output.wav differ diff --git a/test.py b/test.py new file mode 100644 index 00000000..da3d2ad2 --- /dev/null +++ b/test.py @@ -0,0 +1,19 @@ +import openai + +your_openai_key = 'sk-...' +d = { + 'Arabic': 'كان ليل هادئ في تلك القرية النائية، عندما استيقظ جون فجأة على صوت خشخشة غريبة. فتح عينيه ببطء ليجد ظلاً كئيباً يقف عند طرف سريره. كان الظل طويلاً ونحيفاً بشكل مخيف، ولم يكن له ملامح واضحة.', +} + + +client = openai.OpenAI(api_key="sk-proj-Dhg6JAP1ADX0VpM3ARg6T3BlbkFJmchI4lsTbZrV0X3XitI0") +voices = ['alloy', 'echo', 'fable', 'onyx', 'nova', 'shimmer'] + +for language in d: + response = client.audio.speech.create( + model="tts-1", + voice='shimmer', + input=d[language] + ) + + response.stream_to_file(f'{language}5.mp3') \ No newline at end of file