cranky-coder08 commited on
Commit
6422606
·
verified ·
1 Parent(s): db52997

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +64 -0
  2. merged_tinyllama_logger/model.safetensors +3 -0
  3. phivenv/Lib/site-packages/__pycache__/isympy.cpython-39.pyc +0 -0
  4. phivenv/Lib/site-packages/__pycache__/typing_extensions.cpython-39.pyc +3 -0
  5. phivenv/Lib/site-packages/_distutils_hack/__init__.py +128 -0
  6. phivenv/Lib/site-packages/_distutils_hack/__pycache__/__init__.cpython-39.pyc +0 -0
  7. phivenv/Lib/site-packages/_distutils_hack/__pycache__/override.cpython-39.pyc +0 -0
  8. phivenv/Lib/site-packages/_distutils_hack/override.py +1 -0
  9. phivenv/Lib/site-packages/_yaml/__init__.py +33 -0
  10. phivenv/Lib/site-packages/_yaml/__pycache__/__init__.cpython-39.pyc +0 -0
  11. phivenv/Lib/site-packages/torch/__pycache__/_meta_registrations.cpython-39.pyc +3 -0
  12. phivenv/Lib/site-packages/torch/__pycache__/_tensor_docs.cpython-39.pyc +3 -0
  13. phivenv/Lib/site-packages/torch/__pycache__/_torch_docs.cpython-39.pyc +3 -0
  14. phivenv/Lib/site-packages/torch/__pycache__/overrides.cpython-39.pyc +3 -0
  15. phivenv/Lib/site-packages/torch/_decomp/__pycache__/decompositions.cpython-39.pyc +3 -0
  16. phivenv/Lib/site-packages/torch/_dynamo/__pycache__/symbolic_convert.cpython-39.pyc +3 -0
  17. phivenv/Lib/site-packages/torch/_dynamo/__pycache__/trace_rules.cpython-39.pyc +3 -0
  18. phivenv/Lib/site-packages/torch/_dynamo/__pycache__/utils.cpython-39.pyc +3 -0
  19. phivenv/Lib/site-packages/torch/_inductor/__pycache__/codecache.cpython-39.pyc +3 -0
  20. phivenv/Lib/site-packages/torch/_inductor/__pycache__/ir.cpython-39.pyc +3 -0
  21. phivenv/Lib/site-packages/torch/_inductor/__pycache__/lowering.cpython-39.pyc +3 -0
  22. phivenv/Lib/site-packages/torch/_inductor/__pycache__/scheduler.cpython-39.pyc +3 -0
  23. phivenv/Lib/site-packages/torch/_inductor/codegen/__pycache__/cpp.cpython-39.pyc +3 -0
  24. phivenv/Lib/site-packages/torch/_inductor/codegen/__pycache__/triton.cpython-39.pyc +3 -0
  25. phivenv/Lib/site-packages/torch/_inductor/codegen/__pycache__/wrapper.cpython-39.pyc +3 -0
  26. phivenv/Lib/site-packages/torch/_refs/__pycache__/__init__.cpython-39.pyc +3 -0
  27. phivenv/Lib/site-packages/torch/lib/dnnl.lib +3 -0
  28. phivenv/Lib/site-packages/torch/lib/torch_cpu.dll +3 -0
  29. phivenv/Lib/site-packages/torch/nn/__pycache__/functional.cpython-39.pyc +3 -0
  30. phivenv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset9.cpython-39.pyc +3 -0
  31. phivenv/Lib/site-packages/torch/sparse/__pycache__/_triton_ops_meta.cpython-39.pyc +3 -0
  32. phivenv/Lib/site-packages/torch/testing/_internal/__pycache__/common_methods_invocations.cpython-39.pyc +3 -0
  33. phivenv/Lib/site-packages/torch/testing/_internal/__pycache__/common_nn.cpython-39.pyc +3 -0
  34. phivenv/Lib/site-packages/torch/testing/_internal/__pycache__/common_quantization.cpython-39.pyc +3 -0
  35. phivenv/Lib/site-packages/torch/testing/_internal/__pycache__/common_utils.cpython-39.pyc +3 -0
  36. phivenv/Lib/site-packages/torch/testing/_internal/distributed/__pycache__/distributed_test.cpython-39.pyc +3 -0
  37. phivenv/Lib/site-packages/torch/testing/_internal/distributed/rpc/__pycache__/rpc_test.cpython-39.pyc +3 -0
  38. phivenv/Lib/site-packages/torch/testing/_internal/generated/__pycache__/annotated_fn_args.cpython-39.pyc +3 -0
  39. phivenv/Lib/site-packages/torch/utils/hipify/__pycache__/cuda_to_hip_mappings.cpython-39.pyc +3 -0
  40. phivenv/Lib/site-packages/transformers-4.56.1.dist-info/INSTALLER +1 -0
  41. phivenv/Lib/site-packages/transformers-4.56.1.dist-info/METADATA +850 -0
  42. phivenv/Lib/site-packages/transformers-4.56.1.dist-info/RECORD +0 -0
  43. phivenv/Lib/site-packages/transformers-4.56.1.dist-info/REQUESTED +0 -0
  44. phivenv/Lib/site-packages/transformers-4.56.1.dist-info/WHEEL +5 -0
  45. phivenv/Lib/site-packages/transformers-4.56.1.dist-info/entry_points.txt +3 -0
  46. phivenv/Lib/site-packages/transformers-4.56.1.dist-info/licenses/LICENSE +203 -0
  47. phivenv/Lib/site-packages/transformers-4.56.1.dist-info/top_level.txt +1 -0
  48. phivenv/Lib/site-packages/transformers/__pycache__/__init__.cpython-39.pyc +0 -0
  49. phivenv/Lib/site-packages/transformers/__pycache__/activations.cpython-39.pyc +0 -0
  50. phivenv/Lib/site-packages/transformers/__pycache__/activations_tf.cpython-39.pyc +0 -0
.gitattributes CHANGED
@@ -128,3 +128,67 @@ phivenv/Lib/site-packages/torch/lib/torch_python.lib filter=lfs diff=lfs merge=l
128
  phivenv/Lib/site-packages/torch/lib/uv.dll filter=lfs diff=lfs merge=lfs -text
129
  phivenv/Lib/site-packages/torch/lib/XNNPACK.lib filter=lfs diff=lfs merge=lfs -text
130
  phivenv/Lib/site-packages/torch/linalg/__pycache__/__init__.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
128
  phivenv/Lib/site-packages/torch/lib/uv.dll filter=lfs diff=lfs merge=lfs -text
129
  phivenv/Lib/site-packages/torch/lib/XNNPACK.lib filter=lfs diff=lfs merge=lfs -text
130
  phivenv/Lib/site-packages/torch/linalg/__pycache__/__init__.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
131
+ phivenv/Lib/site-packages/torch/nn/__pycache__/functional.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
132
+ phivenv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset9.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
133
+ phivenv/Lib/site-packages/torch/sparse/__pycache__/_triton_ops_meta.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
134
+ phivenv/Lib/site-packages/torch/testing/_internal/distributed/rpc/__pycache__/rpc_test.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
135
+ phivenv/Lib/site-packages/torch/testing/_internal/generated/__pycache__/annotated_fn_args.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
136
+ phivenv/Lib/site-packages/torch/testing/_internal/distributed/__pycache__/distributed_test.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
137
+ phivenv/Lib/site-packages/torch/testing/_internal/__pycache__/common_methods_invocations.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
138
+ phivenv/Lib/site-packages/torch/testing/_internal/__pycache__/common_nn.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
139
+ phivenv/Lib/site-packages/torch/testing/_internal/__pycache__/common_quantization.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
140
+ phivenv/Lib/site-packages/torch/lib/torch_cpu.dll filter=lfs diff=lfs merge=lfs -text
141
+ phivenv/Lib/site-packages/torch/testing/_internal/__pycache__/common_utils.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
142
+ phivenv/Lib/site-packages/torch/utils/hipify/__pycache__/cuda_to_hip_mappings.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
143
+ phivenv/Lib/site-packages/torch/_decomp/__pycache__/decompositions.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
144
+ phivenv/Lib/site-packages/torch/_dynamo/__pycache__/symbolic_convert.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
145
+ phivenv/Lib/site-packages/torch/_dynamo/__pycache__/trace_rules.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
146
+ phivenv/Lib/site-packages/torch/_dynamo/__pycache__/utils.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
147
+ phivenv/Lib/site-packages/torch/_inductor/codegen/__pycache__/cpp.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
148
+ phivenv/Lib/site-packages/torch/_inductor/codegen/__pycache__/triton.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
149
+ phivenv/Lib/site-packages/torch/_inductor/codegen/__pycache__/wrapper.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
150
+ phivenv/Lib/site-packages/torch/_inductor/__pycache__/codecache.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
151
+ phivenv/Lib/site-packages/torch/_inductor/__pycache__/ir.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
152
+ phivenv/Lib/site-packages/torch/_inductor/__pycache__/lowering.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
153
+ phivenv/Lib/site-packages/torch/_inductor/__pycache__/scheduler.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
154
+ phivenv/Lib/site-packages/torch/_refs/__pycache__/__init__.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
155
+ phivenv/Lib/site-packages/torch/__pycache__/overrides.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
156
+ phivenv/Lib/site-packages/torch/__pycache__/_meta_registrations.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
157
+ phivenv/Lib/site-packages/torch/__pycache__/_tensor_docs.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
158
+ phivenv/Lib/site-packages/transformers/generation/__pycache__/logits_process.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
159
+ phivenv/Lib/site-packages/torch/__pycache__/_torch_docs.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
160
+ phivenv/Lib/site-packages/transformers/generation/__pycache__/tf_utils.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
161
+ phivenv/Lib/site-packages/transformers/generation/__pycache__/utils.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
162
+ phivenv/Lib/site-packages/transformers/models/qwen2_5_omni/__pycache__/modeling_qwen2_5_omni.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
163
+ phivenv/Lib/site-packages/transformers/models/oneformer/__pycache__/modeling_oneformer.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
164
+ phivenv/Lib/site-packages/transformers/models/qwen2_5_omni/__pycache__/modular_qwen2_5_omni.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
165
+ phivenv/Lib/site-packages/transformers/models/seamless_m4t/__pycache__/modeling_seamless_m4t.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
166
+ phivenv/Lib/site-packages/transformers/models/seamless_m4t_v2/__pycache__/modeling_seamless_m4t_v2.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
167
+ phivenv/Lib/site-packages/transformers/models/speecht5/__pycache__/modeling_speecht5.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
168
+ phivenv/Lib/site-packages/transformers/__pycache__/modeling_outputs.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
169
+ phivenv/Lib/site-packages/transformers/__pycache__/modeling_tf_utils.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
170
+ phivenv/Lib/site-packages/transformers/__pycache__/testing_utils.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
171
+ phivenv/Lib/site-packages/transformers/__pycache__/modeling_utils.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
172
+ phivenv/Lib/site-packages/transformers/__pycache__/tokenization_utils_base.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
173
+ phivenv/Lib/site-packages/transformers/__pycache__/trainer.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
174
+ phivenv/Lib/site-packages/transformers/__pycache__/training_args.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
175
+ phivenv/Lib/site-packages/__pycache__/typing_extensions.cpython-39.pyc filter=lfs diff=lfs merge=lfs -text
176
+ phivenv/Scripts/f2py.exe filter=lfs diff=lfs merge=lfs -text
177
+ phivenv/Lib/site-packages/yaml/_yaml.cp39-win_amd64.pyd filter=lfs diff=lfs merge=lfs -text
178
+ phivenv/Scripts/hf.exe filter=lfs diff=lfs merge=lfs -text
179
+ phivenv/Scripts/normalizer.exe filter=lfs diff=lfs merge=lfs -text
180
+ phivenv/Scripts/huggingface-cli.exe filter=lfs diff=lfs merge=lfs -text
181
+ phivenv/Scripts/isympy.exe filter=lfs diff=lfs merge=lfs -text
182
+ phivenv/Scripts/numpy-config.exe filter=lfs diff=lfs merge=lfs -text
183
+ phivenv/Scripts/pip3.exe filter=lfs diff=lfs merge=lfs -text
184
+ phivenv/Scripts/pip.exe filter=lfs diff=lfs merge=lfs -text
185
+ phivenv/Scripts/pip3.9.exe filter=lfs diff=lfs merge=lfs -text
186
+ phivenv/Scripts/pythonw.exe filter=lfs diff=lfs merge=lfs -text
187
+ phivenv/Scripts/python.exe filter=lfs diff=lfs merge=lfs -text
188
+ phivenv/Scripts/tiny-agents.exe filter=lfs diff=lfs merge=lfs -text
189
+ phivenv/Scripts/torchfrtrace.exe filter=lfs diff=lfs merge=lfs -text
190
+ phivenv/Scripts/torchrun.exe filter=lfs diff=lfs merge=lfs -text
191
+ phivenv/Scripts/tqdm.exe filter=lfs diff=lfs merge=lfs -text
192
+ phivenv/Scripts/transformers.exe filter=lfs diff=lfs merge=lfs -text
193
+ phivenv/Scripts/transformers-cli.exe filter=lfs diff=lfs merge=lfs -text
194
+ phivenv/Lib/site-packages/torch/lib/dnnl.lib filter=lfs diff=lfs merge=lfs -text
merged_tinyllama_logger/model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c9a3e493fbfbcd04d70443a3471f812320814204ac41ad0d3e39946389ac98f7
3
+ size 2200119664
phivenv/Lib/site-packages/__pycache__/isympy.cpython-39.pyc ADDED
Binary file (9.41 kB). View file
 
phivenv/Lib/site-packages/__pycache__/typing_extensions.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:aadac2fcfa87f07f3ef6b6bbdb828cec938ef658a6eb3bb6d6bf73f0ee590646
3
+ size 116965
phivenv/Lib/site-packages/_distutils_hack/__init__.py ADDED
@@ -0,0 +1,128 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sys
2
+ import os
3
+ import re
4
+ import importlib
5
+ import warnings
6
+
7
+
8
+ is_pypy = '__pypy__' in sys.builtin_module_names
9
+
10
+
11
+ warnings.filterwarnings('ignore',
12
+ r'.+ distutils\b.+ deprecated',
13
+ DeprecationWarning)
14
+
15
+
16
+ def warn_distutils_present():
17
+ if 'distutils' not in sys.modules:
18
+ return
19
+ if is_pypy and sys.version_info < (3, 7):
20
+ # PyPy for 3.6 unconditionally imports distutils, so bypass the warning
21
+ # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
22
+ return
23
+ warnings.warn(
24
+ "Distutils was imported before Setuptools, but importing Setuptools "
25
+ "also replaces the `distutils` module in `sys.modules`. This may lead "
26
+ "to undesirable behaviors or errors. To avoid these issues, avoid "
27
+ "using distutils directly, ensure that setuptools is installed in the "
28
+ "traditional way (e.g. not an editable install), and/or make sure "
29
+ "that setuptools is always imported before distutils.")
30
+
31
+
32
+ def clear_distutils():
33
+ if 'distutils' not in sys.modules:
34
+ return
35
+ warnings.warn("Setuptools is replacing distutils.")
36
+ mods = [name for name in sys.modules if re.match(r'distutils\b', name)]
37
+ for name in mods:
38
+ del sys.modules[name]
39
+
40
+
41
+ def enabled():
42
+ """
43
+ Allow selection of distutils by environment variable.
44
+ """
45
+ which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib')
46
+ return which == 'local'
47
+
48
+
49
+ def ensure_local_distutils():
50
+ clear_distutils()
51
+ distutils = importlib.import_module('setuptools._distutils')
52
+ distutils.__name__ = 'distutils'
53
+ sys.modules['distutils'] = distutils
54
+
55
+ # sanity check that submodules load as expected
56
+ core = importlib.import_module('distutils.core')
57
+ assert '_distutils' in core.__file__, core.__file__
58
+
59
+
60
+ def do_override():
61
+ """
62
+ Ensure that the local copy of distutils is preferred over stdlib.
63
+
64
+ See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
65
+ for more motivation.
66
+ """
67
+ if enabled():
68
+ warn_distutils_present()
69
+ ensure_local_distutils()
70
+
71
+
72
+ class DistutilsMetaFinder:
73
+ def find_spec(self, fullname, path, target=None):
74
+ if path is not None:
75
+ return
76
+
77
+ method_name = 'spec_for_{fullname}'.format(**locals())
78
+ method = getattr(self, method_name, lambda: None)
79
+ return method()
80
+
81
+ def spec_for_distutils(self):
82
+ import importlib.abc
83
+ import importlib.util
84
+
85
+ class DistutilsLoader(importlib.abc.Loader):
86
+
87
+ def create_module(self, spec):
88
+ return importlib.import_module('setuptools._distutils')
89
+
90
+ def exec_module(self, module):
91
+ pass
92
+
93
+ return importlib.util.spec_from_loader('distutils', DistutilsLoader())
94
+
95
+ def spec_for_pip(self):
96
+ """
97
+ Ensure stdlib distutils when running under pip.
98
+ See pypa/pip#8761 for rationale.
99
+ """
100
+ if self.pip_imported_during_build():
101
+ return
102
+ clear_distutils()
103
+ self.spec_for_distutils = lambda: None
104
+
105
+ @staticmethod
106
+ def pip_imported_during_build():
107
+ """
108
+ Detect if pip is being imported in a build script. Ref #2355.
109
+ """
110
+ import traceback
111
+ return any(
112
+ frame.f_globals['__file__'].endswith('setup.py')
113
+ for frame, line in traceback.walk_stack(None)
114
+ )
115
+
116
+
117
+ DISTUTILS_FINDER = DistutilsMetaFinder()
118
+
119
+
120
+ def add_shim():
121
+ sys.meta_path.insert(0, DISTUTILS_FINDER)
122
+
123
+
124
+ def remove_shim():
125
+ try:
126
+ sys.meta_path.remove(DISTUTILS_FINDER)
127
+ except ValueError:
128
+ pass
phivenv/Lib/site-packages/_distutils_hack/__pycache__/__init__.cpython-39.pyc ADDED
Binary file (5.08 kB). View file
 
phivenv/Lib/site-packages/_distutils_hack/__pycache__/override.cpython-39.pyc ADDED
Binary file (210 Bytes). View file
 
phivenv/Lib/site-packages/_distutils_hack/override.py ADDED
@@ -0,0 +1 @@
 
 
1
+ __import__('_distutils_hack').do_override()
phivenv/Lib/site-packages/_yaml/__init__.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This is a stub package designed to roughly emulate the _yaml
2
+ # extension module, which previously existed as a standalone module
3
+ # and has been moved into the `yaml` package namespace.
4
+ # It does not perfectly mimic its old counterpart, but should get
5
+ # close enough for anyone who's relying on it even when they shouldn't.
6
+ import yaml
7
+
8
+ # in some circumstances, the yaml module we imoprted may be from a different version, so we need
9
+ # to tread carefully when poking at it here (it may not have the attributes we expect)
10
+ if not getattr(yaml, '__with_libyaml__', False):
11
+ from sys import version_info
12
+
13
+ exc = ModuleNotFoundError if version_info >= (3, 6) else ImportError
14
+ raise exc("No module named '_yaml'")
15
+ else:
16
+ from yaml._yaml import *
17
+ import warnings
18
+ warnings.warn(
19
+ 'The _yaml extension module is now located at yaml._yaml'
20
+ ' and its location is subject to change. To use the'
21
+ ' LibYAML-based parser and emitter, import from `yaml`:'
22
+ ' `from yaml import CLoader as Loader, CDumper as Dumper`.',
23
+ DeprecationWarning
24
+ )
25
+ del warnings
26
+ # Don't `del yaml` here because yaml is actually an existing
27
+ # namespace member of _yaml.
28
+
29
+ __name__ = '_yaml'
30
+ # If the module is top-level (i.e. not a part of any specific package)
31
+ # then the attribute should be set to ''.
32
+ # https://docs.python.org/3.8/library/types.html
33
+ __package__ = ''
phivenv/Lib/site-packages/_yaml/__pycache__/__init__.cpython-39.pyc ADDED
Binary file (713 Bytes). View file
 
phivenv/Lib/site-packages/torch/__pycache__/_meta_registrations.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3ae1c3d27d5d2548cb5e6be65eb6a093e7e3c3a9ff1e06c8afb9841c04f86494
3
+ size 209408
phivenv/Lib/site-packages/torch/__pycache__/_tensor_docs.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c3e4323e69be22cad3b79b10d11b27de54b3b29208e13613c9ffc81a814e342c
3
+ size 136246
phivenv/Lib/site-packages/torch/__pycache__/_torch_docs.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:40f0ad3cd2660fa732425f4b7da696a2db14c9a1cf0e2d8567e2d9525f740ae1
3
+ size 417156
phivenv/Lib/site-packages/torch/__pycache__/overrides.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:89b4a7b47b10e16f8f9766d3430c43985635b18e6d8a62175607a10933fe8de5
3
+ size 144431
phivenv/Lib/site-packages/torch/_decomp/__pycache__/decompositions.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:15343d84e2771ff78d9b8ffaecc7a51db9c4d42459fbef4d17031f13094ba107
3
+ size 124767
phivenv/Lib/site-packages/torch/_dynamo/__pycache__/symbolic_convert.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f3269876f6260c211bdcd324eb35987ad40b503c68cfbafe5eabb7e5f3b054bc
3
+ size 109794
phivenv/Lib/site-packages/torch/_dynamo/__pycache__/trace_rules.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7410b48f43e43d3f20e1594eb7afb30c6bc5d9bf05a1530159e89b1e38a5192e
3
+ size 108028
phivenv/Lib/site-packages/torch/_dynamo/__pycache__/utils.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0bfaf1a7f8748438fe2c7388bdea73e49492037e82ebe8a34c372658014d4ba8
3
+ size 126639
phivenv/Lib/site-packages/torch/_inductor/__pycache__/codecache.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4856347f35d9d395071ea59e035d468bd05041727887e98e52c3845f82878ab9
3
+ size 111045
phivenv/Lib/site-packages/torch/_inductor/__pycache__/ir.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:694573ea5aa1d90d3751db797683054b2d5b4ef37edf2529331a5058ca3c4c5b
3
+ size 244037
phivenv/Lib/site-packages/torch/_inductor/__pycache__/lowering.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:16632de209f90a9a290cd0a2030099fed9b99d8733fc6a1f3c6deea1fe21f42e
3
+ size 171411
phivenv/Lib/site-packages/torch/_inductor/__pycache__/scheduler.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1dd8a5f9bfbf809e0385f18d343fc00ec0705fb8bc7d8ea1f336584546ef45f4
3
+ size 147045
phivenv/Lib/site-packages/torch/_inductor/codegen/__pycache__/cpp.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:da7211073f74db139088fcb2753a0d6ef8aef8fd814d11084cacea06c36adfb8
3
+ size 150547
phivenv/Lib/site-packages/torch/_inductor/codegen/__pycache__/triton.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fc8af6dec16a650c9be35d3bd8a55ed14c79f073fb0b07efb9ee83571085b3d8
3
+ size 130879
phivenv/Lib/site-packages/torch/_inductor/codegen/__pycache__/wrapper.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9a104e03f555c629943a7dc748f5ba6dd1659c76109e563dffef5ef9e261f8f2
3
+ size 104191
phivenv/Lib/site-packages/torch/_refs/__pycache__/__init__.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:925e670b8052eba3739169a7ae7a783784f0b10718ad717196439b16e6e74041
3
+ size 148324
phivenv/Lib/site-packages/torch/lib/dnnl.lib ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:50c54db622374faed553e2698a7924b244414c9adc4791b1b90dc66300f525da
3
+ size 694253678
phivenv/Lib/site-packages/torch/lib/torch_cpu.dll ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:671457d3a62d7acff7c28c7ca1cd9dd1f46bb6c6a881923659e8e6140d67ba1a
3
+ size 255934976
phivenv/Lib/site-packages/torch/nn/__pycache__/functional.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:52ccec195f7402492977ac01a0cc6bea7c44bf4894c648f0a06eb8e14fbe87d7
3
+ size 188573
phivenv/Lib/site-packages/torch/onnx/__pycache__/symbolic_opset9.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:67b4ae694bb98430c8edbb194fb5d98a1c0157e200da7a1aa556200f95b108bb
3
+ size 138920
phivenv/Lib/site-packages/torch/sparse/__pycache__/_triton_ops_meta.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:da26879cd8c66c51ed3508010ca3923a2ed437b85b3378dffe5a2976c48f3648
3
+ size 178739
phivenv/Lib/site-packages/torch/testing/_internal/__pycache__/common_methods_invocations.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3df98f2394a4b992cb5f47317ac9fea79e051c662f1c35add08fce4bb1f4f7ee
3
+ size 535247
phivenv/Lib/site-packages/torch/testing/_internal/__pycache__/common_nn.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1db65db0825d363c390fa100843f7f5470bf1b3a3d88bf786743b69b1e0d89fb
3
+ size 124257
phivenv/Lib/site-packages/torch/testing/_internal/__pycache__/common_quantization.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b6ab268abaea6df7740a026813603213545b5e5bedf9bd1a94bc25fb2342eb8c
3
+ size 115912
phivenv/Lib/site-packages/torch/testing/_internal/__pycache__/common_utils.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:48ae37810406988c0d56fbc91147d727add18a09a23931efc7e65b6c065d2bf7
3
+ size 176482
phivenv/Lib/site-packages/torch/testing/_internal/distributed/__pycache__/distributed_test.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2c9f08e0ee57a54bf9b863ead7fc83e2e718b902f51e68f7884cab763abe7a9f
3
+ size 266005
phivenv/Lib/site-packages/torch/testing/_internal/distributed/rpc/__pycache__/rpc_test.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8d6747e99564aa64f17456364b89c0d9f55cf125799699c47298589137938008
3
+ size 194991
phivenv/Lib/site-packages/torch/testing/_internal/generated/__pycache__/annotated_fn_args.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4fbc81d1b07f40240ab955baa712c5eabe38aba289f5b6535e815b89d5682da2
3
+ size 127238
phivenv/Lib/site-packages/torch/utils/hipify/__pycache__/cuda_to_hip_mappings.cpython-39.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7c6a0b4a8ea4c576c58c29e54debba6ebfc3befdc5aebf69d3eb5c4bcb0d2103
3
+ size 188704
phivenv/Lib/site-packages/transformers-4.56.1.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
 
 
1
+ pip
phivenv/Lib/site-packages/transformers-4.56.1.dist-info/METADATA ADDED
@@ -0,0 +1,850 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.4
2
+ Name: transformers
3
+ Version: 4.56.1
4
+ Summary: State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow
5
+ Home-page: https://github.com/huggingface/transformers
6
+ Author: The Hugging Face team (past and future) with the help of all our contributors (https://github.com/huggingface/transformers/graphs/contributors)
7
+ Author-email: transformers@huggingface.co
8
+ License: Apache 2.0 License
9
+ Keywords: NLP vision speech deep learning transformer pytorch tensorflow jax BERT GPT-2 Wav2Vec2 ViT
10
+ Classifier: Development Status :: 5 - Production/Stable
11
+ Classifier: Intended Audience :: Developers
12
+ Classifier: Intended Audience :: Education
13
+ Classifier: Intended Audience :: Science/Research
14
+ Classifier: License :: OSI Approved :: Apache Software License
15
+ Classifier: Operating System :: OS Independent
16
+ Classifier: Programming Language :: Python :: 3
17
+ Classifier: Programming Language :: Python :: 3.9
18
+ Classifier: Programming Language :: Python :: 3.10
19
+ Classifier: Programming Language :: Python :: 3.11
20
+ Classifier: Programming Language :: Python :: 3.12
21
+ Classifier: Programming Language :: Python :: 3.13
22
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
23
+ Requires-Python: >=3.9.0
24
+ Description-Content-Type: text/markdown
25
+ License-File: LICENSE
26
+ Requires-Dist: filelock
27
+ Requires-Dist: huggingface-hub<1.0,>=0.34.0
28
+ Requires-Dist: numpy>=1.17
29
+ Requires-Dist: packaging>=20.0
30
+ Requires-Dist: pyyaml>=5.1
31
+ Requires-Dist: regex!=2019.12.17
32
+ Requires-Dist: requests
33
+ Requires-Dist: tokenizers<=0.23.0,>=0.22.0
34
+ Requires-Dist: safetensors>=0.4.3
35
+ Requires-Dist: tqdm>=4.27
36
+ Provides-Extra: ja
37
+ Requires-Dist: fugashi>=1.0; extra == "ja"
38
+ Requires-Dist: ipadic<2.0,>=1.0.0; extra == "ja"
39
+ Requires-Dist: unidic_lite>=1.0.7; extra == "ja"
40
+ Requires-Dist: unidic>=1.0.2; extra == "ja"
41
+ Requires-Dist: sudachipy>=0.6.6; extra == "ja"
42
+ Requires-Dist: sudachidict_core>=20220729; extra == "ja"
43
+ Requires-Dist: rhoknp<1.3.1,>=1.1.0; extra == "ja"
44
+ Provides-Extra: sklearn
45
+ Requires-Dist: scikit-learn; extra == "sklearn"
46
+ Provides-Extra: tf
47
+ Requires-Dist: tensorflow<2.16,>2.9; extra == "tf"
48
+ Requires-Dist: onnxconverter-common; extra == "tf"
49
+ Requires-Dist: tf2onnx; extra == "tf"
50
+ Requires-Dist: tensorflow-text<2.16; extra == "tf"
51
+ Requires-Dist: keras-nlp<0.14.0,>=0.3.1; extra == "tf"
52
+ Provides-Extra: tf-cpu
53
+ Requires-Dist: keras<2.16,>2.9; extra == "tf-cpu"
54
+ Requires-Dist: tensorflow-cpu<2.16,>2.9; extra == "tf-cpu"
55
+ Requires-Dist: onnxconverter-common; extra == "tf-cpu"
56
+ Requires-Dist: tf2onnx; extra == "tf-cpu"
57
+ Requires-Dist: tensorflow-text<2.16; extra == "tf-cpu"
58
+ Requires-Dist: keras-nlp<0.14.0,>=0.3.1; extra == "tf-cpu"
59
+ Requires-Dist: tensorflow-probability<0.24; extra == "tf-cpu"
60
+ Provides-Extra: torch
61
+ Requires-Dist: torch>=2.2; extra == "torch"
62
+ Requires-Dist: accelerate>=0.26.0; extra == "torch"
63
+ Provides-Extra: accelerate
64
+ Requires-Dist: accelerate>=0.26.0; extra == "accelerate"
65
+ Provides-Extra: hf-xet
66
+ Requires-Dist: hf_xet; extra == "hf-xet"
67
+ Provides-Extra: retrieval
68
+ Requires-Dist: faiss-cpu; extra == "retrieval"
69
+ Requires-Dist: datasets>=2.15.0; extra == "retrieval"
70
+ Provides-Extra: flax
71
+ Requires-Dist: jax<=0.4.13,>=0.4.1; extra == "flax"
72
+ Requires-Dist: jaxlib<=0.4.13,>=0.4.1; extra == "flax"
73
+ Requires-Dist: flax<=0.7.0,>=0.4.1; extra == "flax"
74
+ Requires-Dist: optax<=0.1.4,>=0.0.8; extra == "flax"
75
+ Requires-Dist: scipy<1.13.0; extra == "flax"
76
+ Provides-Extra: tokenizers
77
+ Requires-Dist: tokenizers<=0.23.0,>=0.22.0; extra == "tokenizers"
78
+ Provides-Extra: ftfy
79
+ Requires-Dist: ftfy; extra == "ftfy"
80
+ Provides-Extra: onnxruntime
81
+ Requires-Dist: onnxruntime>=1.4.0; extra == "onnxruntime"
82
+ Requires-Dist: onnxruntime-tools>=1.4.2; extra == "onnxruntime"
83
+ Provides-Extra: onnx
84
+ Requires-Dist: onnxconverter-common; extra == "onnx"
85
+ Requires-Dist: tf2onnx; extra == "onnx"
86
+ Requires-Dist: onnxruntime>=1.4.0; extra == "onnx"
87
+ Requires-Dist: onnxruntime-tools>=1.4.2; extra == "onnx"
88
+ Provides-Extra: modelcreation
89
+ Requires-Dist: cookiecutter==1.7.3; extra == "modelcreation"
90
+ Provides-Extra: sagemaker
91
+ Requires-Dist: sagemaker>=2.31.0; extra == "sagemaker"
92
+ Provides-Extra: deepspeed
93
+ Requires-Dist: deepspeed>=0.9.3; extra == "deepspeed"
94
+ Requires-Dist: accelerate>=0.26.0; extra == "deepspeed"
95
+ Provides-Extra: optuna
96
+ Requires-Dist: optuna; extra == "optuna"
97
+ Provides-Extra: ray
98
+ Requires-Dist: ray[tune]>=2.7.0; extra == "ray"
99
+ Provides-Extra: sigopt
100
+ Requires-Dist: sigopt; extra == "sigopt"
101
+ Provides-Extra: hub-kernels
102
+ Requires-Dist: kernels<=0.9,>=0.6.1; extra == "hub-kernels"
103
+ Provides-Extra: integrations
104
+ Requires-Dist: kernels<=0.9,>=0.6.1; extra == "integrations"
105
+ Requires-Dist: optuna; extra == "integrations"
106
+ Requires-Dist: ray[tune]>=2.7.0; extra == "integrations"
107
+ Requires-Dist: sigopt; extra == "integrations"
108
+ Provides-Extra: serving
109
+ Requires-Dist: openai>=1.98.0; extra == "serving"
110
+ Requires-Dist: pydantic>=2; extra == "serving"
111
+ Requires-Dist: uvicorn; extra == "serving"
112
+ Requires-Dist: fastapi; extra == "serving"
113
+ Requires-Dist: starlette; extra == "serving"
114
+ Requires-Dist: torch>=2.2; extra == "serving"
115
+ Requires-Dist: accelerate>=0.26.0; extra == "serving"
116
+ Provides-Extra: audio
117
+ Requires-Dist: librosa; extra == "audio"
118
+ Requires-Dist: pyctcdecode>=0.4.0; extra == "audio"
119
+ Requires-Dist: phonemizer; extra == "audio"
120
+ Requires-Dist: kenlm; extra == "audio"
121
+ Provides-Extra: speech
122
+ Requires-Dist: torchaudio; extra == "speech"
123
+ Requires-Dist: librosa; extra == "speech"
124
+ Requires-Dist: pyctcdecode>=0.4.0; extra == "speech"
125
+ Requires-Dist: phonemizer; extra == "speech"
126
+ Requires-Dist: kenlm; extra == "speech"
127
+ Provides-Extra: torch-speech
128
+ Requires-Dist: torchaudio; extra == "torch-speech"
129
+ Requires-Dist: librosa; extra == "torch-speech"
130
+ Requires-Dist: pyctcdecode>=0.4.0; extra == "torch-speech"
131
+ Requires-Dist: phonemizer; extra == "torch-speech"
132
+ Requires-Dist: kenlm; extra == "torch-speech"
133
+ Provides-Extra: tf-speech
134
+ Requires-Dist: librosa; extra == "tf-speech"
135
+ Requires-Dist: pyctcdecode>=0.4.0; extra == "tf-speech"
136
+ Requires-Dist: phonemizer; extra == "tf-speech"
137
+ Requires-Dist: kenlm; extra == "tf-speech"
138
+ Provides-Extra: flax-speech
139
+ Requires-Dist: librosa; extra == "flax-speech"
140
+ Requires-Dist: pyctcdecode>=0.4.0; extra == "flax-speech"
141
+ Requires-Dist: phonemizer; extra == "flax-speech"
142
+ Requires-Dist: kenlm; extra == "flax-speech"
143
+ Provides-Extra: vision
144
+ Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "vision"
145
+ Provides-Extra: timm
146
+ Requires-Dist: timm!=1.0.18,<=1.0.19; extra == "timm"
147
+ Provides-Extra: torch-vision
148
+ Requires-Dist: torchvision; extra == "torch-vision"
149
+ Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "torch-vision"
150
+ Provides-Extra: natten
151
+ Requires-Dist: natten<0.15.0,>=0.14.6; extra == "natten"
152
+ Provides-Extra: codecarbon
153
+ Requires-Dist: codecarbon>=2.8.1; extra == "codecarbon"
154
+ Provides-Extra: video
155
+ Requires-Dist: av; extra == "video"
156
+ Provides-Extra: num2words
157
+ Requires-Dist: num2words; extra == "num2words"
158
+ Provides-Extra: sentencepiece
159
+ Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "sentencepiece"
160
+ Requires-Dist: protobuf; extra == "sentencepiece"
161
+ Provides-Extra: tiktoken
162
+ Requires-Dist: tiktoken; extra == "tiktoken"
163
+ Requires-Dist: blobfile; extra == "tiktoken"
164
+ Provides-Extra: mistral-common
165
+ Requires-Dist: mistral-common[opencv]>=1.6.3; extra == "mistral-common"
166
+ Provides-Extra: chat-template
167
+ Requires-Dist: jinja2>=3.1.0; extra == "chat-template"
168
+ Provides-Extra: testing
169
+ Requires-Dist: pytest>=7.2.0; extra == "testing"
170
+ Requires-Dist: pytest-asyncio; extra == "testing"
171
+ Requires-Dist: pytest-rich; extra == "testing"
172
+ Requires-Dist: pytest-xdist; extra == "testing"
173
+ Requires-Dist: pytest-order; extra == "testing"
174
+ Requires-Dist: pytest-rerunfailures; extra == "testing"
175
+ Requires-Dist: timeout-decorator; extra == "testing"
176
+ Requires-Dist: parameterized>=0.9; extra == "testing"
177
+ Requires-Dist: psutil; extra == "testing"
178
+ Requires-Dist: datasets>=2.15.0; extra == "testing"
179
+ Requires-Dist: dill<0.3.5; extra == "testing"
180
+ Requires-Dist: evaluate>=0.2.0; extra == "testing"
181
+ Requires-Dist: pytest-timeout; extra == "testing"
182
+ Requires-Dist: ruff==0.11.2; extra == "testing"
183
+ Requires-Dist: rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1; extra == "testing"
184
+ Requires-Dist: nltk<=3.8.1; extra == "testing"
185
+ Requires-Dist: GitPython<3.1.19; extra == "testing"
186
+ Requires-Dist: sacremoses; extra == "testing"
187
+ Requires-Dist: rjieba; extra == "testing"
188
+ Requires-Dist: beautifulsoup4; extra == "testing"
189
+ Requires-Dist: tensorboard; extra == "testing"
190
+ Requires-Dist: pydantic>=2; extra == "testing"
191
+ Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "testing"
192
+ Requires-Dist: sacrebleu<2.0.0,>=1.4.12; extra == "testing"
193
+ Requires-Dist: libcst; extra == "testing"
194
+ Requires-Dist: faiss-cpu; extra == "testing"
195
+ Requires-Dist: datasets>=2.15.0; extra == "testing"
196
+ Requires-Dist: cookiecutter==1.7.3; extra == "testing"
197
+ Requires-Dist: mistral-common[opencv]>=1.6.3; extra == "testing"
198
+ Provides-Extra: deepspeed-testing
199
+ Requires-Dist: deepspeed>=0.9.3; extra == "deepspeed-testing"
200
+ Requires-Dist: accelerate>=0.26.0; extra == "deepspeed-testing"
201
+ Requires-Dist: pytest>=7.2.0; extra == "deepspeed-testing"
202
+ Requires-Dist: pytest-asyncio; extra == "deepspeed-testing"
203
+ Requires-Dist: pytest-rich; extra == "deepspeed-testing"
204
+ Requires-Dist: pytest-xdist; extra == "deepspeed-testing"
205
+ Requires-Dist: pytest-order; extra == "deepspeed-testing"
206
+ Requires-Dist: pytest-rerunfailures; extra == "deepspeed-testing"
207
+ Requires-Dist: timeout-decorator; extra == "deepspeed-testing"
208
+ Requires-Dist: parameterized>=0.9; extra == "deepspeed-testing"
209
+ Requires-Dist: psutil; extra == "deepspeed-testing"
210
+ Requires-Dist: datasets>=2.15.0; extra == "deepspeed-testing"
211
+ Requires-Dist: dill<0.3.5; extra == "deepspeed-testing"
212
+ Requires-Dist: evaluate>=0.2.0; extra == "deepspeed-testing"
213
+ Requires-Dist: pytest-timeout; extra == "deepspeed-testing"
214
+ Requires-Dist: ruff==0.11.2; extra == "deepspeed-testing"
215
+ Requires-Dist: rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1; extra == "deepspeed-testing"
216
+ Requires-Dist: nltk<=3.8.1; extra == "deepspeed-testing"
217
+ Requires-Dist: GitPython<3.1.19; extra == "deepspeed-testing"
218
+ Requires-Dist: sacremoses; extra == "deepspeed-testing"
219
+ Requires-Dist: rjieba; extra == "deepspeed-testing"
220
+ Requires-Dist: beautifulsoup4; extra == "deepspeed-testing"
221
+ Requires-Dist: tensorboard; extra == "deepspeed-testing"
222
+ Requires-Dist: pydantic>=2; extra == "deepspeed-testing"
223
+ Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "deepspeed-testing"
224
+ Requires-Dist: sacrebleu<2.0.0,>=1.4.12; extra == "deepspeed-testing"
225
+ Requires-Dist: libcst; extra == "deepspeed-testing"
226
+ Requires-Dist: faiss-cpu; extra == "deepspeed-testing"
227
+ Requires-Dist: datasets>=2.15.0; extra == "deepspeed-testing"
228
+ Requires-Dist: cookiecutter==1.7.3; extra == "deepspeed-testing"
229
+ Requires-Dist: mistral-common[opencv]>=1.6.3; extra == "deepspeed-testing"
230
+ Requires-Dist: optuna; extra == "deepspeed-testing"
231
+ Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "deepspeed-testing"
232
+ Requires-Dist: protobuf; extra == "deepspeed-testing"
233
+ Provides-Extra: ruff
234
+ Requires-Dist: ruff==0.11.2; extra == "ruff"
235
+ Provides-Extra: quality
236
+ Requires-Dist: datasets>=2.15.0; extra == "quality"
237
+ Requires-Dist: ruff==0.11.2; extra == "quality"
238
+ Requires-Dist: GitPython<3.1.19; extra == "quality"
239
+ Requires-Dist: urllib3<2.0.0; extra == "quality"
240
+ Requires-Dist: libcst; extra == "quality"
241
+ Requires-Dist: rich; extra == "quality"
242
+ Requires-Dist: pandas<2.3.0; extra == "quality"
243
+ Provides-Extra: all
244
+ Requires-Dist: tensorflow<2.16,>2.9; extra == "all"
245
+ Requires-Dist: onnxconverter-common; extra == "all"
246
+ Requires-Dist: tf2onnx; extra == "all"
247
+ Requires-Dist: tensorflow-text<2.16; extra == "all"
248
+ Requires-Dist: keras-nlp<0.14.0,>=0.3.1; extra == "all"
249
+ Requires-Dist: torch>=2.2; extra == "all"
250
+ Requires-Dist: accelerate>=0.26.0; extra == "all"
251
+ Requires-Dist: jax<=0.4.13,>=0.4.1; extra == "all"
252
+ Requires-Dist: jaxlib<=0.4.13,>=0.4.1; extra == "all"
253
+ Requires-Dist: flax<=0.7.0,>=0.4.1; extra == "all"
254
+ Requires-Dist: optax<=0.1.4,>=0.0.8; extra == "all"
255
+ Requires-Dist: scipy<1.13.0; extra == "all"
256
+ Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "all"
257
+ Requires-Dist: protobuf; extra == "all"
258
+ Requires-Dist: tokenizers<=0.23.0,>=0.22.0; extra == "all"
259
+ Requires-Dist: torchaudio; extra == "all"
260
+ Requires-Dist: librosa; extra == "all"
261
+ Requires-Dist: pyctcdecode>=0.4.0; extra == "all"
262
+ Requires-Dist: phonemizer; extra == "all"
263
+ Requires-Dist: kenlm; extra == "all"
264
+ Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "all"
265
+ Requires-Dist: kernels<=0.9,>=0.6.1; extra == "all"
266
+ Requires-Dist: optuna; extra == "all"
267
+ Requires-Dist: ray[tune]>=2.7.0; extra == "all"
268
+ Requires-Dist: sigopt; extra == "all"
269
+ Requires-Dist: timm!=1.0.18,<=1.0.19; extra == "all"
270
+ Requires-Dist: torchvision; extra == "all"
271
+ Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "all"
272
+ Requires-Dist: codecarbon>=2.8.1; extra == "all"
273
+ Requires-Dist: accelerate>=0.26.0; extra == "all"
274
+ Requires-Dist: av; extra == "all"
275
+ Requires-Dist: num2words; extra == "all"
276
+ Requires-Dist: mistral-common[opencv]>=1.6.3; extra == "all"
277
+ Requires-Dist: jinja2>=3.1.0; extra == "all"
278
+ Provides-Extra: dev-torch
279
+ Requires-Dist: pytest>=7.2.0; extra == "dev-torch"
280
+ Requires-Dist: pytest-asyncio; extra == "dev-torch"
281
+ Requires-Dist: pytest-rich; extra == "dev-torch"
282
+ Requires-Dist: pytest-xdist; extra == "dev-torch"
283
+ Requires-Dist: pytest-order; extra == "dev-torch"
284
+ Requires-Dist: pytest-rerunfailures; extra == "dev-torch"
285
+ Requires-Dist: timeout-decorator; extra == "dev-torch"
286
+ Requires-Dist: parameterized>=0.9; extra == "dev-torch"
287
+ Requires-Dist: psutil; extra == "dev-torch"
288
+ Requires-Dist: datasets>=2.15.0; extra == "dev-torch"
289
+ Requires-Dist: dill<0.3.5; extra == "dev-torch"
290
+ Requires-Dist: evaluate>=0.2.0; extra == "dev-torch"
291
+ Requires-Dist: pytest-timeout; extra == "dev-torch"
292
+ Requires-Dist: ruff==0.11.2; extra == "dev-torch"
293
+ Requires-Dist: rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1; extra == "dev-torch"
294
+ Requires-Dist: nltk<=3.8.1; extra == "dev-torch"
295
+ Requires-Dist: GitPython<3.1.19; extra == "dev-torch"
296
+ Requires-Dist: sacremoses; extra == "dev-torch"
297
+ Requires-Dist: rjieba; extra == "dev-torch"
298
+ Requires-Dist: beautifulsoup4; extra == "dev-torch"
299
+ Requires-Dist: tensorboard; extra == "dev-torch"
300
+ Requires-Dist: pydantic>=2; extra == "dev-torch"
301
+ Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "dev-torch"
302
+ Requires-Dist: sacrebleu<2.0.0,>=1.4.12; extra == "dev-torch"
303
+ Requires-Dist: libcst; extra == "dev-torch"
304
+ Requires-Dist: faiss-cpu; extra == "dev-torch"
305
+ Requires-Dist: datasets>=2.15.0; extra == "dev-torch"
306
+ Requires-Dist: cookiecutter==1.7.3; extra == "dev-torch"
307
+ Requires-Dist: mistral-common[opencv]>=1.6.3; extra == "dev-torch"
308
+ Requires-Dist: torch>=2.2; extra == "dev-torch"
309
+ Requires-Dist: accelerate>=0.26.0; extra == "dev-torch"
310
+ Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "dev-torch"
311
+ Requires-Dist: protobuf; extra == "dev-torch"
312
+ Requires-Dist: tokenizers<=0.23.0,>=0.22.0; extra == "dev-torch"
313
+ Requires-Dist: torchaudio; extra == "dev-torch"
314
+ Requires-Dist: librosa; extra == "dev-torch"
315
+ Requires-Dist: pyctcdecode>=0.4.0; extra == "dev-torch"
316
+ Requires-Dist: phonemizer; extra == "dev-torch"
317
+ Requires-Dist: kenlm; extra == "dev-torch"
318
+ Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "dev-torch"
319
+ Requires-Dist: kernels<=0.9,>=0.6.1; extra == "dev-torch"
320
+ Requires-Dist: optuna; extra == "dev-torch"
321
+ Requires-Dist: ray[tune]>=2.7.0; extra == "dev-torch"
322
+ Requires-Dist: sigopt; extra == "dev-torch"
323
+ Requires-Dist: timm!=1.0.18,<=1.0.19; extra == "dev-torch"
324
+ Requires-Dist: torchvision; extra == "dev-torch"
325
+ Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "dev-torch"
326
+ Requires-Dist: codecarbon>=2.8.1; extra == "dev-torch"
327
+ Requires-Dist: datasets>=2.15.0; extra == "dev-torch"
328
+ Requires-Dist: ruff==0.11.2; extra == "dev-torch"
329
+ Requires-Dist: GitPython<3.1.19; extra == "dev-torch"
330
+ Requires-Dist: urllib3<2.0.0; extra == "dev-torch"
331
+ Requires-Dist: libcst; extra == "dev-torch"
332
+ Requires-Dist: rich; extra == "dev-torch"
333
+ Requires-Dist: pandas<2.3.0; extra == "dev-torch"
334
+ Requires-Dist: fugashi>=1.0; extra == "dev-torch"
335
+ Requires-Dist: ipadic<2.0,>=1.0.0; extra == "dev-torch"
336
+ Requires-Dist: unidic_lite>=1.0.7; extra == "dev-torch"
337
+ Requires-Dist: unidic>=1.0.2; extra == "dev-torch"
338
+ Requires-Dist: sudachipy>=0.6.6; extra == "dev-torch"
339
+ Requires-Dist: sudachidict_core>=20220729; extra == "dev-torch"
340
+ Requires-Dist: rhoknp<1.3.1,>=1.1.0; extra == "dev-torch"
341
+ Requires-Dist: scikit-learn; extra == "dev-torch"
342
+ Requires-Dist: cookiecutter==1.7.3; extra == "dev-torch"
343
+ Requires-Dist: onnxruntime>=1.4.0; extra == "dev-torch"
344
+ Requires-Dist: onnxruntime-tools>=1.4.2; extra == "dev-torch"
345
+ Requires-Dist: num2words; extra == "dev-torch"
346
+ Provides-Extra: dev-tensorflow
347
+ Requires-Dist: pytest>=7.2.0; extra == "dev-tensorflow"
348
+ Requires-Dist: pytest-asyncio; extra == "dev-tensorflow"
349
+ Requires-Dist: pytest-rich; extra == "dev-tensorflow"
350
+ Requires-Dist: pytest-xdist; extra == "dev-tensorflow"
351
+ Requires-Dist: pytest-order; extra == "dev-tensorflow"
352
+ Requires-Dist: pytest-rerunfailures; extra == "dev-tensorflow"
353
+ Requires-Dist: timeout-decorator; extra == "dev-tensorflow"
354
+ Requires-Dist: parameterized>=0.9; extra == "dev-tensorflow"
355
+ Requires-Dist: psutil; extra == "dev-tensorflow"
356
+ Requires-Dist: datasets>=2.15.0; extra == "dev-tensorflow"
357
+ Requires-Dist: dill<0.3.5; extra == "dev-tensorflow"
358
+ Requires-Dist: evaluate>=0.2.0; extra == "dev-tensorflow"
359
+ Requires-Dist: pytest-timeout; extra == "dev-tensorflow"
360
+ Requires-Dist: ruff==0.11.2; extra == "dev-tensorflow"
361
+ Requires-Dist: rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1; extra == "dev-tensorflow"
362
+ Requires-Dist: nltk<=3.8.1; extra == "dev-tensorflow"
363
+ Requires-Dist: GitPython<3.1.19; extra == "dev-tensorflow"
364
+ Requires-Dist: sacremoses; extra == "dev-tensorflow"
365
+ Requires-Dist: rjieba; extra == "dev-tensorflow"
366
+ Requires-Dist: beautifulsoup4; extra == "dev-tensorflow"
367
+ Requires-Dist: tensorboard; extra == "dev-tensorflow"
368
+ Requires-Dist: pydantic>=2; extra == "dev-tensorflow"
369
+ Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "dev-tensorflow"
370
+ Requires-Dist: sacrebleu<2.0.0,>=1.4.12; extra == "dev-tensorflow"
371
+ Requires-Dist: libcst; extra == "dev-tensorflow"
372
+ Requires-Dist: faiss-cpu; extra == "dev-tensorflow"
373
+ Requires-Dist: datasets>=2.15.0; extra == "dev-tensorflow"
374
+ Requires-Dist: cookiecutter==1.7.3; extra == "dev-tensorflow"
375
+ Requires-Dist: mistral-common[opencv]>=1.6.3; extra == "dev-tensorflow"
376
+ Requires-Dist: tensorflow<2.16,>2.9; extra == "dev-tensorflow"
377
+ Requires-Dist: onnxconverter-common; extra == "dev-tensorflow"
378
+ Requires-Dist: tf2onnx; extra == "dev-tensorflow"
379
+ Requires-Dist: tensorflow-text<2.16; extra == "dev-tensorflow"
380
+ Requires-Dist: keras-nlp<0.14.0,>=0.3.1; extra == "dev-tensorflow"
381
+ Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "dev-tensorflow"
382
+ Requires-Dist: protobuf; extra == "dev-tensorflow"
383
+ Requires-Dist: tokenizers<=0.23.0,>=0.22.0; extra == "dev-tensorflow"
384
+ Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "dev-tensorflow"
385
+ Requires-Dist: datasets>=2.15.0; extra == "dev-tensorflow"
386
+ Requires-Dist: ruff==0.11.2; extra == "dev-tensorflow"
387
+ Requires-Dist: GitPython<3.1.19; extra == "dev-tensorflow"
388
+ Requires-Dist: urllib3<2.0.0; extra == "dev-tensorflow"
389
+ Requires-Dist: libcst; extra == "dev-tensorflow"
390
+ Requires-Dist: rich; extra == "dev-tensorflow"
391
+ Requires-Dist: pandas<2.3.0; extra == "dev-tensorflow"
392
+ Requires-Dist: scikit-learn; extra == "dev-tensorflow"
393
+ Requires-Dist: cookiecutter==1.7.3; extra == "dev-tensorflow"
394
+ Requires-Dist: onnxconverter-common; extra == "dev-tensorflow"
395
+ Requires-Dist: tf2onnx; extra == "dev-tensorflow"
396
+ Requires-Dist: onnxruntime>=1.4.0; extra == "dev-tensorflow"
397
+ Requires-Dist: onnxruntime-tools>=1.4.2; extra == "dev-tensorflow"
398
+ Requires-Dist: librosa; extra == "dev-tensorflow"
399
+ Requires-Dist: pyctcdecode>=0.4.0; extra == "dev-tensorflow"
400
+ Requires-Dist: phonemizer; extra == "dev-tensorflow"
401
+ Requires-Dist: kenlm; extra == "dev-tensorflow"
402
+ Provides-Extra: dev
403
+ Requires-Dist: tensorflow<2.16,>2.9; extra == "dev"
404
+ Requires-Dist: onnxconverter-common; extra == "dev"
405
+ Requires-Dist: tf2onnx; extra == "dev"
406
+ Requires-Dist: tensorflow-text<2.16; extra == "dev"
407
+ Requires-Dist: keras-nlp<0.14.0,>=0.3.1; extra == "dev"
408
+ Requires-Dist: torch>=2.2; extra == "dev"
409
+ Requires-Dist: accelerate>=0.26.0; extra == "dev"
410
+ Requires-Dist: jax<=0.4.13,>=0.4.1; extra == "dev"
411
+ Requires-Dist: jaxlib<=0.4.13,>=0.4.1; extra == "dev"
412
+ Requires-Dist: flax<=0.7.0,>=0.4.1; extra == "dev"
413
+ Requires-Dist: optax<=0.1.4,>=0.0.8; extra == "dev"
414
+ Requires-Dist: scipy<1.13.0; extra == "dev"
415
+ Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "dev"
416
+ Requires-Dist: protobuf; extra == "dev"
417
+ Requires-Dist: tokenizers<=0.23.0,>=0.22.0; extra == "dev"
418
+ Requires-Dist: torchaudio; extra == "dev"
419
+ Requires-Dist: librosa; extra == "dev"
420
+ Requires-Dist: pyctcdecode>=0.4.0; extra == "dev"
421
+ Requires-Dist: phonemizer; extra == "dev"
422
+ Requires-Dist: kenlm; extra == "dev"
423
+ Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "dev"
424
+ Requires-Dist: kernels<=0.9,>=0.6.1; extra == "dev"
425
+ Requires-Dist: optuna; extra == "dev"
426
+ Requires-Dist: ray[tune]>=2.7.0; extra == "dev"
427
+ Requires-Dist: sigopt; extra == "dev"
428
+ Requires-Dist: timm!=1.0.18,<=1.0.19; extra == "dev"
429
+ Requires-Dist: torchvision; extra == "dev"
430
+ Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "dev"
431
+ Requires-Dist: codecarbon>=2.8.1; extra == "dev"
432
+ Requires-Dist: accelerate>=0.26.0; extra == "dev"
433
+ Requires-Dist: av; extra == "dev"
434
+ Requires-Dist: num2words; extra == "dev"
435
+ Requires-Dist: mistral-common[opencv]>=1.6.3; extra == "dev"
436
+ Requires-Dist: jinja2>=3.1.0; extra == "dev"
437
+ Requires-Dist: pytest>=7.2.0; extra == "dev"
438
+ Requires-Dist: pytest-asyncio; extra == "dev"
439
+ Requires-Dist: pytest-rich; extra == "dev"
440
+ Requires-Dist: pytest-xdist; extra == "dev"
441
+ Requires-Dist: pytest-order; extra == "dev"
442
+ Requires-Dist: pytest-rerunfailures; extra == "dev"
443
+ Requires-Dist: timeout-decorator; extra == "dev"
444
+ Requires-Dist: parameterized>=0.9; extra == "dev"
445
+ Requires-Dist: psutil; extra == "dev"
446
+ Requires-Dist: datasets>=2.15.0; extra == "dev"
447
+ Requires-Dist: dill<0.3.5; extra == "dev"
448
+ Requires-Dist: evaluate>=0.2.0; extra == "dev"
449
+ Requires-Dist: pytest-timeout; extra == "dev"
450
+ Requires-Dist: ruff==0.11.2; extra == "dev"
451
+ Requires-Dist: rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1; extra == "dev"
452
+ Requires-Dist: nltk<=3.8.1; extra == "dev"
453
+ Requires-Dist: GitPython<3.1.19; extra == "dev"
454
+ Requires-Dist: sacremoses; extra == "dev"
455
+ Requires-Dist: rjieba; extra == "dev"
456
+ Requires-Dist: beautifulsoup4; extra == "dev"
457
+ Requires-Dist: tensorboard; extra == "dev"
458
+ Requires-Dist: pydantic>=2; extra == "dev"
459
+ Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "dev"
460
+ Requires-Dist: sacrebleu<2.0.0,>=1.4.12; extra == "dev"
461
+ Requires-Dist: libcst; extra == "dev"
462
+ Requires-Dist: faiss-cpu; extra == "dev"
463
+ Requires-Dist: datasets>=2.15.0; extra == "dev"
464
+ Requires-Dist: cookiecutter==1.7.3; extra == "dev"
465
+ Requires-Dist: mistral-common[opencv]>=1.6.3; extra == "dev"
466
+ Requires-Dist: datasets>=2.15.0; extra == "dev"
467
+ Requires-Dist: ruff==0.11.2; extra == "dev"
468
+ Requires-Dist: GitPython<3.1.19; extra == "dev"
469
+ Requires-Dist: urllib3<2.0.0; extra == "dev"
470
+ Requires-Dist: libcst; extra == "dev"
471
+ Requires-Dist: rich; extra == "dev"
472
+ Requires-Dist: pandas<2.3.0; extra == "dev"
473
+ Requires-Dist: fugashi>=1.0; extra == "dev"
474
+ Requires-Dist: ipadic<2.0,>=1.0.0; extra == "dev"
475
+ Requires-Dist: unidic_lite>=1.0.7; extra == "dev"
476
+ Requires-Dist: unidic>=1.0.2; extra == "dev"
477
+ Requires-Dist: sudachipy>=0.6.6; extra == "dev"
478
+ Requires-Dist: sudachidict_core>=20220729; extra == "dev"
479
+ Requires-Dist: rhoknp<1.3.1,>=1.1.0; extra == "dev"
480
+ Requires-Dist: scikit-learn; extra == "dev"
481
+ Requires-Dist: cookiecutter==1.7.3; extra == "dev"
482
+ Provides-Extra: torchhub
483
+ Requires-Dist: filelock; extra == "torchhub"
484
+ Requires-Dist: huggingface-hub<1.0,>=0.34.0; extra == "torchhub"
485
+ Requires-Dist: importlib_metadata; extra == "torchhub"
486
+ Requires-Dist: numpy>=1.17; extra == "torchhub"
487
+ Requires-Dist: packaging>=20.0; extra == "torchhub"
488
+ Requires-Dist: protobuf; extra == "torchhub"
489
+ Requires-Dist: regex!=2019.12.17; extra == "torchhub"
490
+ Requires-Dist: requests; extra == "torchhub"
491
+ Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "torchhub"
492
+ Requires-Dist: torch>=2.2; extra == "torchhub"
493
+ Requires-Dist: tokenizers<=0.23.0,>=0.22.0; extra == "torchhub"
494
+ Requires-Dist: tqdm>=4.27; extra == "torchhub"
495
+ Provides-Extra: benchmark
496
+ Requires-Dist: optimum-benchmark>=0.3.0; extra == "benchmark"
497
+ Provides-Extra: open-telemetry
498
+ Requires-Dist: opentelemetry-api; extra == "open-telemetry"
499
+ Requires-Dist: opentelemetry-exporter-otlp; extra == "open-telemetry"
500
+ Requires-Dist: opentelemetry-sdk; extra == "open-telemetry"
501
+ Dynamic: author
502
+ Dynamic: author-email
503
+ Dynamic: classifier
504
+ Dynamic: description
505
+ Dynamic: description-content-type
506
+ Dynamic: home-page
507
+ Dynamic: keywords
508
+ Dynamic: license
509
+ Dynamic: license-file
510
+ Dynamic: provides-extra
511
+ Dynamic: requires-dist
512
+ Dynamic: requires-python
513
+ Dynamic: summary
514
+
515
+ <!---
516
+ Copyright 2020 The HuggingFace Team. All rights reserved.
517
+
518
+ Licensed under the Apache License, Version 2.0 (the "License");
519
+ you may not use this file except in compliance with the License.
520
+ You may obtain a copy of the License at
521
+
522
+ http://www.apache.org/licenses/LICENSE-2.0
523
+
524
+ Unless required by applicable law or agreed to in writing, software
525
+ distributed under the License is distributed on an "AS IS" BASIS,
526
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
527
+ See the License for the specific language governing permissions and
528
+ limitations under the License.
529
+ -->
530
+
531
+ <p align="center">
532
+ <picture>
533
+ <source media="(prefers-color-scheme: dark)" srcset="https://huggingface.co/datasets/huggingface/documentation-images/raw/main/transformers-logo-dark.svg">
534
+ <source media="(prefers-color-scheme: light)" srcset="https://huggingface.co/datasets/huggingface/documentation-images/raw/main/transformers-logo-light.svg">
535
+ <img alt="Hugging Face Transformers Library" src="https://huggingface.co/datasets/huggingface/documentation-images/raw/main/transformers-logo-light.svg" width="352" height="59" style="max-width: 100%;">
536
+ </picture>
537
+ <br/>
538
+ <br/>
539
+ </p>
540
+
541
+ <p align="center">
542
+ <a href="https://huggingface.co/models"><img alt="Checkpoints on Hub" src="https://img.shields.io/endpoint?url=https://huggingface.co/api/shields/models&color=brightgreen"></a>
543
+ <a href="https://circleci.com/gh/huggingface/transformers"><img alt="Build" src="https://img.shields.io/circleci/build/github/huggingface/transformers/main"></a>
544
+ <a href="https://github.com/huggingface/transformers/blob/main/LICENSE"><img alt="GitHub" src="https://img.shields.io/github/license/huggingface/transformers.svg?color=blue"></a>
545
+ <a href="https://huggingface.co/docs/transformers/index"><img alt="Documentation" src="https://img.shields.io/website/http/huggingface.co/docs/transformers/index.svg?down_color=red&down_message=offline&up_message=online"></a>
546
+ <a href="https://github.com/huggingface/transformers/releases"><img alt="GitHub release" src="https://img.shields.io/github/release/huggingface/transformers.svg"></a>
547
+ <a href="https://github.com/huggingface/transformers/blob/main/CODE_OF_CONDUCT.md"><img alt="Contributor Covenant" src="https://img.shields.io/badge/Contributor%20Covenant-v2.0%20adopted-ff69b4.svg"></a>
548
+ <a href="https://zenodo.org/badge/latestdoi/155220641"><img src="https://zenodo.org/badge/155220641.svg" alt="DOI"></a>
549
+ </p>
550
+
551
+ <h4 align="center">
552
+ <p>
553
+ <b>English</b> |
554
+ <a href="https://github.com/huggingface/transformers/blob/main/i18n/README_zh-hans.md">简体中文</a> |
555
+ <a href="https://github.com/huggingface/transformers/blob/main/i18n/README_zh-hant.md">繁體中文</a> |
556
+ <a href="https://github.com/huggingface/transformers/blob/main/i18n/README_ko.md">한국어</a> |
557
+ <a href="https://github.com/huggingface/transformers/blob/main/i18n/README_es.md">Español</a> |
558
+ <a href="https://github.com/huggingface/transformers/blob/main/i18n/README_ja.md">日本語</a> |
559
+ <a href="https://github.com/huggingface/transformers/blob/main/i18n/README_hd.md">हिन्दी</a> |
560
+ <a href="https://github.com/huggingface/transformers/blob/main/i18n/README_ru.md">Русский</a> |
561
+ <a href="https://github.com/huggingface/transformers/blob/main/i18n/README_pt-br.md">Português</a> |
562
+ <a href="https://github.com/huggingface/transformers/blob/main/i18n/README_te.md">తెలుగు</a> |
563
+ <a href="https://github.com/huggingface/transformers/blob/main/i18n/README_fr.md">Français</a> |
564
+ <a href="https://github.com/huggingface/transformers/blob/main/i18n/README_de.md">Deutsch</a> |
565
+ <a href="https://github.com/huggingface/transformers/blob/main/i18n/README_vi.md">Tiếng Việt</a> |
566
+ <a href="https://github.com/huggingface/transformers/blob/main/i18n/README_ar.md">العربية</a> |
567
+ <a href="https://github.com/huggingface/transformers/blob/main/i18n/README_ur.md">اردو</a> |
568
+ </p>
569
+ </h4>
570
+
571
+ <h3 align="center">
572
+ <p>State-of-the-art pretrained models for inference and training</p>
573
+ </h3>
574
+
575
+ <h3 align="center">
576
+ <img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/transformers/transformers_as_a_model_definition.png"/>
577
+ </h3>
578
+
579
+
580
+ Transformers acts as the model-definition framework for state-of-the-art machine learning models in text, computer
581
+ vision, audio, video, and multimodal models, for both inference and training.
582
+
583
+ It centralizes the model definition so that this definition is agreed upon across the ecosystem. `transformers` is the
584
+ pivot across frameworks: if a model definition is supported, it will be compatible with the majority of training
585
+ frameworks (Axolotl, Unsloth, DeepSpeed, FSDP, PyTorch-Lightning, ...), inference engines (vLLM, SGLang, TGI, ...),
586
+ and adjacent modeling libraries (llama.cpp, mlx, ...) which leverage the model definition from `transformers`.
587
+
588
+ We pledge to help support new state-of-the-art models and democratize their usage by having their model definition be
589
+ simple, customizable, and efficient.
590
+
591
+ There are over 1M+ Transformers [model checkpoints](https://huggingface.co/models?library=transformers&sort=trending) on the [Hugging Face Hub](https://huggingface.co/models) you can use.
592
+
593
+ Explore the [Hub](https://huggingface.co/) today to find a model and use Transformers to help you get started right away.
594
+
595
+ ## Installation
596
+
597
+ Transformers works with Python 3.9+, [PyTorch](https://pytorch.org/get-started/locally/) 2.1+, [TensorFlow](https://www.tensorflow.org/install/pip) 2.6+, and [Flax](https://flax.readthedocs.io/en/latest/) 0.4.1+.
598
+
599
+ Create and activate a virtual environment with [venv](https://docs.python.org/3/library/venv.html) or [uv](https://docs.astral.sh/uv/), a fast Rust-based Python package and project manager.
600
+
601
+ ```shell
602
+ # venv
603
+ python -m venv .my-env
604
+ source .my-env/bin/activate
605
+ # uv
606
+ uv venv .my-env
607
+ source .my-env/bin/activate
608
+ ```
609
+
610
+ Install Transformers in your virtual environment.
611
+
612
+ ```shell
613
+ # pip
614
+ pip install "transformers[torch]"
615
+
616
+ # uv
617
+ uv pip install "transformers[torch]"
618
+ ```
619
+
620
+ Install Transformers from source if you want the latest changes in the library or are interested in contributing. However, the *latest* version may not be stable. Feel free to open an [issue](https://github.com/huggingface/transformers/issues) if you encounter an error.
621
+
622
+ ```shell
623
+ git clone https://github.com/huggingface/transformers.git
624
+ cd transformers
625
+
626
+ # pip
627
+ pip install .[torch]
628
+
629
+ # uv
630
+ uv pip install .[torch]
631
+ ```
632
+
633
+ ## Quickstart
634
+
635
+ Get started with Transformers right away with the [Pipeline](https://huggingface.co/docs/transformers/pipeline_tutorial) API. The `Pipeline` is a high-level inference class that supports text, audio, vision, and multimodal tasks. It handles preprocessing the input and returns the appropriate output.
636
+
637
+ Instantiate a pipeline and specify the model to use for text generation. The model is downloaded and cached so you can easily reuse it again. Finally, pass some text to prompt the model.
638
+
639
+ ```py
640
+ from transformers import pipeline
641
+
642
+ pipeline = pipeline(task="text-generation", model="Qwen/Qwen2.5-1.5B")
643
+ pipeline("the secret to baking a really good cake is ")
644
+ [{'generated_text': 'the secret to baking a really good cake is 1) to use the right ingredients and 2) to follow the recipe exactly. the recipe for the cake is as follows: 1 cup of sugar, 1 cup of flour, 1 cup of milk, 1 cup of butter, 1 cup of eggs, 1 cup of chocolate chips. if you want to make 2 cakes, how much sugar do you need? To make 2 cakes, you will need 2 cups of sugar.'}]
645
+ ```
646
+
647
+ To chat with a model, the usage pattern is the same. The only difference is you need to construct a chat history (the input to `Pipeline`) between you and the system.
648
+
649
+ > [!TIP]
650
+ > You can also chat with a model directly from the command line.
651
+ > ```shell
652
+ > transformers chat Qwen/Qwen2.5-0.5B-Instruct
653
+ > ```
654
+
655
+ ```py
656
+ import torch
657
+ from transformers import pipeline
658
+
659
+ chat = [
660
+ {"role": "system", "content": "You are a sassy, wise-cracking robot as imagined by Hollywood circa 1986."},
661
+ {"role": "user", "content": "Hey, can you tell me any fun things to do in New York?"}
662
+ ]
663
+
664
+ pipeline = pipeline(task="text-generation", model="meta-llama/Meta-Llama-3-8B-Instruct", dtype=torch.bfloat16, device_map="auto")
665
+ response = pipeline(chat, max_new_tokens=512)
666
+ print(response[0]["generated_text"][-1]["content"])
667
+ ```
668
+
669
+ Expand the examples below to see how `Pipeline` works for different modalities and tasks.
670
+
671
+ <details>
672
+ <summary>Automatic speech recognition</summary>
673
+
674
+ ```py
675
+ from transformers import pipeline
676
+
677
+ pipeline = pipeline(task="automatic-speech-recognition", model="openai/whisper-large-v3")
678
+ pipeline("https://huggingface.co/datasets/Narsil/asr_dummy/resolve/main/mlk.flac")
679
+ {'text': ' I have a dream that one day this nation will rise up and live out the true meaning of its creed.'}
680
+ ```
681
+
682
+ </details>
683
+
684
+ <details>
685
+ <summary>Image classification</summary>
686
+
687
+ <h3 align="center">
688
+ <a><img src="https://huggingface.co/datasets/Narsil/image_dummy/raw/main/parrots.png"></a>
689
+ </h3>
690
+
691
+ ```py
692
+ from transformers import pipeline
693
+
694
+ pipeline = pipeline(task="image-classification", model="facebook/dinov2-small-imagenet1k-1-layer")
695
+ pipeline("https://huggingface.co/datasets/Narsil/image_dummy/raw/main/parrots.png")
696
+ [{'label': 'macaw', 'score': 0.997848391532898},
697
+ {'label': 'sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita',
698
+ 'score': 0.0016551691805943847},
699
+ {'label': 'lorikeet', 'score': 0.00018523589824326336},
700
+ {'label': 'African grey, African gray, Psittacus erithacus',
701
+ 'score': 7.85409429227002e-05},
702
+ {'label': 'quail', 'score': 5.502637941390276e-05}]
703
+ ```
704
+
705
+ </details>
706
+
707
+ <details>
708
+ <summary>Visual question answering</summary>
709
+
710
+
711
+ <h3 align="center">
712
+ <a><img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/transformers/tasks/idefics-few-shot.jpg"></a>
713
+ </h3>
714
+
715
+ ```py
716
+ from transformers import pipeline
717
+
718
+ pipeline = pipeline(task="visual-question-answering", model="Salesforce/blip-vqa-base")
719
+ pipeline(
720
+ image="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/transformers/tasks/idefics-few-shot.jpg",
721
+ question="What is in the image?",
722
+ )
723
+ [{'answer': 'statue of liberty'}]
724
+ ```
725
+
726
+ </details>
727
+
728
+ ## Why should I use Transformers?
729
+
730
+ 1. Easy-to-use state-of-the-art models:
731
+ - High performance on natural language understanding & generation, computer vision, audio, video, and multimodal tasks.
732
+ - Low barrier to entry for researchers, engineers, and developers.
733
+ - Few user-facing abstractions with just three classes to learn.
734
+ - A unified API for using all our pretrained models.
735
+
736
+ 1. Lower compute costs, smaller carbon footprint:
737
+ - Share trained models instead of training from scratch.
738
+ - Reduce compute time and production costs.
739
+ - Dozens of model architectures with 1M+ pretrained checkpoints across all modalities.
740
+
741
+ 1. Choose the right framework for every part of a model's lifetime:
742
+ - Train state-of-the-art models in 3 lines of code.
743
+ - Move a single model between PyTorch/JAX/TF2.0 frameworks at will.
744
+ - Pick the right framework for training, evaluation, and production.
745
+
746
+ 1. Easily customize a model or an example to your needs:
747
+ - We provide examples for each architecture to reproduce the results published by its original authors.
748
+ - Model internals are exposed as consistently as possible.
749
+ - Model files can be used independently of the library for quick experiments.
750
+
751
+ <a target="_blank" href="https://huggingface.co/enterprise">
752
+ <img alt="Hugging Face Enterprise Hub" src="https://github.com/user-attachments/assets/247fb16d-d251-4583-96c4-d3d76dda4925">
753
+ </a><br>
754
+
755
+ ## Why shouldn't I use Transformers?
756
+
757
+ - This library is not a modular toolbox of building blocks for neural nets. The code in the model files is not refactored with additional abstractions on purpose, so that researchers can quickly iterate on each of the models without diving into additional abstractions/files.
758
+ - The training API is optimized to work with PyTorch models provided by Transformers. For generic machine learning loops, you should use another library like [Accelerate](https://huggingface.co/docs/accelerate).
759
+ - The [example scripts](https://github.com/huggingface/transformers/tree/main/examples) are only *examples*. They may not necessarily work out-of-the-box on your specific use case and you'll need to adapt the code for it to work.
760
+
761
+ ## 100 projects using Transformers
762
+
763
+ Transformers is more than a toolkit to use pretrained models, it's a community of projects built around it and the
764
+ Hugging Face Hub. We want Transformers to enable developers, researchers, students, professors, engineers, and anyone
765
+ else to build their dream projects.
766
+
767
+ In order to celebrate Transformers 100,000 stars, we wanted to put the spotlight on the
768
+ community with the [awesome-transformers](./awesome-transformers.md) page which lists 100
769
+ incredible projects built with Transformers.
770
+
771
+ If you own or use a project that you believe should be part of the list, please open a PR to add it!
772
+
773
+ ## Example models
774
+
775
+ You can test most of our models directly on their [Hub model pages](https://huggingface.co/models).
776
+
777
+ Expand each modality below to see a few example models for various use cases.
778
+
779
+ <details>
780
+ <summary>Audio</summary>
781
+
782
+ - Audio classification with [Whisper](https://huggingface.co/openai/whisper-large-v3-turbo)
783
+ - Automatic speech recognition with [Moonshine](https://huggingface.co/UsefulSensors/moonshine)
784
+ - Keyword spotting with [Wav2Vec2](https://huggingface.co/superb/wav2vec2-base-superb-ks)
785
+ - Speech to speech generation with [Moshi](https://huggingface.co/kyutai/moshiko-pytorch-bf16)
786
+ - Text to audio with [MusicGen](https://huggingface.co/facebook/musicgen-large)
787
+ - Text to speech with [Bark](https://huggingface.co/suno/bark)
788
+
789
+ </details>
790
+
791
+ <details>
792
+ <summary>Computer vision</summary>
793
+
794
+ - Automatic mask generation with [SAM](https://huggingface.co/facebook/sam-vit-base)
795
+ - Depth estimation with [DepthPro](https://huggingface.co/apple/DepthPro-hf)
796
+ - Image classification with [DINO v2](https://huggingface.co/facebook/dinov2-base)
797
+ - Keypoint detection with [SuperPoint](https://huggingface.co/magic-leap-community/superpoint)
798
+ - Keypoint matching with [SuperGlue](https://huggingface.co/magic-leap-community/superglue_outdoor)
799
+ - Object detection with [RT-DETRv2](https://huggingface.co/PekingU/rtdetr_v2_r50vd)
800
+ - Pose Estimation with [VitPose](https://huggingface.co/usyd-community/vitpose-base-simple)
801
+ - Universal segmentation with [OneFormer](https://huggingface.co/shi-labs/oneformer_ade20k_swin_large)
802
+ - Video classification with [VideoMAE](https://huggingface.co/MCG-NJU/videomae-large)
803
+
804
+ </details>
805
+
806
+ <details>
807
+ <summary>Multimodal</summary>
808
+
809
+ - Audio or text to text with [Qwen2-Audio](https://huggingface.co/Qwen/Qwen2-Audio-7B)
810
+ - Document question answering with [LayoutLMv3](https://huggingface.co/microsoft/layoutlmv3-base)
811
+ - Image or text to text with [Qwen-VL](https://huggingface.co/Qwen/Qwen2.5-VL-3B-Instruct)
812
+ - Image captioning with [BLIP-2](https://huggingface.co/Salesforce/blip2-opt-2.7b)
813
+ - OCR-based document understanding with [GOT-OCR2](https://huggingface.co/stepfun-ai/GOT-OCR-2.0-hf)
814
+ - Table question answering with [TAPAS](https://huggingface.co/google/tapas-base)
815
+ - Unified multimodal understanding and generation with [Emu3](https://huggingface.co/BAAI/Emu3-Gen)
816
+ - Vision to text with [Llava-OneVision](https://huggingface.co/llava-hf/llava-onevision-qwen2-0.5b-ov-hf)
817
+ - Visual question answering with [Llava](https://huggingface.co/llava-hf/llava-1.5-7b-hf)
818
+ - Visual referring expression segmentation with [Kosmos-2](https://huggingface.co/microsoft/kosmos-2-patch14-224)
819
+
820
+ </details>
821
+
822
+ <details>
823
+ <summary>NLP</summary>
824
+
825
+ - Masked word completion with [ModernBERT](https://huggingface.co/answerdotai/ModernBERT-base)
826
+ - Named entity recognition with [Gemma](https://huggingface.co/google/gemma-2-2b)
827
+ - Question answering with [Mixtral](https://huggingface.co/mistralai/Mixtral-8x7B-v0.1)
828
+ - Summarization with [BART](https://huggingface.co/facebook/bart-large-cnn)
829
+ - Translation with [T5](https://huggingface.co/google-t5/t5-base)
830
+ - Text generation with [Llama](https://huggingface.co/meta-llama/Llama-3.2-1B)
831
+ - Text classification with [Qwen](https://huggingface.co/Qwen/Qwen2.5-0.5B)
832
+
833
+ </details>
834
+
835
+ ## Citation
836
+
837
+ We now have a [paper](https://www.aclweb.org/anthology/2020.emnlp-demos.6/) you can cite for the 🤗 Transformers library:
838
+ ```bibtex
839
+ @inproceedings{wolf-etal-2020-transformers,
840
+ title = "Transformers: State-of-the-Art Natural Language Processing",
841
+ author = "Thomas Wolf and Lysandre Debut and Victor Sanh and Julien Chaumond and Clement Delangue and Anthony Moi and Pierric Cistac and Tim Rault and Rémi Louf and Morgan Funtowicz and Joe Davison and Sam Shleifer and Patrick von Platen and Clara Ma and Yacine Jernite and Julien Plu and Canwen Xu and Teven Le Scao and Sylvain Gugger and Mariama Drame and Quentin Lhoest and Alexander M. Rush",
842
+ booktitle = "Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing: System Demonstrations",
843
+ month = oct,
844
+ year = "2020",
845
+ address = "Online",
846
+ publisher = "Association for Computational Linguistics",
847
+ url = "https://www.aclweb.org/anthology/2020.emnlp-demos.6",
848
+ pages = "38--45"
849
+ }
850
+ ```
phivenv/Lib/site-packages/transformers-4.56.1.dist-info/RECORD ADDED
The diff for this file is too large to render. See raw diff
 
phivenv/Lib/site-packages/transformers-4.56.1.dist-info/REQUESTED ADDED
File without changes
phivenv/Lib/site-packages/transformers-4.56.1.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (80.9.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
phivenv/Lib/site-packages/transformers-4.56.1.dist-info/entry_points.txt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ [console_scripts]
2
+ transformers = transformers.commands.transformers_cli:main
3
+ transformers-cli = transformers.commands.transformers_cli:main_cli
phivenv/Lib/site-packages/transformers-4.56.1.dist-info/licenses/LICENSE ADDED
@@ -0,0 +1,203 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Copyright 2018- The Hugging Face team. All rights reserved.
2
+
3
+ Apache License
4
+ Version 2.0, January 2004
5
+ http://www.apache.org/licenses/
6
+
7
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
8
+
9
+ 1. Definitions.
10
+
11
+ "License" shall mean the terms and conditions for use, reproduction,
12
+ and distribution as defined by Sections 1 through 9 of this document.
13
+
14
+ "Licensor" shall mean the copyright owner or entity authorized by
15
+ the copyright owner that is granting the License.
16
+
17
+ "Legal Entity" shall mean the union of the acting entity and all
18
+ other entities that control, are controlled by, or are under common
19
+ control with that entity. For the purposes of this definition,
20
+ "control" means (i) the power, direct or indirect, to cause the
21
+ direction or management of such entity, whether by contract or
22
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
23
+ outstanding shares, or (iii) beneficial ownership of such entity.
24
+
25
+ "You" (or "Your") shall mean an individual or Legal Entity
26
+ exercising permissions granted by this License.
27
+
28
+ "Source" form shall mean the preferred form for making modifications,
29
+ including but not limited to software source code, documentation
30
+ source, and configuration files.
31
+
32
+ "Object" form shall mean any form resulting from mechanical
33
+ transformation or translation of a Source form, including but
34
+ not limited to compiled object code, generated documentation,
35
+ and conversions to other media types.
36
+
37
+ "Work" shall mean the work of authorship, whether in Source or
38
+ Object form, made available under the License, as indicated by a
39
+ copyright notice that is included in or attached to the work
40
+ (an example is provided in the Appendix below).
41
+
42
+ "Derivative Works" shall mean any work, whether in Source or Object
43
+ form, that is based on (or derived from) the Work and for which the
44
+ editorial revisions, annotations, elaborations, or other modifications
45
+ represent, as a whole, an original work of authorship. For the purposes
46
+ of this License, Derivative Works shall not include works that remain
47
+ separable from, or merely link (or bind by name) to the interfaces of,
48
+ the Work and Derivative Works thereof.
49
+
50
+ "Contribution" shall mean any work of authorship, including
51
+ the original version of the Work and any modifications or additions
52
+ to that Work or Derivative Works thereof, that is intentionally
53
+ submitted to Licensor for inclusion in the Work by the copyright owner
54
+ or by an individual or Legal Entity authorized to submit on behalf of
55
+ the copyright owner. For the purposes of this definition, "submitted"
56
+ means any form of electronic, verbal, or written communication sent
57
+ to the Licensor or its representatives, including but not limited to
58
+ communication on electronic mailing lists, source code control systems,
59
+ and issue tracking systems that are managed by, or on behalf of, the
60
+ Licensor for the purpose of discussing and improving the Work, but
61
+ excluding communication that is conspicuously marked or otherwise
62
+ designated in writing by the copyright owner as "Not a Contribution."
63
+
64
+ "Contributor" shall mean Licensor and any individual or Legal Entity
65
+ on behalf of whom a Contribution has been received by Licensor and
66
+ subsequently incorporated within the Work.
67
+
68
+ 2. Grant of Copyright License. Subject to the terms and conditions of
69
+ this License, each Contributor hereby grants to You a perpetual,
70
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
71
+ copyright license to reproduce, prepare Derivative Works of,
72
+ publicly display, publicly perform, sublicense, and distribute the
73
+ Work and such Derivative Works in Source or Object form.
74
+
75
+ 3. Grant of Patent License. Subject to the terms and conditions of
76
+ this License, each Contributor hereby grants to You a perpetual,
77
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
78
+ (except as stated in this section) patent license to make, have made,
79
+ use, offer to sell, sell, import, and otherwise transfer the Work,
80
+ where such license applies only to those patent claims licensable
81
+ by such Contributor that are necessarily infringed by their
82
+ Contribution(s) alone or by combination of their Contribution(s)
83
+ with the Work to which such Contribution(s) was submitted. If You
84
+ institute patent litigation against any entity (including a
85
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
86
+ or a Contribution incorporated within the Work constitutes direct
87
+ or contributory patent infringement, then any patent licenses
88
+ granted to You under this License for that Work shall terminate
89
+ as of the date such litigation is filed.
90
+
91
+ 4. Redistribution. You may reproduce and distribute copies of the
92
+ Work or Derivative Works thereof in any medium, with or without
93
+ modifications, and in Source or Object form, provided that You
94
+ meet the following conditions:
95
+
96
+ (a) You must give any other recipients of the Work or
97
+ Derivative Works a copy of this License; and
98
+
99
+ (b) You must cause any modified files to carry prominent notices
100
+ stating that You changed the files; and
101
+
102
+ (c) You must retain, in the Source form of any Derivative Works
103
+ that You distribute, all copyright, patent, trademark, and
104
+ attribution notices from the Source form of the Work,
105
+ excluding those notices that do not pertain to any part of
106
+ the Derivative Works; and
107
+
108
+ (d) If the Work includes a "NOTICE" text file as part of its
109
+ distribution, then any Derivative Works that You distribute must
110
+ include a readable copy of the attribution notices contained
111
+ within such NOTICE file, excluding those notices that do not
112
+ pertain to any part of the Derivative Works, in at least one
113
+ of the following places: within a NOTICE text file distributed
114
+ as part of the Derivative Works; within the Source form or
115
+ documentation, if provided along with the Derivative Works; or,
116
+ within a display generated by the Derivative Works, if and
117
+ wherever such third-party notices normally appear. The contents
118
+ of the NOTICE file are for informational purposes only and
119
+ do not modify the License. You may add Your own attribution
120
+ notices within Derivative Works that You distribute, alongside
121
+ or as an addendum to the NOTICE text from the Work, provided
122
+ that such additional attribution notices cannot be construed
123
+ as modifying the License.
124
+
125
+ You may add Your own copyright statement to Your modifications and
126
+ may provide additional or different license terms and conditions
127
+ for use, reproduction, or distribution of Your modifications, or
128
+ for any such Derivative Works as a whole, provided Your use,
129
+ reproduction, and distribution of the Work otherwise complies with
130
+ the conditions stated in this License.
131
+
132
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
133
+ any Contribution intentionally submitted for inclusion in the Work
134
+ by You to the Licensor shall be under the terms and conditions of
135
+ this License, without any additional terms or conditions.
136
+ Notwithstanding the above, nothing herein shall supersede or modify
137
+ the terms of any separate license agreement you may have executed
138
+ with Licensor regarding such Contributions.
139
+
140
+ 6. Trademarks. This License does not grant permission to use the trade
141
+ names, trademarks, service marks, or product names of the Licensor,
142
+ except as required for reasonable and customary use in describing the
143
+ origin of the Work and reproducing the content of the NOTICE file.
144
+
145
+ 7. Disclaimer of Warranty. Unless required by applicable law or
146
+ agreed to in writing, Licensor provides the Work (and each
147
+ Contributor provides its Contributions) on an "AS IS" BASIS,
148
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
149
+ implied, including, without limitation, any warranties or conditions
150
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
151
+ PARTICULAR PURPOSE. You are solely responsible for determining the
152
+ appropriateness of using or redistributing the Work and assume any
153
+ risks associated with Your exercise of permissions under this License.
154
+
155
+ 8. Limitation of Liability. In no event and under no legal theory,
156
+ whether in tort (including negligence), contract, or otherwise,
157
+ unless required by applicable law (such as deliberate and grossly
158
+ negligent acts) or agreed to in writing, shall any Contributor be
159
+ liable to You for damages, including any direct, indirect, special,
160
+ incidental, or consequential damages of any character arising as a
161
+ result of this License or out of the use or inability to use the
162
+ Work (including but not limited to damages for loss of goodwill,
163
+ work stoppage, computer failure or malfunction, or any and all
164
+ other commercial damages or losses), even if such Contributor
165
+ has been advised of the possibility of such damages.
166
+
167
+ 9. Accepting Warranty or Additional Liability. While redistributing
168
+ the Work or Derivative Works thereof, You may choose to offer,
169
+ and charge a fee for, acceptance of support, warranty, indemnity,
170
+ or other liability obligations and/or rights consistent with this
171
+ License. However, in accepting such obligations, You may act only
172
+ on Your own behalf and on Your sole responsibility, not on behalf
173
+ of any other Contributor, and only if You agree to indemnify,
174
+ defend, and hold each Contributor harmless for any liability
175
+ incurred by, or claims asserted against, such Contributor by reason
176
+ of your accepting any such warranty or additional liability.
177
+
178
+ END OF TERMS AND CONDITIONS
179
+
180
+ APPENDIX: How to apply the Apache License to your work.
181
+
182
+ To apply the Apache License to your work, attach the following
183
+ boilerplate notice, with the fields enclosed by brackets "[]"
184
+ replaced with your own identifying information. (Don't include
185
+ the brackets!) The text should be enclosed in the appropriate
186
+ comment syntax for the file format. We also recommend that a
187
+ file or class name and description of purpose be included on the
188
+ same "printed page" as the copyright notice for easier
189
+ identification within third-party archives.
190
+
191
+ Copyright [yyyy] [name of copyright owner]
192
+
193
+ Licensed under the Apache License, Version 2.0 (the "License");
194
+ you may not use this file except in compliance with the License.
195
+ You may obtain a copy of the License at
196
+
197
+ http://www.apache.org/licenses/LICENSE-2.0
198
+
199
+ Unless required by applicable law or agreed to in writing, software
200
+ distributed under the License is distributed on an "AS IS" BASIS,
201
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
202
+ See the License for the specific language governing permissions and
203
+ limitations under the License.
phivenv/Lib/site-packages/transformers-4.56.1.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ transformers
phivenv/Lib/site-packages/transformers/__pycache__/__init__.cpython-39.pyc ADDED
Binary file (23.8 kB). View file
 
phivenv/Lib/site-packages/transformers/__pycache__/activations.cpython-39.pyc ADDED
Binary file (12.5 kB). View file
 
phivenv/Lib/site-packages/transformers/__pycache__/activations_tf.cpython-39.pyc ADDED
Binary file (4.55 kB). View file