ZTWHHH committed
Commit 5870fef · verified · 1 Parent(s): 3c17052

Add files using upload-large-folder tool

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the complete change set.

Files changed (50)
  1. .gitattributes +3 -0
  2. emu3/lib/python3.10/site-packages/anyio-4.6.2.post1.dist-info/INSTALLER +1 -0
  3. emu3/lib/python3.10/site-packages/anyio-4.6.2.post1.dist-info/RECORD +83 -0
  4. emu3/lib/python3.10/site-packages/gradio-4.44.0.dist-info/RECORD +0 -0
  5. emu3/lib/python3.10/site-packages/gradio-4.44.0.dist-info/REQUESTED +0 -0
  6. emu3/lib/python3.10/site-packages/gradio-4.44.0.dist-info/WHEEL +4 -0
  7. emu3/lib/python3.10/site-packages/gradio-4.44.0.dist-info/entry_points.txt +3 -0
  8. emu3/lib/python3.10/site-packages/gradio-4.44.0.dist-info/licenses/LICENSE +201 -0
  9. emu3/lib/python3.10/site-packages/mpmath-1.3.0.dist-info/LICENSE +27 -0
  10. emu3/lib/python3.10/site-packages/mpmath-1.3.0.dist-info/METADATA +233 -0
  11. emu3/lib/python3.10/site-packages/mpmath-1.3.0.dist-info/RECORD +181 -0
  12. emu3/lib/python3.10/site-packages/mpmath-1.3.0.dist-info/REQUESTED +0 -0
  13. emu3/lib/python3.10/site-packages/ninja/__init__.py +55 -0
  14. emu3/lib/python3.10/site-packages/ninja/__main__.py +5 -0
  15. emu3/lib/python3.10/site-packages/ninja/__pycache__/__init__.cpython-310.pyc +0 -0
  16. emu3/lib/python3.10/site-packages/ninja/__pycache__/__main__.cpython-310.pyc +0 -0
  17. emu3/lib/python3.10/site-packages/ninja/__pycache__/_version.cpython-310.pyc +0 -0
  18. emu3/lib/python3.10/site-packages/ninja/__pycache__/ninja_syntax.cpython-310.pyc +0 -0
  19. emu3/lib/python3.10/site-packages/ninja/_version.py +16 -0
  20. emu3/lib/python3.10/site-packages/ninja/ninja_syntax.py +199 -0
  21. emu3/lib/python3.10/site-packages/ninja/py.typed +0 -0
  22. emu3/lib/python3.10/site-packages/pandas/compat/__pycache__/__init__.cpython-310.pyc +0 -0
  23. emu3/lib/python3.10/site-packages/pandas/compat/__pycache__/_constants.cpython-310.pyc +0 -0
  24. emu3/lib/python3.10/site-packages/pandas/compat/__pycache__/_optional.cpython-310.pyc +0 -0
  25. emu3/lib/python3.10/site-packages/pandas/compat/__pycache__/pickle_compat.cpython-310.pyc +0 -0
  26. emu3/lib/python3.10/site-packages/pandas/compat/__pycache__/pyarrow.cpython-310.pyc +0 -0
  27. emu3/lib/python3.10/site-packages/pandas/compat/numpy/__init__.py +53 -0
  28. emu3/lib/python3.10/site-packages/pandas/compat/numpy/__pycache__/__init__.cpython-310.pyc +0 -0
  29. emu3/lib/python3.10/site-packages/pandas/compat/numpy/__pycache__/function.cpython-310.pyc +0 -0
  30. emu3/lib/python3.10/site-packages/pandas/compat/numpy/function.py +418 -0
  31. emu3/lib/python3.10/site-packages/pandas/plotting/__init__.py +98 -0
  32. emu3/lib/python3.10/site-packages/pandas/plotting/__pycache__/__init__.cpython-310.pyc +0 -0
  33. emu3/lib/python3.10/site-packages/pandas/plotting/__pycache__/_core.cpython-310.pyc +0 -0
  34. emu3/lib/python3.10/site-packages/pandas/plotting/__pycache__/_misc.cpython-310.pyc +0 -0
  35. emu3/lib/python3.10/site-packages/pandas/plotting/_core.py +1946 -0
  36. emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/__init__.py +93 -0
  37. emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/__pycache__/__init__.cpython-310.pyc +0 -0
  38. emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/__pycache__/boxplot.cpython-310.pyc +0 -0
  39. emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/__pycache__/converter.cpython-310.pyc +0 -0
  40. emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/__pycache__/core.cpython-310.pyc +0 -0
  41. emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/__pycache__/groupby.cpython-310.pyc +0 -0
  42. emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/__pycache__/hist.cpython-310.pyc +0 -0
  43. emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/__pycache__/misc.cpython-310.pyc +0 -0
  44. emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/__pycache__/style.cpython-310.pyc +0 -0
  45. emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/__pycache__/timeseries.cpython-310.pyc +0 -0
  46. emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/__pycache__/tools.cpython-310.pyc +0 -0
  47. emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/boxplot.py +572 -0
  48. emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/converter.py +1139 -0
  49. emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/core.py +2125 -0
  50. emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/groupby.py +142 -0
.gitattributes CHANGED
@@ -1039,3 +1039,6 @@ emu3/lib/python3.10/site-packages/pandas/core/__pycache__/series.cpython-310.pyc
  infer_4_33_0/lib/python3.10/site-packages/pandas/tests/frame/__pycache__/test_constructors.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
  infer_4_33_0/lib/python3.10/site-packages/tokenizers.libs/libk5crypto-b1f99d5c.so.3.1 filter=lfs diff=lfs merge=lfs -text
  infer_4_33_0/lib/python3.10/site-packages/opencv_python.libs/libswresample-d02fa90a.so.4.7.100 filter=lfs diff=lfs merge=lfs -text
+ infer_4_33_0/lib/python3.10/site-packages/pyparsing/__pycache__/core.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+ evalkit_cambrian/lib/python3.10/site-packages/scipy.libs/libscipy_openblas-68440149.so filter=lfs diff=lfs merge=lfs -text
+ evalkit_tf446/lib/python3.10/site-packages/torch/lib/libtorch_cuda_linalg.so filter=lfs diff=lfs merge=lfs -text
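Aside (not part of the commit): the three added lines are standard Git LFS tracking attributes. An entry like the new libtorch_cuda_linalg.so pattern is normally produced by `git lfs track`, which appends exactly this kind of `filter=lfs diff=lfs merge=lfs -text` line to .gitattributes. A minimal Python sketch, assuming git-lfs is installed and the path below (taken from the diff) exists in the working tree:

import subprocess

# Appends "<pattern> filter=lfs diff=lfs merge=lfs -text" to .gitattributes,
# mirroring the lines added above.
subprocess.run(
    ["git", "lfs", "track",
     "evalkit_tf446/lib/python3.10/site-packages/torch/lib/libtorch_cuda_linalg.so"],
    check=True,
)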
emu3/lib/python3.10/site-packages/anyio-4.6.2.post1.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
+ pip
emu3/lib/python3.10/site-packages/anyio-4.6.2.post1.dist-info/RECORD ADDED
@@ -0,0 +1,83 @@
+ anyio-4.6.2.post1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+ anyio-4.6.2.post1.dist-info/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081
+ anyio-4.6.2.post1.dist-info/METADATA,sha256=-tUagL58CG66oT2eLY1593L_yXsIb6xW0oouVCQsE5c,4697
+ anyio-4.6.2.post1.dist-info/RECORD,,
+ anyio-4.6.2.post1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ anyio-4.6.2.post1.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+ anyio-4.6.2.post1.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39
+ anyio-4.6.2.post1.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6
+ anyio/__init__.py,sha256=myTIdg75VPwA-9L7BpislRQplJUPMeleUBHa4MyIruw,4315
+ anyio/__pycache__/__init__.cpython-310.pyc,,
+ anyio/__pycache__/from_thread.cpython-310.pyc,,
+ anyio/__pycache__/lowlevel.cpython-310.pyc,,
+ anyio/__pycache__/pytest_plugin.cpython-310.pyc,,
+ anyio/__pycache__/to_process.cpython-310.pyc,,
+ anyio/__pycache__/to_thread.cpython-310.pyc,,
+ anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ anyio/_backends/__pycache__/__init__.cpython-310.pyc,,
+ anyio/_backends/__pycache__/_asyncio.cpython-310.pyc,,
+ anyio/_backends/__pycache__/_trio.cpython-310.pyc,,
+ anyio/_backends/_asyncio.py,sha256=H3rMz2wquSxPnV4KIXpXGtBFWXk3jkFljrzvk7KWk4E,91497
+ anyio/_backends/_trio.py,sha256=wfgvQ2ut2CAxOjcuDLAdrucfEgc02XXRN9aC3IEBHdY,40311
+ anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ anyio/_core/__pycache__/__init__.cpython-310.pyc,,
+ anyio/_core/__pycache__/_eventloop.cpython-310.pyc,,
+ anyio/_core/__pycache__/_exceptions.cpython-310.pyc,,
+ anyio/_core/__pycache__/_fileio.cpython-310.pyc,,
+ anyio/_core/__pycache__/_resources.cpython-310.pyc,,
+ anyio/_core/__pycache__/_signals.cpython-310.pyc,,
+ anyio/_core/__pycache__/_sockets.cpython-310.pyc,,
+ anyio/_core/__pycache__/_streams.cpython-310.pyc,,
+ anyio/_core/__pycache__/_subprocesses.cpython-310.pyc,,
+ anyio/_core/__pycache__/_synchronization.cpython-310.pyc,,
+ anyio/_core/__pycache__/_tasks.cpython-310.pyc,,
+ anyio/_core/__pycache__/_testing.cpython-310.pyc,,
+ anyio/_core/__pycache__/_typedattr.cpython-310.pyc,,
+ anyio/_core/_eventloop.py,sha256=t_tAwBFPjF8jrZGjlJ6bbYy6KA3bjsbZxV9mvh9t1i0,4695
+ anyio/_core/_exceptions.py,sha256=NPxECdXkG4nk3NOCUeFmBEAgPhmj7Bzs4vFAKaW_vqw,2481
+ anyio/_core/_fileio.py,sha256=lbGk3xq_6DhvbEI8ykdFf2NjYnhuyc8hjXKZTLYkW4k,20961
+ anyio/_core/_resources.py,sha256=NbmU5O5UX3xEyACnkmYX28Fmwdl-f-ny0tHym26e0w0,435
+ anyio/_core/_signals.py,sha256=vulT1M1xdLYtAR-eY5TamIgaf1WTlOwOrMGwswlTTr8,905
+ anyio/_core/_sockets.py,sha256=iM3UeMU68n0PlQjl2U9HyiOpV26rnjqV4KBr_Fo2z1I,24293
+ anyio/_core/_streams.py,sha256=OnaKgoDD-FcMSwLvkoAUGP51sG2ZdRvMpxt9q2w1gYA,1804
+ anyio/_core/_subprocesses.py,sha256=WquR6sHrnaZofaeqnL8U4Yv___msVW_WqivleLHK4zI,7760
+ anyio/_core/_synchronization.py,sha256=UDsbG5f8jWsWkRxYUOKp_WOBWCI9-vBO6wBrsR6WNjA,20121
+ anyio/_core/_tasks.py,sha256=pvVEX2Fw159sf0ypAPerukKsZgRRwvFFedVW52nR2Vk,4764
+ anyio/_core/_testing.py,sha256=YUGwA5cgFFbUTv4WFd7cv_BSVr4ryTtPp8owQA3JdWE,2118
+ anyio/_core/_typedattr.py,sha256=P4ozZikn3-DbpoYcvyghS_FOYAgbmUxeoU8-L_07pZM,2508
+ anyio/abc/__init__.py,sha256=U44_s3BglL8BojWQiq0KuokvCqkunIp-ySH3GyRXxAc,2681
+ anyio/abc/__pycache__/__init__.cpython-310.pyc,,
+ anyio/abc/__pycache__/_eventloop.cpython-310.pyc,,
+ anyio/abc/__pycache__/_resources.cpython-310.pyc,,
+ anyio/abc/__pycache__/_sockets.cpython-310.pyc,,
+ anyio/abc/__pycache__/_streams.cpython-310.pyc,,
+ anyio/abc/__pycache__/_subprocesses.cpython-310.pyc,,
+ anyio/abc/__pycache__/_tasks.cpython-310.pyc,,
+ anyio/abc/__pycache__/_testing.cpython-310.pyc,,
+ anyio/abc/_eventloop.py,sha256=kdkLSnizMk3tPq61K109iPUQ6uXpvp1uNsj5aP1s0N8,9619
+ anyio/abc/_resources.py,sha256=DrYvkNN1hH6Uvv5_5uKySvDsnknGVDe8FCKfko0VtN8,783
+ anyio/abc/_sockets.py,sha256=KhWtJxan8jpBXKwPaFeQzI4iRXdFaOIn0HXtDZnaO7U,6262
+ anyio/abc/_streams.py,sha256=GzST5Q2zQmxVzdrAqtbSyHNxkPlIC9AzeZJg_YyPAXw,6598
+ anyio/abc/_subprocesses.py,sha256=cumAPJTktOQtw63IqG0lDpyZqu_l1EElvQHMiwJgL08,2067
+ anyio/abc/_tasks.py,sha256=0Jc6oIwUjMIVReehF6knOZyAqlgwDt4TP1NQkx4IQGw,2731
+ anyio/abc/_testing.py,sha256=tBJUzkSfOXJw23fe8qSJ03kJlShOYjjaEyFB6k6MYT8,1821
+ anyio/from_thread.py,sha256=dbi5TUH45_Sg_jZ8Vv1NJWVohe0WeQ_OaCvXIKveAGg,17478
+ anyio/lowlevel.py,sha256=nkgmW--SdxGVp0cmLUYazjkigveRm5HY7-gW8Bpp9oY,4169
+ anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ anyio/pytest_plugin.py,sha256=vjGhGRHD31OyMgJRFQrMvExhx3Ea8KbyDqYKmiSDdXA,6712
+ anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ anyio/streams/__pycache__/__init__.cpython-310.pyc,,
+ anyio/streams/__pycache__/buffered.cpython-310.pyc,,
+ anyio/streams/__pycache__/file.cpython-310.pyc,,
+ anyio/streams/__pycache__/memory.cpython-310.pyc,,
+ anyio/streams/__pycache__/stapled.cpython-310.pyc,,
+ anyio/streams/__pycache__/text.cpython-310.pyc,,
+ anyio/streams/__pycache__/tls.cpython-310.pyc,,
+ anyio/streams/buffered.py,sha256=UCldKC168YuLvT7n3HtNPnQ2iWAMSTYQWbZvzLwMwkM,4500
+ anyio/streams/file.py,sha256=6uoTNb5KbMoj-6gS3_xrrL8uZN8Q4iIvOS1WtGyFfKw,4383
+ anyio/streams/memory.py,sha256=j8AyOExK4-UPaon_Xbhwax25Vqs0DwFg3ZXc-EIiHjY,10550
+ anyio/streams/stapled.py,sha256=U09pCrmOw9kkNhe6tKopsm1QIMT1lFTFvtb-A7SIe4k,4302
+ anyio/streams/text.py,sha256=6x8w8xlfCZKTUWQoJiMPoMhSSJFUBRKgoBNSBtbd9yg,5094
+ anyio/streams/tls.py,sha256=m3AE2LVSpoRHSIwSoSCupiOVL54EvOFoY3CcwTxcZfg,12742
+ anyio/to_process.py,sha256=cR4n7TssbbJowE_9cWme49zaeuoBuMzqgZ6cBIs0YIs,9571
+ anyio/to_thread.py,sha256=WM2JQ2MbVsd5D5CM08bQiTwzZIvpsGjfH1Fy247KoDQ,2396
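Each RECORD row above follows the standard wheel RECORD format: relative path, urlsafe-base64 SHA-256 digest without padding, and file size in bytes (generated .pyc entries carry empty hash and size fields). A minimal sketch of how one entry can be recomputed for verification; the helper name and example path are illustrative, and the file must exist under the listed site-packages prefix:

import base64
import hashlib
from pathlib import Path

def record_entry(site_packages: Path, rel_path: str) -> str:
    """Return a 'path,sha256=<digest>,<size>' line for one installed file."""
    data = (site_packages / rel_path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=")
    return f"{rel_path},sha256={digest.decode()},{len(data)}"

# e.g. record_entry(Path("emu3/lib/python3.10/site-packages"), "anyio/py.typed")
# should reproduce the corresponding RECORD line above.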
emu3/lib/python3.10/site-packages/gradio-4.44.0.dist-info/RECORD ADDED
The diff for this file is too large to render. See raw diff
 
emu3/lib/python3.10/site-packages/gradio-4.44.0.dist-info/REQUESTED ADDED
File without changes
emu3/lib/python3.10/site-packages/gradio-4.44.0.dist-info/WHEEL ADDED
@@ -0,0 +1,4 @@
+ Wheel-Version: 1.0
+ Generator: hatchling 1.25.0
+ Root-Is-Purelib: true
+ Tag: py3-none-any
emu3/lib/python3.10/site-packages/gradio-4.44.0.dist-info/entry_points.txt ADDED
@@ -0,0 +1,3 @@
+ [console_scripts]
+ gradio = gradio.cli:cli
+ upload_theme = gradio.themes.upload_theme:main
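The two console_scripts entries above are what install the `gradio` and `upload_theme` commands. A small standard-library sketch of how such entries are discovered and resolved at runtime, assuming gradio 4.44.0 is installed in the active environment:

from importlib.metadata import entry_points

for ep in entry_points(group="console_scripts"):
    if ep.name in {"gradio", "upload_theme"}:
        print(f"{ep.name} -> {ep.value}")  # e.g. gradio -> gradio.cli:cli
        # ep.load() would import gradio.cli and return the `cli` callable.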
emu3/lib/python3.10/site-packages/gradio-4.44.0.dist-info/licenses/LICENSE ADDED
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
emu3/lib/python3.10/site-packages/mpmath-1.3.0.dist-info/LICENSE ADDED
@@ -0,0 +1,27 @@
+ Copyright (c) 2005-2021 Fredrik Johansson and mpmath contributors
+
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+
+ a. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ b. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ c. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
+ ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+ DAMAGE.
emu3/lib/python3.10/site-packages/mpmath-1.3.0.dist-info/METADATA ADDED
@@ -0,0 +1,233 @@
+ Metadata-Version: 2.1
+ Name: mpmath
+ Version: 1.3.0
+ Summary: Python library for arbitrary-precision floating-point arithmetic
+ Home-page: http://mpmath.org/
+ Author: Fredrik Johansson
+ Author-email: [email protected]
+ License: BSD
+ Project-URL: Source, https://github.com/fredrik-johansson/mpmath
+ Project-URL: Tracker, https://github.com/fredrik-johansson/mpmath/issues
+ Project-URL: Documentation, http://mpmath.org/doc/current/
+ Classifier: License :: OSI Approved :: BSD License
+ Classifier: Topic :: Scientific/Engineering :: Mathematics
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
+ Classifier: Programming Language :: Python
+ Classifier: Programming Language :: Python :: 2
+ Classifier: Programming Language :: Python :: 2.7
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.5
+ Classifier: Programming Language :: Python :: 3.6
+ Classifier: Programming Language :: Python :: 3.7
+ Classifier: Programming Language :: Python :: 3.8
+ Classifier: Programming Language :: Python :: 3.9
+ Classifier: Programming Language :: Python :: Implementation :: CPython
+ Classifier: Programming Language :: Python :: Implementation :: PyPy
+ License-File: LICENSE
+ Provides-Extra: develop
+ Requires-Dist: pytest (>=4.6) ; extra == 'develop'
+ Requires-Dist: pycodestyle ; extra == 'develop'
+ Requires-Dist: pytest-cov ; extra == 'develop'
+ Requires-Dist: codecov ; extra == 'develop'
+ Requires-Dist: wheel ; extra == 'develop'
+ Provides-Extra: docs
+ Requires-Dist: sphinx ; extra == 'docs'
+ Provides-Extra: gmpy
+ Requires-Dist: gmpy2 (>=2.1.0a4) ; (platform_python_implementation != "PyPy") and extra == 'gmpy'
+ Provides-Extra: tests
+ Requires-Dist: pytest (>=4.6) ; extra == 'tests'
+
+ mpmath
+ ======
+
+ |pypi version| |Build status| |Code coverage status| |Zenodo Badge|
+
+ .. |pypi version| image:: https://img.shields.io/pypi/v/mpmath.svg
+ :target: https://pypi.python.org/pypi/mpmath
+ .. |Build status| image:: https://github.com/fredrik-johansson/mpmath/workflows/test/badge.svg
+ :target: https://github.com/fredrik-johansson/mpmath/actions?workflow=test
+ .. |Code coverage status| image:: https://codecov.io/gh/fredrik-johansson/mpmath/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/fredrik-johansson/mpmath
+ .. |Zenodo Badge| image:: https://zenodo.org/badge/2934512.svg
+ :target: https://zenodo.org/badge/latestdoi/2934512
+
+ A Python library for arbitrary-precision floating-point arithmetic.
+
+ Website: http://mpmath.org/
+ Main author: Fredrik Johansson <[email protected]>
+
+ Mpmath is free software released under the New BSD License (see the
+ LICENSE file for details)
+
+ 0. History and credits
+ ----------------------
+
+ The following people (among others) have contributed major patches
+ or new features to mpmath:
+
+ * Pearu Peterson <[email protected]>
+ * Mario Pernici <[email protected]>
+ * Ondrej Certik <[email protected]>
+ * Vinzent Steinberg <[email protected]>
+ * Nimish Telang <[email protected]>
+ * Mike Taschuk <[email protected]>
+ * Case Van Horsen <[email protected]>
+ * Jorn Baayen <[email protected]>
+ * Chris Smith <[email protected]>
+ * Juan Arias de Reyna <[email protected]>
+ * Ioannis Tziakos <[email protected]>
+ * Aaron Meurer <[email protected]>
+ * Stefan Krastanov <[email protected]>
+ * Ken Allen <[email protected]>
+ * Timo Hartmann <[email protected]>
+ * Sergey B Kirpichev <[email protected]>
+ * Kris Kuhlman <[email protected]>
+ * Paul Masson <[email protected]>
+ * Michael Kagalenko <[email protected]>
+ * Jonathan Warner <[email protected]>
+ * Max Gaukler <[email protected]>
+ * Guillermo Navas-Palencia <[email protected]>
+ * Nike Dattani <[email protected]>
+
+ Numerous other people have contributed by reporting bugs,
+ requesting new features, or suggesting improvements to the
+ documentation.
+
+ For a detailed changelog, including individual contributions,
+ see the CHANGES file.
+
+ Fredrik's work on mpmath during summer 2008 was sponsored by Google
+ as part of the Google Summer of Code program.
+
+ Fredrik's work on mpmath during summer 2009 was sponsored by the
+ American Institute of Mathematics under the support of the National Science
+ Foundation Grant No. 0757627 (FRG: L-functions and Modular Forms).
+
+ Any opinions, findings, and conclusions or recommendations expressed in this
+ material are those of the author(s) and do not necessarily reflect the
+ views of the sponsors.
+
+ Credit also goes to:
+
+ * The authors of the GMP library and the Python wrapper
+ gmpy, enabling mpmath to become much faster at
+ high precision
+ * The authors of MPFR, pari/gp, MPFUN, and other arbitrary-
+ precision libraries, whose documentation has been helpful
+ for implementing many of the algorithms in mpmath
+ * Wikipedia contributors; Abramowitz & Stegun; Gradshteyn & Ryzhik;
+ Wolfram Research for MathWorld and the Wolfram Functions site.
+ These are the main references used for special functions
+ implementations.
+ * George Brandl for developing the Sphinx documentation tool
+ used to build mpmath's documentation
+
+ Release history:
+
+ * Version 1.3.0 released on March 7, 2023
+ * Version 1.2.0 released on February 1, 2021
+ * Version 1.1.0 released on December 11, 2018
+ * Version 1.0.0 released on September 27, 2017
+ * Version 0.19 released on June 10, 2014
+ * Version 0.18 released on December 31, 2013
+ * Version 0.17 released on February 1, 2011
+ * Version 0.16 released on September 24, 2010
+ * Version 0.15 released on June 6, 2010
+ * Version 0.14 released on February 5, 2010
+ * Version 0.13 released on August 13, 2009
+ * Version 0.12 released on June 9, 2009
+ * Version 0.11 released on January 26, 2009
+ * Version 0.10 released on October 15, 2008
+ * Version 0.9 released on August 23, 2008
+ * Version 0.8 released on April 20, 2008
+ * Version 0.7 released on March 12, 2008
+ * Version 0.6 released on January 13, 2008
+ * Version 0.5 released on November 24, 2007
+ * Version 0.4 released on November 3, 2007
+ * Version 0.3 released on October 5, 2007
+ * Version 0.2 released on October 2, 2007
+ * Version 0.1 released on September 27, 2007
+
+ 1. Download & installation
+ --------------------------
+
+ Mpmath requires Python 2.7 or 3.5 (or later versions). It has been tested
+ with CPython 2.7, 3.5 through 3.7 and for PyPy.
+
+ The latest release of mpmath can be downloaded from the mpmath
+ website and from https://github.com/fredrik-johansson/mpmath/releases
+
+ It should also be available in the Python Package Index at
+ https://pypi.python.org/pypi/mpmath
+
+ To install latest release of Mpmath with pip, simply run
+
+ ``pip install mpmath``
+
+ Or unpack the mpmath archive and run
+
+ ``python setup.py install``
+
+ Mpmath can also be installed using
+
+ ``python -m easy_install mpmath``
+
+ The latest development code is available from
+ https://github.com/fredrik-johansson/mpmath
+
+ See the main documentation for more detailed instructions.
+
+ 2. Running tests
+ ----------------
+
+ The unit tests in mpmath/tests/ can be run via the script
+ runtests.py, but it is recommended to run them with py.test
+ (https://pytest.org/), especially
+ to generate more useful reports in case there are failures.
+
+ You may also want to check out the demo scripts in the demo
+ directory.
+
+ The master branch is automatically tested by Travis CI.
+
+ 3. Documentation
+ ----------------
+
+ Documentation in reStructuredText format is available in the
+ doc directory included with the source package. These files
+ are human-readable, but can be compiled to prettier HTML using
+ the build.py script (requires Sphinx, http://sphinx.pocoo.org/).
+
+ See setup.txt in the documentation for more information.
+
+ The most recent documentation is also available in HTML format:
+
+ http://mpmath.org/doc/current/
+
+ 4. Known problems
+ -----------------
+
+ Mpmath is a work in progress. Major issues include:
+
+ * Some functions may return incorrect values when given extremely
+ large arguments or arguments very close to singularities.
+
+ * Directed rounding works for arithmetic operations. It is implemented
+ heuristically for other operations, and their results may be off by one
+ or two units in the last place (even if otherwise accurate).
+
+ * Some IEEE 754 features are not available. Inifinities and NaN are
+ partially supported; denormal rounding is currently not available
+ at all.
+
+ * The interface for switching precision and rounding is not finalized.
+ The current method is not threadsafe.
+
+ 5. Help and bug reports
+ -----------------------
+
+ General questions and comments can be sent to the mpmath mailinglist,
+ [email protected]
+
+ You can also report bugs and send patches to the mpmath issue tracker,
+ https://github.com/fredrik-johansson/mpmath/issues
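Aside (not part of the packaged METADATA): the library this file describes provides arbitrary-precision floating-point arithmetic; a minimal usage sketch, assuming the bundled mpmath 1.3.0 is importable:

from mpmath import mp, quad, sin

mp.dps = 50                   # work with 50 significant decimal digits
print(mp.pi)                  # pi to 50 digits
print(quad(sin, [0, mp.pi]))  # numerically integrate sin over [0, pi]; ~2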
emu3/lib/python3.10/site-packages/mpmath-1.3.0.dist-info/RECORD ADDED
@@ -0,0 +1,181 @@
+ mpmath-1.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+ mpmath-1.3.0.dist-info/LICENSE,sha256=wmyugdpFCOXiSZhXd6M4IfGDIj67dNf4z7-Q_n7vL7c,1537
+ mpmath-1.3.0.dist-info/METADATA,sha256=RLZupES5wNGa6UgV01a_BHrmtoDBkmi1wmVofNaoFAY,8630
+ mpmath-1.3.0.dist-info/RECORD,,
+ mpmath-1.3.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ mpmath-1.3.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+ mpmath-1.3.0.dist-info/top_level.txt,sha256=BUVWrh8EVlkOhM1n3X9S8msTaVcC-3s6Sjt60avHYus,7
+ mpmath/__init__.py,sha256=skFYTSwfwDBLChAV6pI3SdewgAQR3UBtyrfIK_Jdn-g,8765
+ mpmath/__pycache__/__init__.cpython-310.pyc,,
+ mpmath/__pycache__/ctx_base.cpython-310.pyc,,
+ mpmath/__pycache__/ctx_fp.cpython-310.pyc,,
+ mpmath/__pycache__/ctx_iv.cpython-310.pyc,,
+ mpmath/__pycache__/ctx_mp.cpython-310.pyc,,
+ mpmath/__pycache__/ctx_mp_python.cpython-310.pyc,,
+ mpmath/__pycache__/function_docs.cpython-310.pyc,,
+ mpmath/__pycache__/identification.cpython-310.pyc,,
+ mpmath/__pycache__/math2.cpython-310.pyc,,
+ mpmath/__pycache__/rational.cpython-310.pyc,,
+ mpmath/__pycache__/usertools.cpython-310.pyc,,
+ mpmath/__pycache__/visualization.cpython-310.pyc,,
+ mpmath/calculus/__init__.py,sha256=UAgCIJ1YmaeyTqpNzjBlCZGeIzLtUZMEEpl99VWNjus,162
+ mpmath/calculus/__pycache__/__init__.cpython-310.pyc,,
+ mpmath/calculus/__pycache__/approximation.cpython-310.pyc,,
+ mpmath/calculus/__pycache__/calculus.cpython-310.pyc,,
+ mpmath/calculus/__pycache__/differentiation.cpython-310.pyc,,
+ mpmath/calculus/__pycache__/extrapolation.cpython-310.pyc,,
+ mpmath/calculus/__pycache__/inverselaplace.cpython-310.pyc,,
+ mpmath/calculus/__pycache__/odes.cpython-310.pyc,,
+ mpmath/calculus/__pycache__/optimization.cpython-310.pyc,,
+ mpmath/calculus/__pycache__/polynomials.cpython-310.pyc,,
+ mpmath/calculus/__pycache__/quadrature.cpython-310.pyc,,
+ mpmath/calculus/approximation.py,sha256=vyzu3YI6r63Oq1KFHrQz02mGXAcH23emqNYhJuUaFZ4,8817
+ mpmath/calculus/calculus.py,sha256=A0gSp0hxSyEDfugJViY3CeWalF-vK701YftzrjSQzQ4,112
+ mpmath/calculus/differentiation.py,sha256=2L6CBj8xtX9iip98NPbKsLtwtRjxi571wYmTMHFeL90,20226
+ mpmath/calculus/extrapolation.py,sha256=xM0rvk2DFEF4iR1Jhl-Y3aS93iW9VVJX7y9IGpmzC-A,73306
+ mpmath/calculus/inverselaplace.py,sha256=5-pn8N_t0PtgBTXixsXZ4xxrihK2J5gYsVfTKfDx4gA,36056
+ mpmath/calculus/odes.py,sha256=gaHiw7IJjsONNTAa6izFPZpmcg9uyTp8MULnGdzTIGo,9908
+ mpmath/calculus/optimization.py,sha256=bKnShXElBOmVOIOlFeksDsYCp9fYSmYwKmXDt0z26MM,32856
+ mpmath/calculus/polynomials.py,sha256=D16BhU_SHbVi06IxNwABHR-H77IylndNsN3muPTuFYs,7877
+ mpmath/calculus/quadrature.py,sha256=n-avtS8E43foV-5tr5lofgOBaiMUYE8AJjQcWI9QcKk,42432
+ mpmath/ctx_base.py,sha256=rfjmfMyA55x8R_cWFINUwWVTElfZmyx5erKDdauSEVw,15985
+ mpmath/ctx_fp.py,sha256=ctUjx_NoU0iFWk05cXDYCL2ZtLZOlWs1n6Zao3pbG2g,6572
+ mpmath/ctx_iv.py,sha256=tqdMr-GDfkZk1EhoGeCAajy7pQv-RWtrVqhYjfI8r4g,17211
+ mpmath/ctx_mp.py,sha256=d3r4t7xHNqSFtmqsA9Btq1Npy3WTM-pcM2_jeCyECxY,49452
+ mpmath/ctx_mp_python.py,sha256=3olYWo4lk1SnQ0A_IaZ181qqG8u5pxGat_v-L4Qtn3Y,37815
+ mpmath/function_docs.py,sha256=g4PP8n6ILXmHcLyA50sxK6Tmp_Z4_pRN-wDErU8D1i4,283512
+ mpmath/functions/__init__.py,sha256=YXVdhqv-6LKm6cr5xxtTNTtuD9zDPKGQl8GmS0xz2xo,330
+ mpmath/functions/__pycache__/__init__.cpython-310.pyc,,
+ mpmath/functions/__pycache__/bessel.cpython-310.pyc,,
+ mpmath/functions/__pycache__/elliptic.cpython-310.pyc,,
+ mpmath/functions/__pycache__/expintegrals.cpython-310.pyc,,
+ mpmath/functions/__pycache__/factorials.cpython-310.pyc,,
+ mpmath/functions/__pycache__/functions.cpython-310.pyc,,
+ mpmath/functions/__pycache__/hypergeometric.cpython-310.pyc,,
+ mpmath/functions/__pycache__/orthogonal.cpython-310.pyc,,
+ mpmath/functions/__pycache__/qfunctions.cpython-310.pyc,,
+ mpmath/functions/__pycache__/rszeta.cpython-310.pyc,,
+ mpmath/functions/__pycache__/signals.cpython-310.pyc,,
+ mpmath/functions/__pycache__/theta.cpython-310.pyc,,
+ mpmath/functions/__pycache__/zeta.cpython-310.pyc,,
+ mpmath/functions/__pycache__/zetazeros.cpython-310.pyc,,
+ mpmath/functions/bessel.py,sha256=dUPLu8frlK-vmf3-irX_7uvwyw4xccv6EIizmIZ88kM,37938
+ mpmath/functions/elliptic.py,sha256=qz0yVMb4lWEeOTDL_DWz5u5awmGIPKAsuZFJXgwHJNU,42237
+ mpmath/functions/expintegrals.py,sha256=75X_MRdYc1F_X73bgNiOJqwRlS2hqAzcFLl3RM2tCDc,11644
+ mpmath/functions/factorials.py,sha256=8_6kCR7e4k1GwxiAOJu0NRadeF4jA28qx4hidhu4ILk,5273
+ mpmath/functions/functions.py,sha256=ub2JExvqzCWLkm5yAm72Fr6fdWmZZUknq9_3w9MEigI,18100
+ mpmath/functions/hypergeometric.py,sha256=Z0OMAMC4ylK42n_SnamyFVnUx6zHLyCLCoJDSZ1JrHY,51570
+ mpmath/functions/orthogonal.py,sha256=FabkxKfBoSseA5flWu1a3re-2BYaew9augqIsT8LaLw,16097
+ mpmath/functions/qfunctions.py,sha256=a3EHGKQt_jMd4x9I772Jz-TGFnGY-arWqPvZGz9QSe0,7633
+ mpmath/functions/rszeta.py,sha256=yuUVp4ilIyDmXyE3WTBxDDjwfEJNypJnbPS-xPH5How,46184
+ mpmath/functions/signals.py,sha256=ELotwQaW1CDpv-eeJzOZ5c23NhfaZcj9_Gkb3psvS0Q,703
+ mpmath/functions/theta.py,sha256=KggOocczoMG6_HMoal4oEP7iZ4SKOou9JFE-WzY2r3M,37320
+ mpmath/functions/zeta.py,sha256=ue7JY7GXA0oX8q08sQJl2CSRrZ7kOt8HsftpVjnTwrE,36410
+ mpmath/functions/zetazeros.py,sha256=uq6TVyZBcY2MLX7VSdVfn0TOkowBLM9fXtnySEwaNzw,30858
+ mpmath/identification.py,sha256=7aMdngRAaeL_MafDUNbmEIlGQSklHDZ8pmPFt-OLgkw,29253
+ mpmath/libmp/__init__.py,sha256=UCDjLZw4brbklaCmSixCcPdLdHkz8sF_-6F_wr0duAg,3790
+ mpmath/libmp/__pycache__/__init__.cpython-310.pyc,,
+ mpmath/libmp/__pycache__/backend.cpython-310.pyc,,
+ mpmath/libmp/__pycache__/gammazeta.cpython-310.pyc,,
+ mpmath/libmp/__pycache__/libelefun.cpython-310.pyc,,
+ mpmath/libmp/__pycache__/libhyper.cpython-310.pyc,,
+ mpmath/libmp/__pycache__/libintmath.cpython-310.pyc,,
+ mpmath/libmp/__pycache__/libmpc.cpython-310.pyc,,
+ mpmath/libmp/__pycache__/libmpf.cpython-310.pyc,,
+ mpmath/libmp/__pycache__/libmpi.cpython-310.pyc,,
+ mpmath/libmp/backend.py,sha256=26A8pUkaGov26vrrFNQVyWJ5LDtK8sl3UHrYLecaTjA,3360
+ mpmath/libmp/gammazeta.py,sha256=Xqdw6PMoswDaSca_sOs-IglRuk3fb8c9p43M_lbcrlc,71469
+ mpmath/libmp/libelefun.py,sha256=joBZP4FOdxPfieWso1LPtSr6dHydpG_LQiF_bYQYWMg,43861
+ mpmath/libmp/libhyper.py,sha256=J9fmdDF6u27EcssEWvBuVaAa3hFjPvPN1SgRgu1dEbc,36624
+ mpmath/libmp/libintmath.py,sha256=aIRT0rkUZ_sdGQf3TNCLd-pBMvtQWjssbvFLfK7U0jc,16688
+ mpmath/libmp/libmpc.py,sha256=KBndUjs5YVS32-Id3fflDfYgpdW1Prx6zfo8Ez5Qbrs,26875
+ mpmath/libmp/libmpf.py,sha256=vpP0kNVkScbCVoZogJ4Watl4I7Ce0d4dzHVjfVe57so,45021
+ mpmath/libmp/libmpi.py,sha256=u0I5Eiwkqa-4-dXETi5k7MuaxBeZbvCAPFtl93U9YF0,27622
+ mpmath/math2.py,sha256=O5Dglg81SsW0wfHDUJcXOD8-cCaLvbVIvyw0sVmRbpI,18561
+ mpmath/matrices/__init__.py,sha256=ETzGDciYbq9ftiKwaMbJ15EI-KNXHrzRb-ZHehhqFjs,94
+ mpmath/matrices/__pycache__/__init__.cpython-310.pyc,,
+ mpmath/matrices/__pycache__/calculus.cpython-310.pyc,,
+ mpmath/matrices/__pycache__/eigen.cpython-310.pyc,,
+ mpmath/matrices/__pycache__/eigen_symmetric.cpython-310.pyc,,
+ mpmath/matrices/__pycache__/linalg.cpython-310.pyc,,
+ mpmath/matrices/__pycache__/matrices.cpython-310.pyc,,
+ mpmath/matrices/calculus.py,sha256=PNRq-p2nxgT-fzC54K2depi8ddhdx6Q86G8qpUiHeUY,18609
+ mpmath/matrices/eigen.py,sha256=GbDXI3CixzEdXxr1G86uUWkAngAvd-05MmSQ-Tsu_5k,24394
+ mpmath/matrices/eigen_symmetric.py,sha256=FPKPeQr1cGYw6Y6ea32a1YdEWQDLP6JlQHEA2WfNLYg,58534
+ mpmath/matrices/linalg.py,sha256=04C3ijzMFom7ob5fXBCDfyPPdo3BIboIeE8x2A6vqF0,26958
+ mpmath/matrices/matrices.py,sha256=o78Eq62EHQnxcsR0LBoWDEGREOoN4L2iDM1q3dQrw0o,32331
+ mpmath/rational.py,sha256=64d56fvZXngYZT7nOAHeFRUX77eJ1A0R3rpfWBU-mSo,5976
+ mpmath/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ mpmath/tests/__pycache__/__init__.cpython-310.pyc,,
+ mpmath/tests/__pycache__/extratest_gamma.cpython-310.pyc,,
+ mpmath/tests/__pycache__/extratest_zeta.cpython-310.pyc,,
+ mpmath/tests/__pycache__/runtests.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_basic_ops.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_bitwise.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_calculus.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_compatibility.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_convert.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_diff.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_division.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_eigen.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_eigen_symmetric.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_elliptic.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_fp.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_functions.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_functions2.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_gammazeta.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_hp.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_identify.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_interval.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_levin.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_linalg.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_matrices.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_mpmath.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_ode.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_pickle.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_power.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_quad.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_rootfinding.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_special.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_str.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_summation.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_trig.cpython-310.pyc,,
+ mpmath/tests/__pycache__/test_visualization.cpython-310.pyc,,
+ mpmath/tests/__pycache__/torture.cpython-310.pyc,,
+ mpmath/tests/extratest_gamma.py,sha256=xidhXUelILcxtiPGoTBHjqUOKIJzEaZ_v3nntGQyWZQ,7228
+ mpmath/tests/extratest_zeta.py,sha256=sg10j9RhjBpV2EdUqyYhGV2ERWvM--EvwwGIz6HTmlw,1003
+ mpmath/tests/runtests.py,sha256=7NUV82F3K_5AhU8mCLUFf5OibtT7uloFCwPyM3l71wM,5189
+ mpmath/tests/test_basic_ops.py,sha256=dsB8DRG-GrPzBaZ-bIauYabaeqXbfqBo9SIP9BqcTSs,15348
+ mpmath/tests/test_bitwise.py,sha256=-nLYhgQbhDza3SQM63BhktYntACagqMYx9ib3dPnTKM,7686
+ mpmath/tests/test_calculus.py,sha256=4oxtNfMpO4RLLoOzrv7r9-h8BcqfBsJIE6UpsHe7c4w,9187
+ mpmath/tests/test_compatibility.py,sha256=_t3ASZ3jhfAMnN1voWX7PDNIDzn-3PokkJGIdT1x7y0,2306
+ mpmath/tests/test_convert.py,sha256=JPcDcTJIWh5prIxjx5DM1aNWgqlUoF2KpHvAgK3uHi4,8834
+ mpmath/tests/test_diff.py,sha256=qjiF8NxQ8vueuZ5ZHGPQ-kjcj_I7Jh_fEdFtaA8DzEI,2466
+ mpmath/tests/test_division.py,sha256=6lUeZfmaBWvvszdqlWLMHgXPjVsxvW1WZpd4-jFWCpU,5340
+ mpmath/tests/test_eigen.py,sha256=2mnqVATGbsJkvSVHPpitfAk881twFfb3LsO3XikV9Hs,3905
+ mpmath/tests/test_eigen_symmetric.py,sha256=v0VimCicIU2owASDMBaP-t-30uq-pXcsglt95KBtNO4,8778
+ mpmath/tests/test_elliptic.py,sha256=Kjiwq9Bb6N_OOzzWewGQ1M_PMa7vRs42V0t90gloZxo,26225
+ mpmath/tests/test_fp.py,sha256=AJo0FTyH4BuUnUsv176LD956om308KGYndy-b54KGxM,89997
+ mpmath/tests/test_functions.py,sha256=b47VywdomoOX6KmMmz9-iv2IqVIydwKSuUw2pWlFHrY,30955
+ mpmath/tests/test_functions2.py,sha256=vlw2RWhL1oTcifnOMDx1a_YzN96UgNNIE5STeKRv1HY,96990
+ mpmath/tests/test_gammazeta.py,sha256=AB34O0DV7AlEf9Z4brnCadeQU5-uAwhWRw5FZas65DA,27917
+ mpmath/tests/test_hp.py,sha256=6hcENu6Te2klPEiTSeLBIRPlH7PADlJwFKbx8xpnOhg,10461
+ mpmath/tests/test_identify.py,sha256=lGUIPfrB2paTg0cFUo64GmMzF77F9gs9FQjX7gxGHV8,692
+ mpmath/tests/test_interval.py,sha256=TjYd7a9ca6iRJiLjw06isLeZTuGoGAPmgleDZ0cYfJ0,17527
+ mpmath/tests/test_levin.py,sha256=P8M11yV1dj_gdSNv5xuwCzFiF86QyRDtPMjURy6wJ28,5090
+ mpmath/tests/test_linalg.py,sha256=miKEnwB8iwWV13hi1bF1cg3hgB4rTKOR0fvDVfWmXds,10440
+ mpmath/tests/test_matrices.py,sha256=qyA4Ml2CvNvW034lzB01G6wVgNr7UrgZqh2wkMXtpzM,7944
+ mpmath/tests/test_mpmath.py,sha256=LVyJUeofiaxW-zLKWVBCz59L9UQsjlW0Ts9_oBiEv_4,196
+ mpmath/tests/test_ode.py,sha256=zAxexBH4fnmFNO4bvEHbug1NJWC5zqfFaVDlYijowkY,1822
+ mpmath/tests/test_pickle.py,sha256=Y8CKmDLFsJHUqG8CDaBw5ilrPP4YT1xijVduLpQ7XFE,401
+ mpmath/tests/test_power.py,sha256=sz_K02SmNxpa6Kb1uJLN_N4tXTJGdQ___vPRshEN7Gk,5227
+ mpmath/tests/test_quad.py,sha256=49Ltft0vZ_kdKLL5s-Kj-BzAVoF5LPVEUeNUzdOkghI,3893
+ mpmath/tests/test_rootfinding.py,sha256=umQegEaKHmYOEl5jEyoD-VLKDtXsTJJkepKEr4c0dC0,3132
+ mpmath/tests/test_special.py,sha256=YbMIoMIkJEvvKYIzS0CXthJFG0--j6un7-tcE6b7FPM,2848
+ mpmath/tests/test_str.py,sha256=0WsGD9hMPRi8zcuYMA9Cu2mOvQiCFskPwMsMf8lBDK4,544
+ mpmath/tests/test_summation.py,sha256=fdNlsvRVOsbWxbhlyDLDaEO2S8kTJrRMKIvB5-aNci0,2035
+ mpmath/tests/test_trig.py,sha256=zPtkIEnZaThxcWur4k7BX8-2Jmj-AhO191Svv7ANYUU,4799
+ mpmath/tests/test_visualization.py,sha256=1PqtkoUx-WsKYgTRiu5o9pBc85kwhf1lzU2eobDQCJM,944
+ mpmath/tests/torture.py,sha256=LD95oES7JY2KroELK-m-jhvtbvZaKChnt0Cq7kFMNCw,7868
+ mpmath/usertools.py,sha256=a-TDw7XSRsPdBEffxOooDV4WDFfuXnO58P75dcAD87I,3029
+ mpmath/visualization.py,sha256=pnnbjcd9AhFVRBZavYX5gjx4ytK_kXoDDisYR6EpXhs,10627
emu3/lib/python3.10/site-packages/mpmath-1.3.0.dist-info/REQUESTED ADDED
File without changes
emu3/lib/python3.10/site-packages/ninja/__init__.py ADDED
@@ -0,0 +1,55 @@
+ # -*- coding: utf-8 -*-
+ import os
+ import platform
+ import subprocess
+ import sys
+
+ from ._version import version as __version__
+
+ __all__ = ["__version__", "DATA", "BIN_DIR", "ninja"]
+
+
+ def __dir__():
+     return __all__
+
+
+ try:
+     from .ninja_syntax import Writer, escape, expand
+ except ImportError:
+     # Support importing `ninja_syntax` from the source tree
+     if not os.path.exists(
+             os.path.join(os.path.dirname(__file__), 'ninja_syntax.py')):
+         sys.path.insert(0, os.path.abspath(os.path.join(
+             os.path.dirname(__file__), '../../Ninja-src/misc')))
+     from ninja_syntax import Writer, escape, expand  # noqa: F401
+
+ DATA = os.path.join(os.path.dirname(__file__), 'data')
+
+ # Support running tests from the source tree
+ if not os.path.exists(DATA):
+     from skbuild.constants import CMAKE_INSTALL_DIR as SKBUILD_CMAKE_INSTALL_DIR
+     from skbuild.constants import set_skbuild_plat_name
+
+     if platform.system().lower() == "darwin":
+         # Since building the project specifying --plat-name or CMAKE_OSX_* variables
+         # leads to different SKBUILD_DIR, the code below attempt to guess the most
+         # likely plat-name.
+         _skbuild_dirs = os.listdir(os.path.join(os.path.dirname(__file__), '..', '..', '_skbuild'))
+         if _skbuild_dirs:
+             _likely_plat_name = '-'.join(_skbuild_dirs[0].split('-')[:3])
+             set_skbuild_plat_name(_likely_plat_name)
+
+     _data = os.path.abspath(os.path.join(
+         os.path.dirname(__file__), '..', '..', SKBUILD_CMAKE_INSTALL_DIR(), 'src/ninja/data'))
+     if os.path.exists(_data):
+         DATA = _data
+
+ BIN_DIR = os.path.join(DATA, 'bin')
+
+
+ def _program(name, args):
+     return subprocess.call([os.path.join(BIN_DIR, name)] + args, close_fds=False)
+
+
+ def ninja():
+     raise SystemExit(_program('ninja', sys.argv[1:]))
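The module above exposes __version__, DATA, BIN_DIR and the ninja() entry point, which simply runs the bundled ninja binary found in BIN_DIR. A hedged usage sketch, assuming this wheel's data/bin directory is present in the environment:

import os
import subprocess

import ninja

print("ninja wheel version:", ninja.__version__)
exe = os.path.join(ninja.BIN_DIR, "ninja")
subprocess.run([exe, "--version"], check=True)  # same binary ninja() dispatches to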
emu3/lib/python3.10/site-packages/ninja/__main__.py ADDED
@@ -0,0 +1,5 @@
+ # -*- coding: utf-8 -*-
+ from ninja import ninja
+
+ if __name__ == '__main__':
+     ninja()
emu3/lib/python3.10/site-packages/ninja/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (1.57 kB).
 
emu3/lib/python3.10/site-packages/ninja/__pycache__/__main__.cpython-310.pyc ADDED
Binary file (227 Bytes).
 
emu3/lib/python3.10/site-packages/ninja/__pycache__/_version.cpython-310.pyc ADDED
Binary file (486 Bytes).
 
emu3/lib/python3.10/site-packages/ninja/__pycache__/ninja_syntax.cpython-310.pyc ADDED
Binary file (5.95 kB).
 
emu3/lib/python3.10/site-packages/ninja/_version.py ADDED
@@ -0,0 +1,16 @@
+ # file generated by setuptools_scm
+ # don't change, don't track in version control
+ TYPE_CHECKING = False
+ if TYPE_CHECKING:
+     from typing import Tuple, Union
+     VERSION_TUPLE = Tuple[Union[int, str], ...]
+ else:
+     VERSION_TUPLE = object
+
+ version: str
+ __version__: str
+ __version_tuple__: VERSION_TUPLE
+ version_tuple: VERSION_TUPLE
+
+ __version__ = version = '1.11.1.1'
+ __version_tuple__ = version_tuple = (1, 11, 1, 1)
emu3/lib/python3.10/site-packages/ninja/ninja_syntax.py ADDED
@@ -0,0 +1,199 @@
+ #!/usr/bin/python
+
+ # Copyright 2011 Google Inc. All Rights Reserved.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ """Python module for generating .ninja files.
+
+ Note that this is emphatically not a required piece of Ninja; it's
+ just a helpful utility for build-file-generation systems that already
+ use Python.
+ """
+
+ import re
+ import textwrap
+
+ def escape_path(word):
+     return word.replace('$ ', '$$ ').replace(' ', '$ ').replace(':', '$:')
+
+ class Writer(object):
+     def __init__(self, output, width=78):
+         self.output = output
+         self.width = width
+
+     def newline(self):
+         self.output.write('\n')
+
+     def comment(self, text):
+         for line in textwrap.wrap(text, self.width - 2, break_long_words=False,
+                                   break_on_hyphens=False):
+             self.output.write('# ' + line + '\n')
+
+     def variable(self, key, value, indent=0):
+         if value is None:
+             return
+         if isinstance(value, list):
+             value = ' '.join(filter(None, value))  # Filter out empty strings.
+         self._line('%s = %s' % (key, value), indent)
+
+     def pool(self, name, depth):
+         self._line('pool %s' % name)
+         self.variable('depth', depth, indent=1)
+
+     def rule(self, name, command, description=None, depfile=None,
+              generator=False, pool=None, restat=False, rspfile=None,
+              rspfile_content=None, deps=None):
+         self._line('rule %s' % name)
+         self.variable('command', command, indent=1)
+         if description:
+             self.variable('description', description, indent=1)
+         if depfile:
+             self.variable('depfile', depfile, indent=1)
+         if generator:
+             self.variable('generator', '1', indent=1)
+         if pool:
+             self.variable('pool', pool, indent=1)
+         if restat:
+             self.variable('restat', '1', indent=1)
+         if rspfile:
+             self.variable('rspfile', rspfile, indent=1)
+         if rspfile_content:
+             self.variable('rspfile_content', rspfile_content, indent=1)
+         if deps:
+             self.variable('deps', deps, indent=1)
+
+     def build(self, outputs, rule, inputs=None, implicit=None, order_only=None,
+               variables=None, implicit_outputs=None, pool=None, dyndep=None):
+         outputs = as_list(outputs)
+         out_outputs = [escape_path(x) for x in outputs]
+         all_inputs = [escape_path(x) for x in as_list(inputs)]
+
+         if implicit:
+             implicit = [escape_path(x) for x in as_list(implicit)]
+             all_inputs.append('|')
+             all_inputs.extend(implicit)
+         if order_only:
+             order_only = [escape_path(x) for x in as_list(order_only)]
+             all_inputs.append('||')
+             all_inputs.extend(order_only)
+         if implicit_outputs:
+             implicit_outputs = [escape_path(x)
+                                 for x in as_list(implicit_outputs)]
+             out_outputs.append('|')
+             out_outputs.extend(implicit_outputs)
+
+         self._line('build %s: %s' % (' '.join(out_outputs),
+                                      ' '.join([rule] + all_inputs)))
+         if pool is not None:
+             self._line('  pool = %s' % pool)
+         if dyndep is not None:
+             self._line('  dyndep = %s' % dyndep)
+
+         if variables:
+             if isinstance(variables, dict):
+                 iterator = iter(variables.items())
+             else:
+                 iterator = iter(variables)
+
+             for key, val in iterator:
+                 self.variable(key, val, indent=1)
+
+         return outputs
+
+     def include(self, path):
+         self._line('include %s' % path)
+
+     def subninja(self, path):
+         self._line('subninja %s' % path)
+
+     def default(self, paths):
+         self._line('default %s' % ' '.join(as_list(paths)))
+
+     def _count_dollars_before_index(self, s, i):
+         """Returns the number of '$' characters right in front of s[i]."""
+         dollar_count = 0
+         dollar_index = i - 1
+         while dollar_index > 0 and s[dollar_index] == '$':
+             dollar_count += 1
+             dollar_index -= 1
+         return dollar_count
+
+     def _line(self, text, indent=0):
+         """Write 'text' word-wrapped at self.width characters."""
+         leading_space = '  ' * indent
+         while len(leading_space) + len(text) > self.width:
+             # The text is too wide; wrap if possible.
+
+             # Find the rightmost space that would obey our width constraint and
+             # that's not an escaped space.
+             available_space = self.width - len(leading_space) - len(' $')
+             space = available_space
+             while True:
+                 space = text.rfind(' ', 0, space)
+                 if (space < 0 or
+                     self._count_dollars_before_index(text, space) % 2 == 0):
+                     break
+
+             if space < 0:
+                 # No such space; just use the first unescaped space we can find.
+                 space = available_space - 1
+                 while True:
+                     space = text.find(' ', space + 1)
+                     if (space < 0 or
+                         self._count_dollars_before_index(text, space) % 2 == 0):
+                         break
+                 if space < 0:
+                     # Give up on breaking.
+                     break
+
+             self.output.write(leading_space + text[0:space] + ' $\n')
+             text = text[space+1:]
+
+             # Subsequent lines are continuations, so indent them.
+             leading_space = '  ' * (indent+2)
+
+         self.output.write(leading_space + text + '\n')
+
+     def close(self):
+         self.output.close()
+
+
+ def as_list(input):
+     if input is None:
+         return []
+     if isinstance(input, list):
+         return input
+     return [input]
+
+
+ def escape(string):
+     """Escape a string such that it can be embedded into a Ninja file without
+     further interpretation."""
+     assert '\n' not in string, 'Ninja syntax does not allow newlines'
+     # We only have one special metacharacter: '$'.
+     return string.replace('$', '$$')
+
+
+ def expand(string, vars, local_vars={}):
+     """Expand a string containing $vars as Ninja would.
+
+     Note: doesn't handle the full Ninja variable syntax, but it's enough
+     to make configure.py's use of it work.
+     """
+     def exp(m):
+         var = m.group(1)
+         if var == '$':
+             return '$'
+         return local_vars.get(var, vars.get(var, ''))
+     return re.sub(r'\$(\$|\w*)', exp, string)
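A minimal usage sketch of the Writer class defined above; the rule name, command, and file names are illustrative only:

import io
from ninja.ninja_syntax import Writer

buf = io.StringIO()
n = Writer(buf)
n.comment("generated by a configure script")
n.variable("cflags", "-O2 -Wall")
n.rule("cc", command="gcc $cflags -c $in -o $out", description="CC $out")
n.build(outputs="foo.o", rule="cc", inputs="foo.c")
n.default("foo.o")
print(buf.getvalue())  # the text of a small build.ninja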
emu3/lib/python3.10/site-packages/ninja/py.typed ADDED
File without changes
emu3/lib/python3.10/site-packages/pandas/compat/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (4.74 kB). View file
 
emu3/lib/python3.10/site-packages/pandas/compat/__pycache__/_constants.cpython-310.pyc ADDED
Binary file (701 Bytes). View file
 
emu3/lib/python3.10/site-packages/pandas/compat/__pycache__/_optional.cpython-310.pyc ADDED
Binary file (4.36 kB). View file
 
emu3/lib/python3.10/site-packages/pandas/compat/__pycache__/pickle_compat.cpython-310.pyc ADDED
Binary file (5.66 kB). View file
 
emu3/lib/python3.10/site-packages/pandas/compat/__pycache__/pyarrow.cpython-310.pyc ADDED
Binary file (892 Bytes). View file
 
emu3/lib/python3.10/site-packages/pandas/compat/numpy/__init__.py ADDED
@@ -0,0 +1,53 @@
+ """ support numpy compatibility across versions """
+ import warnings
+
+ import numpy as np
+
+ from pandas.util.version import Version
+
+ # numpy versioning
+ _np_version = np.__version__
+ _nlv = Version(_np_version)
+ np_version_lt1p23 = _nlv < Version("1.23")
+ np_version_gte1p24 = _nlv >= Version("1.24")
+ np_version_gte1p24p3 = _nlv >= Version("1.24.3")
+ np_version_gte1p25 = _nlv >= Version("1.25")
+ np_version_gt2 = _nlv >= Version("2.0.0")
+ is_numpy_dev = _nlv.dev is not None
+ _min_numpy_ver = "1.22.4"
+
+
+ if _nlv < Version(_min_numpy_ver):
+     raise ImportError(
+         f"this version of pandas is incompatible with numpy < {_min_numpy_ver}\n"
+         f"your numpy version is {_np_version}.\n"
+         f"Please upgrade numpy to >= {_min_numpy_ver} to use this pandas version"
+     )
+
+
+ np_long: type
+ np_ulong: type
+
+ if np_version_gt2:
+     try:
+         with warnings.catch_warnings():
+             warnings.filterwarnings(
+                 "ignore",
+                 r".*In the future `np\.long` will be defined as.*",
+                 FutureWarning,
+             )
+             np_long = np.long  # type: ignore[attr-defined]
+             np_ulong = np.ulong  # type: ignore[attr-defined]
+     except AttributeError:
+         np_long = np.int_
+         np_ulong = np.uint
+ else:
+     np_long = np.int_
+     np_ulong = np.uint
+
+
+ __all__ = [
+     "np",
+     "_np_version",
+     "is_numpy_dev",
+ ]
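A short sketch of how the np_long/np_ulong aliases defined above keep dtype choices portable across NumPy 1.x and 2.x (assumes this pandas build is importable):

import numpy as np
from pandas.compat.numpy import np_long, np_ulong, np_version_gt2

signed = np.array([1, 2, 3], dtype=np_long)     # np.long on NumPy >= 2, np.int_ otherwise
unsigned = np.array([1, 2, 3], dtype=np_ulong)  # np.ulong on NumPy >= 2, np.uint otherwise
print(signed.dtype, unsigned.dtype, "numpy >= 2:", np_version_gt2)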
emu3/lib/python3.10/site-packages/pandas/compat/numpy/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (1.25 kB). View file
 
emu3/lib/python3.10/site-packages/pandas/compat/numpy/__pycache__/function.cpython-310.pyc ADDED
Binary file (10.5 kB). View file
 
emu3/lib/python3.10/site-packages/pandas/compat/numpy/function.py ADDED
@@ -0,0 +1,418 @@
1
+ """
2
+ For compatibility with numpy libraries, pandas functions or methods have to
3
+ accept '*args' and '**kwargs' parameters to accommodate numpy arguments that
4
+ are not actually used or respected in the pandas implementation.
5
+
6
+ To ensure that users do not abuse these parameters, validation is performed in
7
+ 'validators.py' to make sure that any extra parameters passed correspond ONLY
8
+ to those in the numpy signature. Part of that validation includes whether or
9
+ not the user attempted to pass in non-default values for these extraneous
10
+ parameters. As we want to discourage users from relying on these parameters
11
+ when calling the pandas implementation, we want them only to pass in the
12
+ default values for these parameters.
13
+
14
+ This module provides a set of commonly used default arguments for functions and
15
+ methods that are spread throughout the codebase. This module will make it
16
+ easier to adjust to future upstream changes in the analogous numpy signatures.
17
+ """
18
+ from __future__ import annotations
19
+
20
+ from typing import (
21
+ TYPE_CHECKING,
22
+ Any,
23
+ TypeVar,
24
+ cast,
25
+ overload,
26
+ )
27
+
28
+ import numpy as np
29
+ from numpy import ndarray
30
+
31
+ from pandas._libs.lib import (
32
+ is_bool,
33
+ is_integer,
34
+ )
35
+ from pandas.errors import UnsupportedFunctionCall
36
+ from pandas.util._validators import (
37
+ validate_args,
38
+ validate_args_and_kwargs,
39
+ validate_kwargs,
40
+ )
41
+
42
+ if TYPE_CHECKING:
43
+ from pandas._typing import (
44
+ Axis,
45
+ AxisInt,
46
+ )
47
+
48
+ AxisNoneT = TypeVar("AxisNoneT", Axis, None)
49
+
50
+
51
+ class CompatValidator:
52
+ def __init__(
53
+ self,
54
+ defaults,
55
+ fname=None,
56
+ method: str | None = None,
57
+ max_fname_arg_count=None,
58
+ ) -> None:
59
+ self.fname = fname
60
+ self.method = method
61
+ self.defaults = defaults
62
+ self.max_fname_arg_count = max_fname_arg_count
63
+
64
+ def __call__(
65
+ self,
66
+ args,
67
+ kwargs,
68
+ fname=None,
69
+ max_fname_arg_count=None,
70
+ method: str | None = None,
71
+ ) -> None:
72
+ if not args and not kwargs:
73
+ return None
74
+
75
+ fname = self.fname if fname is None else fname
76
+ max_fname_arg_count = (
77
+ self.max_fname_arg_count
78
+ if max_fname_arg_count is None
79
+ else max_fname_arg_count
80
+ )
81
+ method = self.method if method is None else method
82
+
83
+ if method == "args":
84
+ validate_args(fname, args, max_fname_arg_count, self.defaults)
85
+ elif method == "kwargs":
86
+ validate_kwargs(fname, kwargs, self.defaults)
87
+ elif method == "both":
88
+ validate_args_and_kwargs(
89
+ fname, args, kwargs, max_fname_arg_count, self.defaults
90
+ )
91
+ else:
92
+ raise ValueError(f"invalid validation method '{method}'")
93
+
94
+
95
+ ARGMINMAX_DEFAULTS = {"out": None}
96
+ validate_argmin = CompatValidator(
97
+ ARGMINMAX_DEFAULTS, fname="argmin", method="both", max_fname_arg_count=1
98
+ )
99
+ validate_argmax = CompatValidator(
100
+ ARGMINMAX_DEFAULTS, fname="argmax", method="both", max_fname_arg_count=1
101
+ )
102
+
103
+
104
+ def process_skipna(skipna: bool | ndarray | None, args) -> tuple[bool, Any]:
105
+ if isinstance(skipna, ndarray) or skipna is None:
106
+ args = (skipna,) + args
107
+ skipna = True
108
+
109
+ return skipna, args
110
+
111
+
112
+ def validate_argmin_with_skipna(skipna: bool | ndarray | None, args, kwargs) -> bool:
113
+ """
114
+ If 'Series.argmin' is called via the 'numpy' library, the third parameter
115
+ in its signature is 'out', which takes either an ndarray or 'None', so
116
+ check if the 'skipna' parameter is either an instance of ndarray or is
117
+ None, since 'skipna' itself should be a boolean
118
+ """
119
+ skipna, args = process_skipna(skipna, args)
120
+ validate_argmin(args, kwargs)
121
+ return skipna
122
+
123
+
124
+ def validate_argmax_with_skipna(skipna: bool | ndarray | None, args, kwargs) -> bool:
125
+ """
126
+ If 'Series.argmax' is called via the 'numpy' library, the third parameter
127
+ in its signature is 'out', which takes either an ndarray or 'None', so
128
+ check if the 'skipna' parameter is either an instance of ndarray or is
129
+ None, since 'skipna' itself should be a boolean
130
+ """
131
+ skipna, args = process_skipna(skipna, args)
132
+ validate_argmax(args, kwargs)
133
+ return skipna
134
+
135
+
136
+ ARGSORT_DEFAULTS: dict[str, int | str | None] = {}
137
+ ARGSORT_DEFAULTS["axis"] = -1
138
+ ARGSORT_DEFAULTS["kind"] = "quicksort"
139
+ ARGSORT_DEFAULTS["order"] = None
140
+ ARGSORT_DEFAULTS["kind"] = None
141
+ ARGSORT_DEFAULTS["stable"] = None
142
+
143
+
144
+ validate_argsort = CompatValidator(
145
+ ARGSORT_DEFAULTS, fname="argsort", max_fname_arg_count=0, method="both"
146
+ )
147
+
148
+ # two different signatures of argsort, this second validation for when the
149
+ # `kind` param is supported
150
+ ARGSORT_DEFAULTS_KIND: dict[str, int | None] = {}
151
+ ARGSORT_DEFAULTS_KIND["axis"] = -1
152
+ ARGSORT_DEFAULTS_KIND["order"] = None
153
+ ARGSORT_DEFAULTS_KIND["stable"] = None
154
+ validate_argsort_kind = CompatValidator(
155
+ ARGSORT_DEFAULTS_KIND, fname="argsort", max_fname_arg_count=0, method="both"
156
+ )
157
+
158
+
159
+ def validate_argsort_with_ascending(ascending: bool | int | None, args, kwargs) -> bool:
160
+ """
161
+ If 'Categorical.argsort' is called via the 'numpy' library, the first
162
+ parameter in its signature is 'axis', which takes either an integer or
163
+ 'None', so check if the 'ascending' parameter has either integer type or is
164
+ None, since 'ascending' itself should be a boolean
165
+ """
166
+ if is_integer(ascending) or ascending is None:
167
+ args = (ascending,) + args
168
+ ascending = True
169
+
170
+ validate_argsort_kind(args, kwargs, max_fname_arg_count=3)
171
+ ascending = cast(bool, ascending)
172
+ return ascending
173
+
174
+
175
+ CLIP_DEFAULTS: dict[str, Any] = {"out": None}
176
+ validate_clip = CompatValidator(
177
+ CLIP_DEFAULTS, fname="clip", method="both", max_fname_arg_count=3
178
+ )
179
+
180
+
181
+ @overload
182
+ def validate_clip_with_axis(axis: ndarray, args, kwargs) -> None:
183
+ ...
184
+
185
+
186
+ @overload
187
+ def validate_clip_with_axis(axis: AxisNoneT, args, kwargs) -> AxisNoneT:
188
+ ...
189
+
190
+
191
+ def validate_clip_with_axis(
192
+ axis: ndarray | AxisNoneT, args, kwargs
193
+ ) -> AxisNoneT | None:
194
+ """
195
+ If 'NDFrame.clip' is called via the numpy library, the third parameter in
196
+ its signature is 'out', which can takes an ndarray, so check if the 'axis'
197
+ parameter is an instance of ndarray, since 'axis' itself should either be
198
+ an integer or None
199
+ """
200
+ if isinstance(axis, ndarray):
201
+ args = (axis,) + args
202
+ # error: Incompatible types in assignment (expression has type "None",
203
+ # variable has type "Union[ndarray[Any, Any], str, int]")
204
+ axis = None # type: ignore[assignment]
205
+
206
+ validate_clip(args, kwargs)
207
+ # error: Incompatible return value type (got "Union[ndarray[Any, Any],
208
+ # str, int]", expected "Union[str, int, None]")
209
+ return axis # type: ignore[return-value]
210
+
211
+
212
+ CUM_FUNC_DEFAULTS: dict[str, Any] = {}
213
+ CUM_FUNC_DEFAULTS["dtype"] = None
214
+ CUM_FUNC_DEFAULTS["out"] = None
215
+ validate_cum_func = CompatValidator(
216
+ CUM_FUNC_DEFAULTS, method="both", max_fname_arg_count=1
217
+ )
218
+ validate_cumsum = CompatValidator(
219
+ CUM_FUNC_DEFAULTS, fname="cumsum", method="both", max_fname_arg_count=1
220
+ )
221
+
222
+
223
+ def validate_cum_func_with_skipna(skipna: bool, args, kwargs, name) -> bool:
224
+ """
225
+ If this function is called via the 'numpy' library, the third parameter in
226
+ its signature is 'dtype', which takes either a 'numpy' dtype or 'None', so
227
+ check if the 'skipna' parameter is a boolean or not
228
+ """
229
+ if not is_bool(skipna):
230
+ args = (skipna,) + args
231
+ skipna = True
232
+ elif isinstance(skipna, np.bool_):
233
+ skipna = bool(skipna)
234
+
235
+ validate_cum_func(args, kwargs, fname=name)
236
+ return skipna
237
+
238
+
239
+ ALLANY_DEFAULTS: dict[str, bool | None] = {}
240
+ ALLANY_DEFAULTS["dtype"] = None
241
+ ALLANY_DEFAULTS["out"] = None
242
+ ALLANY_DEFAULTS["keepdims"] = False
243
+ ALLANY_DEFAULTS["axis"] = None
244
+ validate_all = CompatValidator(
245
+ ALLANY_DEFAULTS, fname="all", method="both", max_fname_arg_count=1
246
+ )
247
+ validate_any = CompatValidator(
248
+ ALLANY_DEFAULTS, fname="any", method="both", max_fname_arg_count=1
249
+ )
250
+
251
+ LOGICAL_FUNC_DEFAULTS = {"out": None, "keepdims": False}
252
+ validate_logical_func = CompatValidator(LOGICAL_FUNC_DEFAULTS, method="kwargs")
253
+
254
+ MINMAX_DEFAULTS = {"axis": None, "dtype": None, "out": None, "keepdims": False}
255
+ validate_min = CompatValidator(
256
+ MINMAX_DEFAULTS, fname="min", method="both", max_fname_arg_count=1
257
+ )
258
+ validate_max = CompatValidator(
259
+ MINMAX_DEFAULTS, fname="max", method="both", max_fname_arg_count=1
260
+ )
261
+
262
+ RESHAPE_DEFAULTS: dict[str, str] = {"order": "C"}
263
+ validate_reshape = CompatValidator(
264
+ RESHAPE_DEFAULTS, fname="reshape", method="both", max_fname_arg_count=1
265
+ )
266
+
267
+ REPEAT_DEFAULTS: dict[str, Any] = {"axis": None}
268
+ validate_repeat = CompatValidator(
269
+ REPEAT_DEFAULTS, fname="repeat", method="both", max_fname_arg_count=1
270
+ )
271
+
272
+ ROUND_DEFAULTS: dict[str, Any] = {"out": None}
273
+ validate_round = CompatValidator(
274
+ ROUND_DEFAULTS, fname="round", method="both", max_fname_arg_count=1
275
+ )
276
+
277
+ SORT_DEFAULTS: dict[str, int | str | None] = {}
278
+ SORT_DEFAULTS["axis"] = -1
279
+ SORT_DEFAULTS["kind"] = "quicksort"
280
+ SORT_DEFAULTS["order"] = None
281
+ validate_sort = CompatValidator(SORT_DEFAULTS, fname="sort", method="kwargs")
282
+
283
+ STAT_FUNC_DEFAULTS: dict[str, Any | None] = {}
284
+ STAT_FUNC_DEFAULTS["dtype"] = None
285
+ STAT_FUNC_DEFAULTS["out"] = None
286
+
287
+ SUM_DEFAULTS = STAT_FUNC_DEFAULTS.copy()
288
+ SUM_DEFAULTS["axis"] = None
289
+ SUM_DEFAULTS["keepdims"] = False
290
+ SUM_DEFAULTS["initial"] = None
291
+
292
+ PROD_DEFAULTS = SUM_DEFAULTS.copy()
293
+
294
+ MEAN_DEFAULTS = SUM_DEFAULTS.copy()
295
+
296
+ MEDIAN_DEFAULTS = STAT_FUNC_DEFAULTS.copy()
297
+ MEDIAN_DEFAULTS["overwrite_input"] = False
298
+ MEDIAN_DEFAULTS["keepdims"] = False
299
+
300
+ STAT_FUNC_DEFAULTS["keepdims"] = False
301
+
302
+ validate_stat_func = CompatValidator(STAT_FUNC_DEFAULTS, method="kwargs")
303
+ validate_sum = CompatValidator(
304
+ SUM_DEFAULTS, fname="sum", method="both", max_fname_arg_count=1
305
+ )
306
+ validate_prod = CompatValidator(
307
+ PROD_DEFAULTS, fname="prod", method="both", max_fname_arg_count=1
308
+ )
309
+ validate_mean = CompatValidator(
310
+ MEAN_DEFAULTS, fname="mean", method="both", max_fname_arg_count=1
311
+ )
312
+ validate_median = CompatValidator(
313
+ MEDIAN_DEFAULTS, fname="median", method="both", max_fname_arg_count=1
314
+ )
315
+
316
+ STAT_DDOF_FUNC_DEFAULTS: dict[str, bool | None] = {}
317
+ STAT_DDOF_FUNC_DEFAULTS["dtype"] = None
318
+ STAT_DDOF_FUNC_DEFAULTS["out"] = None
319
+ STAT_DDOF_FUNC_DEFAULTS["keepdims"] = False
320
+ validate_stat_ddof_func = CompatValidator(STAT_DDOF_FUNC_DEFAULTS, method="kwargs")
321
+
322
+ TAKE_DEFAULTS: dict[str, str | None] = {}
323
+ TAKE_DEFAULTS["out"] = None
324
+ TAKE_DEFAULTS["mode"] = "raise"
325
+ validate_take = CompatValidator(TAKE_DEFAULTS, fname="take", method="kwargs")
326
+
327
+
328
+ def validate_take_with_convert(convert: ndarray | bool | None, args, kwargs) -> bool:
329
+ """
330
+ If this function is called via the 'numpy' library, the third parameter in
331
+ its signature is 'axis', which takes either an ndarray or 'None', so check
332
+ if the 'convert' parameter is either an instance of ndarray or is None
333
+ """
334
+ if isinstance(convert, ndarray) or convert is None:
335
+ args = (convert,) + args
336
+ convert = True
337
+
338
+ validate_take(args, kwargs, max_fname_arg_count=3, method="both")
339
+ return convert
340
+
341
+
342
+ TRANSPOSE_DEFAULTS = {"axes": None}
343
+ validate_transpose = CompatValidator(
344
+ TRANSPOSE_DEFAULTS, fname="transpose", method="both", max_fname_arg_count=0
345
+ )
346
+
347
+
348
+ def validate_groupby_func(name: str, args, kwargs, allowed=None) -> None:
349
+ """
350
+ 'args' and 'kwargs' should be empty, except for allowed kwargs because all
351
+ of their necessary parameters are explicitly listed in the function
352
+ signature
353
+ """
354
+ if allowed is None:
355
+ allowed = []
356
+
357
+ kwargs = set(kwargs) - set(allowed)
358
+
359
+ if len(args) + len(kwargs) > 0:
360
+ raise UnsupportedFunctionCall(
361
+ "numpy operations are not valid with groupby. "
362
+ f"Use .groupby(...).{name}() instead"
363
+ )
364
+
365
+
366
+ RESAMPLER_NUMPY_OPS = ("min", "max", "sum", "prod", "mean", "std", "var")
367
+
368
+
369
+ def validate_resampler_func(method: str, args, kwargs) -> None:
370
+ """
371
+ 'args' and 'kwargs' should be empty because all of their necessary
372
+ parameters are explicitly listed in the function signature
373
+ """
374
+ if len(args) + len(kwargs) > 0:
375
+ if method in RESAMPLER_NUMPY_OPS:
376
+ raise UnsupportedFunctionCall(
377
+ "numpy operations are not valid with resample. "
378
+ f"Use .resample(...).{method}() instead"
379
+ )
380
+ raise TypeError("too many arguments passed in")
381
+
382
+
383
+ def validate_minmax_axis(axis: AxisInt | None, ndim: int = 1) -> None:
384
+ """
385
+ Ensure that the axis argument passed to min, max, argmin, or argmax is zero
386
+ or None, as otherwise it will be incorrectly ignored.
387
+
388
+ Parameters
389
+ ----------
390
+ axis : int or None
391
+ ndim : int, default 1
392
+
393
+ Raises
394
+ ------
395
+ ValueError
396
+ """
397
+ if axis is None:
398
+ return
399
+ if axis >= ndim or (axis < 0 and ndim + axis < 0):
400
+ raise ValueError(f"`axis` must be fewer than the number of dimensions ({ndim})")
401
+
402
+
403
+ _validation_funcs = {
404
+ "median": validate_median,
405
+ "mean": validate_mean,
406
+ "min": validate_min,
407
+ "max": validate_max,
408
+ "sum": validate_sum,
409
+ "prod": validate_prod,
410
+ }
411
+
412
+
413
+ def validate_func(fname, args, kwargs) -> None:
414
+ if fname not in _validation_funcs:
415
+ return validate_stat_func(args, kwargs, fname=fname)
416
+
417
+ validation_func = _validation_funcs[fname]
418
+ return validation_func(args, kwargs)
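A sketch of the validation pattern implemented above: a CompatValidator accepts the numpy-compat arguments only at their default values. The validator below is hypothetical (its defaults mirror validate_round), and the exact exception type raised for a rejected value comes from pandas.util._validators:

from pandas.compat.numpy.function import CompatValidator

validate_demo = CompatValidator({"out": None}, fname="demo",
                                method="both", max_fname_arg_count=1)

validate_demo((), {})             # nothing extra passed: accepted
validate_demo((), {"out": None})  # default value: accepted
try:
    validate_demo((), {"out": "buffer"})  # non-default value: rejected
except Exception as err:
    print(type(err).__name__, err)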
emu3/lib/python3.10/site-packages/pandas/plotting/__init__.py ADDED
@@ -0,0 +1,98 @@
+ """
+ Plotting public API.
+
+ Authors of third-party plotting backends should implement a module with a
+ public ``plot(data, kind, **kwargs)``. The parameter `data` will contain
+ the data structure and can be a `Series` or a `DataFrame`. For example,
+ for ``df.plot()`` the parameter `data` will contain the DataFrame `df`.
+ In some cases, the data structure is transformed before being sent to
+ the backend (see PlotAccessor.__call__ in pandas/plotting/_core.py for
+ the exact transformations).
+
+ The parameter `kind` will be one of:
+
+ - line
+ - bar
+ - barh
+ - box
+ - hist
+ - kde
+ - area
+ - pie
+ - scatter
+ - hexbin
+
+ See the pandas API reference for documentation on each kind of plot.
+
+ Any other keyword argument is currently assumed to be backend specific,
+ but some parameters may be unified and added to the signature in the
+ future (e.g. `title` which should be useful for any backend).
+
+ Currently, all the Matplotlib functions in pandas are accessed through
+ the selected backend. For example, `pandas.plotting.boxplot` (equivalent
+ to `DataFrame.boxplot`) is also accessed in the selected backend. This
+ is expected to change, and the exact API is under discussion. But with
+ the current version, backends are expected to implement the next functions:
+
+ - plot (described above, used for `Series.plot` and `DataFrame.plot`)
+ - hist_series and hist_frame (for `Series.hist` and `DataFrame.hist`)
+ - boxplot (`pandas.plotting.boxplot(df)` equivalent to `DataFrame.boxplot`)
+ - boxplot_frame and boxplot_frame_groupby
+ - register and deregister (register converters for the tick formats)
+ - Plots not called as `Series` and `DataFrame` methods:
+   - table
+   - andrews_curves
+   - autocorrelation_plot
+   - bootstrap_plot
+   - lag_plot
+   - parallel_coordinates
+   - radviz
+   - scatter_matrix
+
+ Use the code in pandas/plotting/_matplotlib.py and
+ https://github.com/pyviz/hvplot as a reference on how to write a backend.
+
+ For the discussion about the API see
+ https://github.com/pandas-dev/pandas/issues/26747.
+ """
+ from pandas.plotting._core import (
+     PlotAccessor,
+     boxplot,
+     boxplot_frame,
+     boxplot_frame_groupby,
+     hist_frame,
+     hist_series,
+ )
+ from pandas.plotting._misc import (
+     andrews_curves,
+     autocorrelation_plot,
+     bootstrap_plot,
+     deregister as deregister_matplotlib_converters,
+     lag_plot,
+     parallel_coordinates,
+     plot_params,
+     radviz,
+     register as register_matplotlib_converters,
+     scatter_matrix,
+     table,
+ )
+
+ __all__ = [
+     "PlotAccessor",
+     "boxplot",
+     "boxplot_frame",
+     "boxplot_frame_groupby",
+     "hist_frame",
+     "hist_series",
+     "scatter_matrix",
+     "radviz",
+     "andrews_curves",
+     "bootstrap_plot",
+     "parallel_coordinates",
+     "lag_plot",
+     "autocorrelation_plot",
+     "table",
+     "plot_params",
+     "register_matplotlib_converters",
+     "deregister_matplotlib_converters",
+ ]
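A hypothetical skeleton of a third-party backend module ("my_backend") following the contract described in the docstring above. Only plot() is exercised by Series.plot/DataFrame.plot; the registration and selection details (the 'pandas_plotting_backends' entry-point group and pd.set_option) are assumptions to be checked against the pandas docs:

def plot(data, kind, **kwargs):
    # data is the Series/DataFrame (possibly pre-transformed by pandas);
    # kind is one of the strings listed above ('line', 'bar', ...).
    print(f"would draw a {kind} plot of a {type(data).__name__} with {len(data)} rows")

def register():
    # converter registration hook named in the docstring above
    pass

def deregister():
    pass

# Selecting the backend for a session (assuming the module is installed and
# exposed through the entry point):
#   import pandas as pd
#   pd.set_option("plotting.backend", "my_backend")
#   pd.Series([1, 2, 3]).plot(kind="line")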
emu3/lib/python3.10/site-packages/pandas/plotting/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (2.79 kB). View file
 
emu3/lib/python3.10/site-packages/pandas/plotting/__pycache__/_core.cpython-310.pyc ADDED
Binary file (61.2 kB). View file
 
emu3/lib/python3.10/site-packages/pandas/plotting/__pycache__/_misc.cpython-310.pyc ADDED
Binary file (21.2 kB). View file
 
emu3/lib/python3.10/site-packages/pandas/plotting/_core.py ADDED
@@ -0,0 +1,1946 @@
1
+ from __future__ import annotations
2
+
3
+ import importlib
4
+ from typing import (
5
+ TYPE_CHECKING,
6
+ Callable,
7
+ Literal,
8
+ )
9
+
10
+ from pandas._config import get_option
11
+
12
+ from pandas.util._decorators import (
13
+ Appender,
14
+ Substitution,
15
+ )
16
+
17
+ from pandas.core.dtypes.common import (
18
+ is_integer,
19
+ is_list_like,
20
+ )
21
+ from pandas.core.dtypes.generic import (
22
+ ABCDataFrame,
23
+ ABCSeries,
24
+ )
25
+
26
+ from pandas.core.base import PandasObject
27
+
28
+ if TYPE_CHECKING:
29
+ from collections.abc import (
30
+ Hashable,
31
+ Sequence,
32
+ )
33
+ import types
34
+
35
+ from matplotlib.axes import Axes
36
+ import numpy as np
37
+
38
+ from pandas._typing import IndexLabel
39
+
40
+ from pandas import (
41
+ DataFrame,
42
+ Series,
43
+ )
44
+ from pandas.core.groupby.generic import DataFrameGroupBy
45
+
46
+
47
+ def hist_series(
48
+ self: Series,
49
+ by=None,
50
+ ax=None,
51
+ grid: bool = True,
52
+ xlabelsize: int | None = None,
53
+ xrot: float | None = None,
54
+ ylabelsize: int | None = None,
55
+ yrot: float | None = None,
56
+ figsize: tuple[int, int] | None = None,
57
+ bins: int | Sequence[int] = 10,
58
+ backend: str | None = None,
59
+ legend: bool = False,
60
+ **kwargs,
61
+ ):
62
+ """
63
+ Draw histogram of the input series using matplotlib.
64
+
65
+ Parameters
66
+ ----------
67
+ by : object, optional
68
+ If passed, then used to form histograms for separate groups.
69
+ ax : matplotlib axis object
70
+ If not passed, uses gca().
71
+ grid : bool, default True
72
+ Whether to show axis grid lines.
73
+ xlabelsize : int, default None
74
+ If specified changes the x-axis label size.
75
+ xrot : float, default None
76
+ Rotation of x axis labels.
77
+ ylabelsize : int, default None
78
+ If specified changes the y-axis label size.
79
+ yrot : float, default None
80
+ Rotation of y axis labels.
81
+ figsize : tuple, default None
82
+ Figure size in inches by default.
83
+ bins : int or sequence, default 10
84
+ Number of histogram bins to be used. If an integer is given, bins + 1
85
+ bin edges are calculated and returned. If bins is a sequence, gives
86
+ bin edges, including left edge of first bin and right edge of last
87
+ bin. In this case, bins is returned unmodified.
88
+ backend : str, default None
89
+ Backend to use instead of the backend specified in the option
90
+ ``plotting.backend``. For instance, 'matplotlib'. Alternatively, to
91
+ specify the ``plotting.backend`` for the whole session, set
92
+ ``pd.options.plotting.backend``.
93
+ legend : bool, default False
94
+ Whether to show the legend.
95
+
96
+ **kwargs
97
+ To be passed to the actual plotting function.
98
+
99
+ Returns
100
+ -------
101
+ matplotlib.AxesSubplot
102
+ A histogram plot.
103
+
104
+ See Also
105
+ --------
106
+ matplotlib.axes.Axes.hist : Plot a histogram using matplotlib.
107
+
108
+ Examples
109
+ --------
110
+ For Series:
111
+
112
+ .. plot::
113
+ :context: close-figs
114
+
115
+ >>> lst = ['a', 'a', 'a', 'b', 'b', 'b']
116
+ >>> ser = pd.Series([1, 2, 2, 4, 6, 6], index=lst)
117
+ >>> hist = ser.hist()
118
+
119
+ For Groupby:
120
+
121
+ .. plot::
122
+ :context: close-figs
123
+
124
+ >>> lst = ['a', 'a', 'a', 'b', 'b', 'b']
125
+ >>> ser = pd.Series([1, 2, 2, 4, 6, 6], index=lst)
126
+ >>> hist = ser.groupby(level=0).hist()
127
+ """
128
+ plot_backend = _get_plot_backend(backend)
129
+ return plot_backend.hist_series(
130
+ self,
131
+ by=by,
132
+ ax=ax,
133
+ grid=grid,
134
+ xlabelsize=xlabelsize,
135
+ xrot=xrot,
136
+ ylabelsize=ylabelsize,
137
+ yrot=yrot,
138
+ figsize=figsize,
139
+ bins=bins,
140
+ legend=legend,
141
+ **kwargs,
142
+ )
143
+
144
+
145
+ def hist_frame(
146
+ data: DataFrame,
147
+ column: IndexLabel | None = None,
148
+ by=None,
149
+ grid: bool = True,
150
+ xlabelsize: int | None = None,
151
+ xrot: float | None = None,
152
+ ylabelsize: int | None = None,
153
+ yrot: float | None = None,
154
+ ax=None,
155
+ sharex: bool = False,
156
+ sharey: bool = False,
157
+ figsize: tuple[int, int] | None = None,
158
+ layout: tuple[int, int] | None = None,
159
+ bins: int | Sequence[int] = 10,
160
+ backend: str | None = None,
161
+ legend: bool = False,
162
+ **kwargs,
163
+ ):
164
+ """
165
+ Make a histogram of the DataFrame's columns.
166
+
167
+ A `histogram`_ is a representation of the distribution of data.
168
+ This function calls :meth:`matplotlib.pyplot.hist`, on each series in
169
+ the DataFrame, resulting in one histogram per column.
170
+
171
+ .. _histogram: https://en.wikipedia.org/wiki/Histogram
172
+
173
+ Parameters
174
+ ----------
175
+ data : DataFrame
176
+ The pandas object holding the data.
177
+ column : str or sequence, optional
178
+ If passed, will be used to limit data to a subset of columns.
179
+ by : object, optional
180
+ If passed, then used to form histograms for separate groups.
181
+ grid : bool, default True
182
+ Whether to show axis grid lines.
183
+ xlabelsize : int, default None
184
+ If specified changes the x-axis label size.
185
+ xrot : float, default None
186
+ Rotation of x axis labels. For example, a value of 90 displays the
187
+ x labels rotated 90 degrees clockwise.
188
+ ylabelsize : int, default None
189
+ If specified changes the y-axis label size.
190
+ yrot : float, default None
191
+ Rotation of y axis labels. For example, a value of 90 displays the
192
+ y labels rotated 90 degrees clockwise.
193
+ ax : Matplotlib axes object, default None
194
+ The axes to plot the histogram on.
195
+ sharex : bool, default True if ax is None else False
196
+ In case subplots=True, share x axis and set some x axis labels to
197
+ invisible; defaults to True if ax is None otherwise False if an ax
198
+ is passed in.
199
+ Note that passing in both an ax and sharex=True will alter all x axis
200
+ labels for all subplots in a figure.
201
+ sharey : bool, default False
202
+ In case subplots=True, share y axis and set some y axis labels to
203
+ invisible.
204
+ figsize : tuple, optional
205
+ The size in inches of the figure to create. Uses the value in
206
+ `matplotlib.rcParams` by default.
207
+ layout : tuple, optional
208
+ Tuple of (rows, columns) for the layout of the histograms.
209
+ bins : int or sequence, default 10
210
+ Number of histogram bins to be used. If an integer is given, bins + 1
211
+ bin edges are calculated and returned. If bins is a sequence, gives
212
+ bin edges, including left edge of first bin and right edge of last
213
+ bin. In this case, bins is returned unmodified.
214
+
215
+ backend : str, default None
216
+ Backend to use instead of the backend specified in the option
217
+ ``plotting.backend``. For instance, 'matplotlib'. Alternatively, to
218
+ specify the ``plotting.backend`` for the whole session, set
219
+ ``pd.options.plotting.backend``.
220
+
221
+ legend : bool, default False
222
+ Whether to show the legend.
223
+
224
+ **kwargs
225
+ All other plotting keyword arguments to be passed to
226
+ :meth:`matplotlib.pyplot.hist`.
227
+
228
+ Returns
229
+ -------
230
+ matplotlib.AxesSubplot or numpy.ndarray of them
231
+
232
+ See Also
233
+ --------
234
+ matplotlib.pyplot.hist : Plot a histogram using matplotlib.
235
+
236
+ Examples
237
+ --------
238
+ This example draws a histogram based on the length and width of
239
+ some animals, displayed in three bins
240
+
241
+ .. plot::
242
+ :context: close-figs
243
+
244
+ >>> data = {'length': [1.5, 0.5, 1.2, 0.9, 3],
245
+ ... 'width': [0.7, 0.2, 0.15, 0.2, 1.1]}
246
+ >>> index = ['pig', 'rabbit', 'duck', 'chicken', 'horse']
247
+ >>> df = pd.DataFrame(data, index=index)
248
+ >>> hist = df.hist(bins=3)
249
+ """
250
+ plot_backend = _get_plot_backend(backend)
251
+ return plot_backend.hist_frame(
252
+ data,
253
+ column=column,
254
+ by=by,
255
+ grid=grid,
256
+ xlabelsize=xlabelsize,
257
+ xrot=xrot,
258
+ ylabelsize=ylabelsize,
259
+ yrot=yrot,
260
+ ax=ax,
261
+ sharex=sharex,
262
+ sharey=sharey,
263
+ figsize=figsize,
264
+ layout=layout,
265
+ legend=legend,
266
+ bins=bins,
267
+ **kwargs,
268
+ )
269
+
270
+
271
+ _boxplot_doc = """
272
+ Make a box plot from DataFrame columns.
273
+
274
+ Make a box-and-whisker plot from DataFrame columns, optionally grouped
275
+ by some other columns. A box plot is a method for graphically depicting
276
+ groups of numerical data through their quartiles.
277
+ The box extends from the Q1 to Q3 quartile values of the data,
278
+ with a line at the median (Q2). The whiskers extend from the edges
279
+ of box to show the range of the data. By default, they extend no more than
280
+ `1.5 * IQR (IQR = Q3 - Q1)` from the edges of the box, ending at the farthest
281
+ data point within that interval. Outliers are plotted as separate dots.
282
+
283
+ For further details see
284
+ Wikipedia's entry for `boxplot <https://en.wikipedia.org/wiki/Box_plot>`_.
285
+
286
+ Parameters
287
+ ----------
288
+ %(data)s\
289
+ column : str or list of str, optional
290
+ Column name or list of names, or vector.
291
+ Can be any valid input to :meth:`pandas.DataFrame.groupby`.
292
+ by : str or array-like, optional
293
+ Column in the DataFrame to :meth:`pandas.DataFrame.groupby`.
294
+ One box-plot will be done per value of columns in `by`.
295
+ ax : object of class matplotlib.axes.Axes, optional
296
+ The matplotlib axes to be used by boxplot.
297
+ fontsize : float or str
298
+ Tick label font size in points or as a string (e.g., `large`).
299
+ rot : float, default 0
300
+ The rotation angle of labels (in degrees)
301
+ with respect to the screen coordinate system.
302
+ grid : bool, default True
303
+ Setting this to True will show the grid.
304
+ figsize : A tuple (width, height) in inches
305
+ The size of the figure to create in matplotlib.
306
+ layout : tuple (rows, columns), optional
307
+ For example, (3, 5) will display the subplots
308
+ using 3 rows and 5 columns, starting from the top-left.
309
+ return_type : {'axes', 'dict', 'both'} or None, default 'axes'
310
+ The kind of object to return. The default is ``axes``.
311
+
312
+ * 'axes' returns the matplotlib axes the boxplot is drawn on.
313
+ * 'dict' returns a dictionary whose values are the matplotlib
314
+ Lines of the boxplot.
315
+ * 'both' returns a namedtuple with the axes and dict.
316
+ * when grouping with ``by``, a Series mapping columns to
317
+ ``return_type`` is returned.
318
+
319
+ If ``return_type`` is `None`, a NumPy array
320
+ of axes with the same shape as ``layout`` is returned.
321
+ %(backend)s\
322
+
323
+ **kwargs
324
+ All other plotting keyword arguments to be passed to
325
+ :func:`matplotlib.pyplot.boxplot`.
326
+
327
+ Returns
328
+ -------
329
+ result
330
+ See Notes.
331
+
332
+ See Also
333
+ --------
334
+ pandas.Series.plot.hist: Make a histogram.
335
+ matplotlib.pyplot.boxplot : Matplotlib equivalent plot.
336
+
337
+ Notes
338
+ -----
339
+ The return type depends on the `return_type` parameter:
340
+
341
+ * 'axes' : object of class matplotlib.axes.Axes
342
+ * 'dict' : dict of matplotlib.lines.Line2D objects
343
+ * 'both' : a namedtuple with structure (ax, lines)
344
+
345
+ For data grouped with ``by``, return a Series of the above or a numpy
346
+ array:
347
+
348
+ * :class:`~pandas.Series`
349
+ * :class:`~numpy.array` (for ``return_type = None``)
350
+
351
+ Use ``return_type='dict'`` when you want to tweak the appearance
352
+ of the lines after plotting. In this case a dict containing the Lines
353
+ making up the boxes, caps, fliers, medians, and whiskers is returned.
354
+
355
+ Examples
356
+ --------
357
+
358
+ Boxplots can be created for every column in the dataframe
359
+ by ``df.boxplot()`` or indicating the columns to be used:
360
+
361
+ .. plot::
362
+ :context: close-figs
363
+
364
+ >>> np.random.seed(1234)
365
+ >>> df = pd.DataFrame(np.random.randn(10, 4),
366
+ ... columns=['Col1', 'Col2', 'Col3', 'Col4'])
367
+ >>> boxplot = df.boxplot(column=['Col1', 'Col2', 'Col3']) # doctest: +SKIP
368
+
369
+ Boxplots of variables distributions grouped by the values of a third
370
+ variable can be created using the option ``by``. For instance:
371
+
372
+ .. plot::
373
+ :context: close-figs
374
+
375
+ >>> df = pd.DataFrame(np.random.randn(10, 2),
376
+ ... columns=['Col1', 'Col2'])
377
+ >>> df['X'] = pd.Series(['A', 'A', 'A', 'A', 'A',
378
+ ... 'B', 'B', 'B', 'B', 'B'])
379
+ >>> boxplot = df.boxplot(by='X')
380
+
381
+ A list of strings (i.e. ``['X', 'Y']``) can be passed to boxplot
382
+ in order to group the data by combination of the variables in the x-axis:
383
+
384
+ .. plot::
385
+ :context: close-figs
386
+
387
+ >>> df = pd.DataFrame(np.random.randn(10, 3),
388
+ ... columns=['Col1', 'Col2', 'Col3'])
389
+ >>> df['X'] = pd.Series(['A', 'A', 'A', 'A', 'A',
390
+ ... 'B', 'B', 'B', 'B', 'B'])
391
+ >>> df['Y'] = pd.Series(['A', 'B', 'A', 'B', 'A',
392
+ ... 'B', 'A', 'B', 'A', 'B'])
393
+ >>> boxplot = df.boxplot(column=['Col1', 'Col2'], by=['X', 'Y'])
394
+
395
+ The layout of boxplot can be adjusted giving a tuple to ``layout``:
396
+
397
+ .. plot::
398
+ :context: close-figs
399
+
400
+ >>> boxplot = df.boxplot(column=['Col1', 'Col2'], by='X',
401
+ ... layout=(2, 1))
402
+
403
+ Additional formatting can be done to the boxplot, like suppressing the grid
404
+ (``grid=False``), rotating the labels in the x-axis (i.e. ``rot=45``)
405
+ or changing the fontsize (i.e. ``fontsize=15``):
406
+
407
+ .. plot::
408
+ :context: close-figs
409
+
410
+ >>> boxplot = df.boxplot(grid=False, rot=45, fontsize=15) # doctest: +SKIP
411
+
412
+ The parameter ``return_type`` can be used to select the type of element
413
+ returned by `boxplot`. When ``return_type='axes'`` is selected,
414
+ the matplotlib axes on which the boxplot is drawn are returned:
415
+
416
+ >>> boxplot = df.boxplot(column=['Col1', 'Col2'], return_type='axes')
417
+ >>> type(boxplot)
418
+ <class 'matplotlib.axes._axes.Axes'>
419
+
420
+ When grouping with ``by``, a Series mapping columns to ``return_type``
421
+ is returned:
422
+
423
+ >>> boxplot = df.boxplot(column=['Col1', 'Col2'], by='X',
424
+ ... return_type='axes')
425
+ >>> type(boxplot)
426
+ <class 'pandas.core.series.Series'>
427
+
428
+ If ``return_type`` is `None`, a NumPy array of axes with the same shape
429
+ as ``layout`` is returned:
430
+
431
+ >>> boxplot = df.boxplot(column=['Col1', 'Col2'], by='X',
432
+ ... return_type=None)
433
+ >>> type(boxplot)
434
+ <class 'numpy.ndarray'>
435
+ """
436
+
437
+ _backend_doc = """\
438
+ backend : str, default None
439
+ Backend to use instead of the backend specified in the option
440
+ ``plotting.backend``. For instance, 'matplotlib'. Alternatively, to
441
+ specify the ``plotting.backend`` for the whole session, set
442
+ ``pd.options.plotting.backend``.
443
+ """
444
+
445
+
446
+ _bar_or_line_doc = """
447
+ Parameters
448
+ ----------
449
+ x : label or position, optional
450
+ Allows plotting of one column versus another. If not specified,
451
+ the index of the DataFrame is used.
452
+ y : label or position, optional
453
+ Allows plotting of one column versus another. If not specified,
454
+ all numerical columns are used.
455
+ color : str, array-like, or dict, optional
456
+ The color for each of the DataFrame's columns. Possible values are:
457
+
458
+ - A single color string referred to by name, RGB or RGBA code,
459
+ for instance 'red' or '#a98d19'.
460
+
461
+ - A sequence of color strings referred to by name, RGB or RGBA
462
+ code, which will be used for each column recursively. For
463
+ instance ['green','yellow'] each column's %(kind)s will be filled in
464
+ green or yellow, alternatively. If there is only a single column to
465
+ be plotted, then only the first color from the color list will be
466
+ used.
467
+
468
+ - A dict of the form {column name : color}, so that each column will be
469
+ colored accordingly. For example, if your columns are called `a` and
470
+ `b`, then passing {'a': 'green', 'b': 'red'} will color %(kind)ss for
471
+ column `a` in green and %(kind)ss for column `b` in red.
472
+
473
+ **kwargs
474
+ Additional keyword arguments are documented in
475
+ :meth:`DataFrame.plot`.
476
+
477
+ Returns
478
+ -------
479
+ matplotlib.axes.Axes or np.ndarray of them
480
+ An ndarray is returned with one :class:`matplotlib.axes.Axes`
481
+ per column when ``subplots=True``.
482
+ """
483
+
484
+
485
+ @Substitution(data="data : DataFrame\n The data to visualize.\n", backend="")
486
+ @Appender(_boxplot_doc)
487
+ def boxplot(
488
+ data: DataFrame,
489
+ column: str | list[str] | None = None,
490
+ by: str | list[str] | None = None,
491
+ ax: Axes | None = None,
492
+ fontsize: float | str | None = None,
493
+ rot: int = 0,
494
+ grid: bool = True,
495
+ figsize: tuple[float, float] | None = None,
496
+ layout: tuple[int, int] | None = None,
497
+ return_type: str | None = None,
498
+ **kwargs,
499
+ ):
500
+ plot_backend = _get_plot_backend("matplotlib")
501
+ return plot_backend.boxplot(
502
+ data,
503
+ column=column,
504
+ by=by,
505
+ ax=ax,
506
+ fontsize=fontsize,
507
+ rot=rot,
508
+ grid=grid,
509
+ figsize=figsize,
510
+ layout=layout,
511
+ return_type=return_type,
512
+ **kwargs,
513
+ )
514
+
515
+
516
+ @Substitution(data="", backend=_backend_doc)
517
+ @Appender(_boxplot_doc)
518
+ def boxplot_frame(
519
+ self: DataFrame,
520
+ column=None,
521
+ by=None,
522
+ ax=None,
523
+ fontsize: int | None = None,
524
+ rot: int = 0,
525
+ grid: bool = True,
526
+ figsize: tuple[float, float] | None = None,
527
+ layout=None,
528
+ return_type=None,
529
+ backend=None,
530
+ **kwargs,
531
+ ):
532
+ plot_backend = _get_plot_backend(backend)
533
+ return plot_backend.boxplot_frame(
534
+ self,
535
+ column=column,
536
+ by=by,
537
+ ax=ax,
538
+ fontsize=fontsize,
539
+ rot=rot,
540
+ grid=grid,
541
+ figsize=figsize,
542
+ layout=layout,
543
+ return_type=return_type,
544
+ **kwargs,
545
+ )
546
+
547
+
548
+ def boxplot_frame_groupby(
549
+ grouped: DataFrameGroupBy,
550
+ subplots: bool = True,
551
+ column=None,
552
+ fontsize: int | None = None,
553
+ rot: int = 0,
554
+ grid: bool = True,
555
+ ax=None,
556
+ figsize: tuple[float, float] | None = None,
557
+ layout=None,
558
+ sharex: bool = False,
559
+ sharey: bool = True,
560
+ backend=None,
561
+ **kwargs,
562
+ ):
563
+ """
564
+ Make box plots from DataFrameGroupBy data.
565
+
566
+ Parameters
567
+ ----------
568
+ grouped : Grouped DataFrame
569
+ subplots : bool
570
+ * ``False`` - no subplots will be used
571
+ * ``True`` - create a subplot for each group.
572
+
573
+ column : column name or list of names, or vector
574
+ Can be any valid input to groupby.
575
+ fontsize : float or str
576
+ rot : label rotation angle
577
+ grid : Setting this to True will show the grid
578
+ ax : Matplotlib axis object, default None
579
+ figsize : A tuple (width, height) in inches
580
+ layout : tuple (optional)
581
+ The layout of the plot: (rows, columns).
582
+ sharex : bool, default False
583
+ Whether x-axes will be shared among subplots.
584
+ sharey : bool, default True
585
+ Whether y-axes will be shared among subplots.
586
+ backend : str, default None
587
+ Backend to use instead of the backend specified in the option
588
+ ``plotting.backend``. For instance, 'matplotlib'. Alternatively, to
589
+ specify the ``plotting.backend`` for the whole session, set
590
+ ``pd.options.plotting.backend``.
591
+ **kwargs
592
+ All other plotting keyword arguments to be passed to
593
+ matplotlib's boxplot function.
594
+
595
+ Returns
596
+ -------
597
+ dict of key/value = group key/DataFrame.boxplot return value
598
+ or DataFrame.boxplot return value in case subplots=figures=False
599
+
600
+ Examples
601
+ --------
602
+ You can create boxplots for grouped data and show them as separate subplots:
603
+
604
+ .. plot::
605
+ :context: close-figs
606
+
607
+ >>> import itertools
608
+ >>> tuples = [t for t in itertools.product(range(1000), range(4))]
609
+ >>> index = pd.MultiIndex.from_tuples(tuples, names=['lvl0', 'lvl1'])
610
+ >>> data = np.random.randn(len(index), 4)
611
+ >>> df = pd.DataFrame(data, columns=list('ABCD'), index=index)
612
+ >>> grouped = df.groupby(level='lvl1')
613
+ >>> grouped.boxplot(rot=45, fontsize=12, figsize=(8, 10)) # doctest: +SKIP
614
+
615
+ The ``subplots=False`` option shows the boxplots in a single figure.
616
+
617
+ .. plot::
618
+ :context: close-figs
619
+
620
+ >>> grouped.boxplot(subplots=False, rot=45, fontsize=12) # doctest: +SKIP
621
+ """
622
+ plot_backend = _get_plot_backend(backend)
623
+ return plot_backend.boxplot_frame_groupby(
624
+ grouped,
625
+ subplots=subplots,
626
+ column=column,
627
+ fontsize=fontsize,
628
+ rot=rot,
629
+ grid=grid,
630
+ ax=ax,
631
+ figsize=figsize,
632
+ layout=layout,
633
+ sharex=sharex,
634
+ sharey=sharey,
635
+ **kwargs,
636
+ )
637
+
638
+
639
+ class PlotAccessor(PandasObject):
640
+ """
641
+ Make plots of Series or DataFrame.
642
+
643
+ Uses the backend specified by the
644
+ option ``plotting.backend``. By default, matplotlib is used.
645
+
646
+ Parameters
647
+ ----------
648
+ data : Series or DataFrame
649
+ The object for which the method is called.
650
+ x : label or position, default None
651
+ Only used if data is a DataFrame.
652
+ y : label, position or list of label, positions, default None
653
+ Allows plotting of one column versus another. Only used if data is a
654
+ DataFrame.
655
+ kind : str
656
+ The kind of plot to produce:
657
+
658
+ - 'line' : line plot (default)
659
+ - 'bar' : vertical bar plot
660
+ - 'barh' : horizontal bar plot
661
+ - 'hist' : histogram
662
+ - 'box' : boxplot
663
+ - 'kde' : Kernel Density Estimation plot
664
+ - 'density' : same as 'kde'
665
+ - 'area' : area plot
666
+ - 'pie' : pie plot
667
+ - 'scatter' : scatter plot (DataFrame only)
668
+ - 'hexbin' : hexbin plot (DataFrame only)
669
+ ax : matplotlib axes object, default None
670
+ An axes of the current figure.
671
+ subplots : bool or sequence of iterables, default False
672
+ Whether to group columns into subplots:
673
+
674
+ - ``False`` : No subplots will be used
675
+ - ``True`` : Make separate subplots for each column.
676
+ - sequence of iterables of column labels: Create a subplot for each
677
+ group of columns. For example `[('a', 'c'), ('b', 'd')]` will
678
+ create 2 subplots: one with columns 'a' and 'c', and one
679
+ with columns 'b' and 'd'. Remaining columns that aren't specified
680
+ will be plotted in additional subplots (one per column).
681
+
682
+ .. versionadded:: 1.5.0
683
+
684
+ sharex : bool, default True if ax is None else False
685
+ In case ``subplots=True``, share x axis and set some x axis labels
686
+ to invisible; defaults to True if ax is None otherwise False if
687
+ an ax is passed in; Be aware, that passing in both an ax and
688
+ ``sharex=True`` will alter all x axis labels for all axis in a figure.
689
+ sharey : bool, default False
690
+ In case ``subplots=True``, share y axis and set some y axis labels to invisible.
691
+ layout : tuple, optional
692
+ (rows, columns) for the layout of subplots.
693
+ figsize : a tuple (width, height) in inches
694
+ Size of a figure object.
695
+ use_index : bool, default True
696
+ Use index as ticks for x axis.
697
+ title : str or list
698
+ Title to use for the plot. If a string is passed, print the string
699
+ at the top of the figure. If a list is passed and `subplots` is
700
+ True, print each item in the list above the corresponding subplot.
701
+ grid : bool, default None (matlab style default)
702
+ Axis grid lines.
703
+ legend : bool or {'reverse'}
704
+ Place legend on axis subplots.
705
+ style : list or dict
706
+ The matplotlib line style per column.
707
+ logx : bool or 'sym', default False
708
+ Use log scaling or symlog scaling on x axis.
709
+
710
+ logy : bool or 'sym' default False
711
+ Use log scaling or symlog scaling on y axis.
712
+
713
+ loglog : bool or 'sym', default False
714
+ Use log scaling or symlog scaling on both x and y axes.
715
+
716
+ xticks : sequence
717
+ Values to use for the xticks.
718
+ yticks : sequence
719
+ Values to use for the yticks.
720
+ xlim : 2-tuple/list
721
+ Set the x limits of the current axes.
722
+ ylim : 2-tuple/list
723
+ Set the y limits of the current axes.
724
+ xlabel : label, optional
725
+ Name to use for the xlabel on x-axis. Default uses index name as xlabel, or the
726
+ x-column name for planar plots.
727
+
728
+ .. versionchanged:: 2.0.0
729
+
730
+ Now applicable to histograms.
731
+
732
+ ylabel : label, optional
733
+ Name to use for the ylabel on y-axis. Default will show no ylabel, or the
734
+ y-column name for planar plots.
735
+
736
+ .. versionchanged:: 2.0.0
737
+
738
+ Now applicable to histograms.
739
+
740
+ rot : float, default None
741
+ Rotation for ticks (xticks for vertical, yticks for horizontal
742
+ plots).
743
+ fontsize : float, default None
744
+ Font size for xticks and yticks.
745
+ colormap : str or matplotlib colormap object, default None
746
+ Colormap to select colors from. If string, load colormap with that
747
+ name from matplotlib.
748
+ colorbar : bool, optional
749
+ If True, plot colorbar (only relevant for 'scatter' and 'hexbin'
750
+ plots).
751
+ position : float
752
+ Specify relative alignments for bar plot layout.
753
+ From 0 (left/bottom-end) to 1 (right/top-end). Default is 0.5
754
+ (center).
755
+ table : bool, Series or DataFrame, default False
756
+ If True, draw a table using the data in the DataFrame and the data
757
+ will be transposed to meet matplotlib's default layout.
758
+ If a Series or DataFrame is passed, use passed data to draw a
759
+ table.
760
+ yerr : DataFrame, Series, array-like, dict and str
761
+ See :ref:`Plotting with Error Bars <visualization.errorbars>` for
762
+ detail.
763
+ xerr : DataFrame, Series, array-like, dict and str
764
+ Equivalent to yerr.
765
+ stacked : bool, default False in line and bar plots, and True in area plot
766
+ If True, create stacked plot.
767
+ secondary_y : bool or sequence, default False
768
+ Whether to plot on the secondary y-axis if a list/tuple, which
769
+ columns to plot on secondary y-axis.
770
+ mark_right : bool, default True
771
+ When using a secondary_y axis, automatically mark the column
772
+ labels with "(right)" in the legend.
773
+ include_bool : bool, default is False
774
+ If True, boolean values can be plotted.
775
+ backend : str, default None
776
+ Backend to use instead of the backend specified in the option
777
+ ``plotting.backend``. For instance, 'matplotlib'. Alternatively, to
778
+ specify the ``plotting.backend`` for the whole session, set
779
+ ``pd.options.plotting.backend``.
780
+ **kwargs
781
+ Options to pass to matplotlib plotting method.
782
+
783
+ Returns
784
+ -------
785
+ :class:`matplotlib.axes.Axes` or numpy.ndarray of them
786
+ If the backend is not the default matplotlib one, the return value
787
+ will be the object returned by the backend.
788
+
789
+ Notes
790
+ -----
791
+ - See matplotlib documentation online for more on this subject
792
+ - If `kind` = 'bar' or 'barh', you can specify relative alignments
793
+ for bar plot layout by `position` keyword.
794
+ From 0 (left/bottom-end) to 1 (right/top-end). Default is 0.5
795
+ (center)
796
+
797
+ Examples
798
+ --------
799
+ For Series:
800
+
801
+ .. plot::
802
+ :context: close-figs
803
+
804
+ >>> ser = pd.Series([1, 2, 3, 3])
805
+ >>> plot = ser.plot(kind='hist', title="My plot")
806
+
807
+ For DataFrame:
808
+
809
+ .. plot::
810
+ :context: close-figs
811
+
812
+ >>> df = pd.DataFrame({'length': [1.5, 0.5, 1.2, 0.9, 3],
813
+ ... 'width': [0.7, 0.2, 0.15, 0.2, 1.1]},
814
+ ... index=['pig', 'rabbit', 'duck', 'chicken', 'horse'])
815
+ >>> plot = df.plot(title="DataFrame Plot")
816
+
817
+ For SeriesGroupBy:
818
+
819
+ .. plot::
820
+ :context: close-figs
821
+
822
+ >>> lst = [-1, -2, -3, 1, 2, 3]
823
+ >>> ser = pd.Series([1, 2, 2, 4, 6, 6], index=lst)
824
+ >>> plot = ser.groupby(lambda x: x > 0).plot(title="SeriesGroupBy Plot")
825
+
826
+ For DataFrameGroupBy:
827
+
828
+ .. plot::
829
+ :context: close-figs
830
+
831
+ >>> df = pd.DataFrame({"col1" : [1, 2, 3, 4],
832
+ ... "col2" : ["A", "B", "A", "B"]})
833
+ >>> plot = df.groupby("col2").plot(kind="bar", title="DataFrameGroupBy Plot")
834
+ """
835
+
836
+ _common_kinds = ("line", "bar", "barh", "kde", "density", "area", "hist", "box")
837
+ _series_kinds = ("pie",)
838
+ _dataframe_kinds = ("scatter", "hexbin")
839
+ _kind_aliases = {"density": "kde"}
840
+ _all_kinds = _common_kinds + _series_kinds + _dataframe_kinds
841
+
842
+ def __init__(self, data: Series | DataFrame) -> None:
843
+ self._parent = data
844
+
845
+ @staticmethod
846
+ def _get_call_args(backend_name: str, data: Series | DataFrame, args, kwargs):
847
+ """
848
+ This function makes calls to this accessor's `__call__` method compatible
849
+ with the previous `SeriesPlotMethods.__call__` and
850
+ `DataFramePlotMethods.__call__`. Those had slightly different
851
+ signatures, since `DataFramePlotMethods` accepted `x` and `y`
852
+ parameters.
853
+ """
854
+ if isinstance(data, ABCSeries):
855
+ arg_def = [
856
+ ("kind", "line"),
857
+ ("ax", None),
858
+ ("figsize", None),
859
+ ("use_index", True),
860
+ ("title", None),
861
+ ("grid", None),
862
+ ("legend", False),
863
+ ("style", None),
864
+ ("logx", False),
865
+ ("logy", False),
866
+ ("loglog", False),
867
+ ("xticks", None),
868
+ ("yticks", None),
869
+ ("xlim", None),
870
+ ("ylim", None),
871
+ ("rot", None),
872
+ ("fontsize", None),
873
+ ("colormap", None),
874
+ ("table", False),
875
+ ("yerr", None),
876
+ ("xerr", None),
877
+ ("label", None),
878
+ ("secondary_y", False),
879
+ ("xlabel", None),
880
+ ("ylabel", None),
881
+ ]
882
+ elif isinstance(data, ABCDataFrame):
883
+ arg_def = [
884
+ ("x", None),
885
+ ("y", None),
886
+ ("kind", "line"),
887
+ ("ax", None),
888
+ ("subplots", False),
889
+ ("sharex", None),
890
+ ("sharey", False),
891
+ ("layout", None),
892
+ ("figsize", None),
893
+ ("use_index", True),
894
+ ("title", None),
895
+ ("grid", None),
896
+ ("legend", True),
897
+ ("style", None),
898
+ ("logx", False),
899
+ ("logy", False),
900
+ ("loglog", False),
901
+ ("xticks", None),
902
+ ("yticks", None),
903
+ ("xlim", None),
904
+ ("ylim", None),
905
+ ("rot", None),
906
+ ("fontsize", None),
907
+ ("colormap", None),
908
+ ("table", False),
909
+ ("yerr", None),
910
+ ("xerr", None),
911
+ ("secondary_y", False),
912
+ ("xlabel", None),
913
+ ("ylabel", None),
914
+ ]
915
+ else:
916
+ raise TypeError(
917
+ f"Called plot accessor for type {type(data).__name__}, "
918
+ "expected Series or DataFrame"
919
+ )
920
+
921
+ if args and isinstance(data, ABCSeries):
922
+ positional_args = str(args)[1:-1]
923
+ keyword_args = ", ".join(
924
+ [f"{name}={repr(value)}" for (name, _), value in zip(arg_def, args)]
925
+ )
926
+ msg = (
927
+ "`Series.plot()` should not be called with positional "
928
+ "arguments, only keyword arguments. The order of "
929
+ "positional arguments will change in the future. "
930
+ f"Use `Series.plot({keyword_args})` instead of "
931
+ f"`Series.plot({positional_args})`."
932
+ )
933
+ raise TypeError(msg)
934
+
935
+ pos_args = {name: value for (name, _), value in zip(arg_def, args)}
936
+ if backend_name == "pandas.plotting._matplotlib":
937
+ kwargs = dict(arg_def, **pos_args, **kwargs)
938
+ else:
939
+ kwargs = dict(pos_args, **kwargs)
940
+
941
+ x = kwargs.pop("x", None)
942
+ y = kwargs.pop("y", None)
943
+ kind = kwargs.pop("kind", "line")
944
+ return x, y, kind, kwargs
945
+
946
+ def __call__(self, *args, **kwargs):
947
+ plot_backend = _get_plot_backend(kwargs.pop("backend", None))
948
+
949
+ x, y, kind, kwargs = self._get_call_args(
950
+ plot_backend.__name__, self._parent, args, kwargs
951
+ )
952
+
953
+ kind = self._kind_aliases.get(kind, kind)
954
+
955
+ # when using another backend, get out of the way
956
+ if plot_backend.__name__ != "pandas.plotting._matplotlib":
957
+ return plot_backend.plot(self._parent, x=x, y=y, kind=kind, **kwargs)
958
+
959
+ if kind not in self._all_kinds:
960
+ raise ValueError(
961
+ f"{kind} is not a valid plot kind. "
962
+ f"Valid plot kinds: {self._all_kinds}"
963
+ )
964
+
965
+ # The original data structure can be transformed before being passed to the
966
+ # backend. For example, for a DataFrame it is common to set the index as the
967
+ # `x` parameter, and return a Series with the parameter `y` as values.
968
+ data = self._parent.copy()
969
+
970
+ if isinstance(data, ABCSeries):
971
+ kwargs["reuse_plot"] = True
972
+
973
+ if kind in self._dataframe_kinds:
974
+ if isinstance(data, ABCDataFrame):
975
+ return plot_backend.plot(data, x=x, y=y, kind=kind, **kwargs)
976
+ else:
977
+ raise ValueError(f"plot kind {kind} can only be used for data frames")
978
+ elif kind in self._series_kinds:
979
+ if isinstance(data, ABCDataFrame):
980
+ if y is None and kwargs.get("subplots") is False:
981
+ raise ValueError(
982
+ f"{kind} requires either y column or 'subplots=True'"
983
+ )
984
+ if y is not None:
985
+ if is_integer(y) and not data.columns._holds_integer():
986
+ y = data.columns[y]
987
+ # converted to series actually. copy to not modify
988
+ data = data[y].copy()
989
+ data.index.name = y
990
+ elif isinstance(data, ABCDataFrame):
991
+ data_cols = data.columns
992
+ if x is not None:
993
+ if is_integer(x) and not data.columns._holds_integer():
994
+ x = data_cols[x]
995
+ elif not isinstance(data[x], ABCSeries):
996
+ raise ValueError("x must be a label or position")
997
+ data = data.set_index(x)
998
+ if y is not None:
999
+ # check if we have y as int or list of ints
1000
+ int_ylist = is_list_like(y) and all(is_integer(c) for c in y)
1001
+ int_y_arg = is_integer(y) or int_ylist
1002
+ if int_y_arg and not data.columns._holds_integer():
1003
+ y = data_cols[y]
1004
+
1005
+ label_kw = kwargs["label"] if "label" in kwargs else False
1006
+ for kw in ["xerr", "yerr"]:
1007
+ if kw in kwargs and (
1008
+ isinstance(kwargs[kw], str) or is_integer(kwargs[kw])
1009
+ ):
1010
+ try:
1011
+ kwargs[kw] = data[kwargs[kw]]
1012
+ except (IndexError, KeyError, TypeError):
1013
+ pass
1014
+
1015
+ # don't overwrite
1016
+ data = data[y].copy()
1017
+
1018
+ if isinstance(data, ABCSeries):
1019
+ label_name = label_kw or y
1020
+ data.name = label_name
1021
+ else:
1022
+ match = is_list_like(label_kw) and len(label_kw) == len(y)
1023
+ if label_kw and not match:
1024
+ raise ValueError(
1025
+ "label should be list-like and same length as y"
1026
+ )
1027
+ label_name = label_kw or data.columns
1028
+ data.columns = label_name
1029
+
1030
+ return plot_backend.plot(data, kind=kind, **kwargs)
1031
+
1032
+ __call__.__doc__ = __doc__
1033
+
1034
+ @Appender(
1035
+ """
1036
+ See Also
1037
+ --------
1038
+ matplotlib.pyplot.plot : Plot y versus x as lines and/or markers.
1039
+
1040
+ Examples
1041
+ --------
1042
+
1043
+ .. plot::
1044
+ :context: close-figs
1045
+
1046
+ >>> s = pd.Series([1, 3, 2])
1047
+ >>> s.plot.line() # doctest: +SKIP
1048
+
1049
+ .. plot::
1050
+ :context: close-figs
1051
+
1052
+ The following example shows the populations for some animals
1053
+ over the years.
1054
+
1055
+ >>> df = pd.DataFrame({
1056
+ ... 'pig': [20, 18, 489, 675, 1776],
1057
+ ... 'horse': [4, 25, 281, 600, 1900]
1058
+ ... }, index=[1990, 1997, 2003, 2009, 2014])
1059
+ >>> lines = df.plot.line()
1060
+
1061
+ .. plot::
1062
+ :context: close-figs
1063
+
1064
+ An example with subplots, so an array of axes is returned.
1065
+
1066
+ >>> axes = df.plot.line(subplots=True)
1067
+ >>> type(axes)
1068
+ <class 'numpy.ndarray'>
1069
+
1070
+ .. plot::
1071
+ :context: close-figs
1072
+
1073
+ Let's repeat the same example, but specifying colors for
1074
+ each column (in this case, for each animal).
1075
+
1076
+ >>> axes = df.plot.line(
1077
+ ... subplots=True, color={"pig": "pink", "horse": "#742802"}
1078
+ ... )
1079
+
1080
+ .. plot::
1081
+ :context: close-figs
1082
+
1083
+ The following example shows the relationship between both
1084
+ populations.
1085
+
1086
+ >>> lines = df.plot.line(x='pig', y='horse')
1087
+ """
1088
+ )
1089
+ @Substitution(kind="line")
1090
+ @Appender(_bar_or_line_doc)
1091
+ def line(
1092
+ self, x: Hashable | None = None, y: Hashable | None = None, **kwargs
1093
+ ) -> PlotAccessor:
1094
+ """
1095
+ Plot Series or DataFrame as lines.
1096
+
1097
+ This function is useful to plot lines using DataFrame's values
1098
+ as coordinates.
1099
+ """
1100
+ return self(kind="line", x=x, y=y, **kwargs)
1101
+
1102
+ @Appender(
1103
+ """
1104
+ See Also
1105
+ --------
1106
+ DataFrame.plot.barh : Horizontal bar plot.
1107
+ DataFrame.plot : Make plots of a DataFrame.
1108
+ matplotlib.pyplot.bar : Make a bar plot with matplotlib.
1109
+
1110
+ Examples
1111
+ --------
1112
+ Basic plot.
1113
+
1114
+ .. plot::
1115
+ :context: close-figs
1116
+
1117
+ >>> df = pd.DataFrame({'lab':['A', 'B', 'C'], 'val':[10, 30, 20]})
1118
+ >>> ax = df.plot.bar(x='lab', y='val', rot=0)
1119
+
1120
+ Plot a whole dataframe to a bar plot. Each column is assigned a
1121
+ distinct color, and each row is nested in a group along the
1122
+ horizontal axis.
1123
+
1124
+ .. plot::
1125
+ :context: close-figs
1126
+
1127
+ >>> speed = [0.1, 17.5, 40, 48, 52, 69, 88]
1128
+ >>> lifespan = [2, 8, 70, 1.5, 25, 12, 28]
1129
+ >>> index = ['snail', 'pig', 'elephant',
1130
+ ... 'rabbit', 'giraffe', 'coyote', 'horse']
1131
+ >>> df = pd.DataFrame({'speed': speed,
1132
+ ... 'lifespan': lifespan}, index=index)
1133
+ >>> ax = df.plot.bar(rot=0)
1134
+
1135
+ Plot stacked bar charts for the DataFrame
1136
+
1137
+ .. plot::
1138
+ :context: close-figs
1139
+
1140
+ >>> ax = df.plot.bar(stacked=True)
1141
+
1142
+ Instead of nesting, the figure can be split by column with
1143
+ ``subplots=True``. In this case, a :class:`numpy.ndarray` of
1144
+ :class:`matplotlib.axes.Axes` are returned.
1145
+
1146
+ .. plot::
1147
+ :context: close-figs
1148
+
1149
+ >>> axes = df.plot.bar(rot=0, subplots=True)
1150
+ >>> axes[1].legend(loc=2) # doctest: +SKIP
1151
+
1152
+ If you don't like the default colours, you can specify how you'd
1153
+ like each column to be colored.
1154
+
1155
+ .. plot::
1156
+ :context: close-figs
1157
+
1158
+ >>> axes = df.plot.bar(
1159
+ ... rot=0, subplots=True, color={"speed": "red", "lifespan": "green"}
1160
+ ... )
1161
+ >>> axes[1].legend(loc=2) # doctest: +SKIP
1162
+
1163
+ Plot a single column.
1164
+
1165
+ .. plot::
1166
+ :context: close-figs
1167
+
1168
+ >>> ax = df.plot.bar(y='speed', rot=0)
1169
+
1170
+ Plot only selected categories for the DataFrame.
1171
+
1172
+ .. plot::
1173
+ :context: close-figs
1174
+
1175
+ >>> ax = df.plot.bar(x='lifespan', rot=0)
1176
+ """
1177
+ )
1178
+ @Substitution(kind="bar")
1179
+ @Appender(_bar_or_line_doc)
1180
+ def bar( # pylint: disable=disallowed-name
1181
+ self, x: Hashable | None = None, y: Hashable | None = None, **kwargs
1182
+ ) -> PlotAccessor:
1183
+ """
1184
+ Vertical bar plot.
1185
+
1186
+ A bar plot is a plot that presents categorical data with
1187
+ rectangular bars with lengths proportional to the values that they
1188
+ represent. A bar plot shows comparisons among discrete categories. One
1189
+ axis of the plot shows the specific categories being compared, and the
1190
+ other axis represents a measured value.
1191
+ """
1192
+ return self(kind="bar", x=x, y=y, **kwargs)
1193
+
1194
+ @Appender(
1195
+ """
1196
+ See Also
1197
+ --------
1198
+ DataFrame.plot.bar: Vertical bar plot.
1199
+ DataFrame.plot : Make plots of DataFrame using matplotlib.
1200
+ matplotlib.axes.Axes.bar : Plot a vertical bar plot using matplotlib.
1201
+
1202
+ Examples
1203
+ --------
1204
+ Basic example
1205
+
1206
+ .. plot::
1207
+ :context: close-figs
1208
+
1209
+ >>> df = pd.DataFrame({'lab': ['A', 'B', 'C'], 'val': [10, 30, 20]})
1210
+ >>> ax = df.plot.barh(x='lab', y='val')
1211
+
1212
+ Plot a whole DataFrame to a horizontal bar plot
1213
+
1214
+ .. plot::
1215
+ :context: close-figs
1216
+
1217
+ >>> speed = [0.1, 17.5, 40, 48, 52, 69, 88]
1218
+ >>> lifespan = [2, 8, 70, 1.5, 25, 12, 28]
1219
+ >>> index = ['snail', 'pig', 'elephant',
1220
+ ... 'rabbit', 'giraffe', 'coyote', 'horse']
1221
+ >>> df = pd.DataFrame({'speed': speed,
1222
+ ... 'lifespan': lifespan}, index=index)
1223
+ >>> ax = df.plot.barh()
1224
+
1225
+ Plot stacked barh charts for the DataFrame
1226
+
1227
+ .. plot::
1228
+ :context: close-figs
1229
+
1230
+ >>> ax = df.plot.barh(stacked=True)
1231
+
1232
+ We can specify colors for each column
1233
+
1234
+ .. plot::
1235
+ :context: close-figs
1236
+
1237
+ >>> ax = df.plot.barh(color={"speed": "red", "lifespan": "green"})
1238
+
1239
+ Plot a column of the DataFrame to a horizontal bar plot
1240
+
1241
+ .. plot::
1242
+ :context: close-figs
1243
+
1244
+ >>> speed = [0.1, 17.5, 40, 48, 52, 69, 88]
1245
+ >>> lifespan = [2, 8, 70, 1.5, 25, 12, 28]
1246
+ >>> index = ['snail', 'pig', 'elephant',
1247
+ ... 'rabbit', 'giraffe', 'coyote', 'horse']
1248
+ >>> df = pd.DataFrame({'speed': speed,
1249
+ ... 'lifespan': lifespan}, index=index)
1250
+ >>> ax = df.plot.barh(y='speed')
1251
+
1252
+ Plot DataFrame versus the desired column
1253
+
1254
+ .. plot::
1255
+ :context: close-figs
1256
+
1257
+ >>> speed = [0.1, 17.5, 40, 48, 52, 69, 88]
1258
+ >>> lifespan = [2, 8, 70, 1.5, 25, 12, 28]
1259
+ >>> index = ['snail', 'pig', 'elephant',
1260
+ ... 'rabbit', 'giraffe', 'coyote', 'horse']
1261
+ >>> df = pd.DataFrame({'speed': speed,
1262
+ ... 'lifespan': lifespan}, index=index)
1263
+ >>> ax = df.plot.barh(x='lifespan')
1264
+ """
1265
+ )
1266
+ @Substitution(kind="bar")
1267
+ @Appender(_bar_or_line_doc)
1268
+ def barh(
1269
+ self, x: Hashable | None = None, y: Hashable | None = None, **kwargs
1270
+ ) -> PlotAccessor:
1271
+ """
1272
+ Make a horizontal bar plot.
1273
+
1274
+ A horizontal bar plot is a plot that presents quantitative data with
1275
+ rectangular bars with lengths proportional to the values that they
1276
+ represent. A bar plot shows comparisons among discrete categories. One
1277
+ axis of the plot shows the specific categories being compared, and the
1278
+ other axis represents a measured value.
1279
+ """
1280
+ return self(kind="barh", x=x, y=y, **kwargs)
1281
+
1282
+ def box(self, by: IndexLabel | None = None, **kwargs) -> PlotAccessor:
1283
+ r"""
1284
+ Make a box plot of the DataFrame columns.
1285
+
1286
+ A box plot is a method for graphically depicting groups of numerical
1287
+ data through their quartiles.
1288
+ The box extends from the Q1 to Q3 quartile values of the data,
1289
+ with a line at the median (Q2). The whiskers extend from the edges
1290
+ of box to show the range of the data. The position of the whiskers
1291
+ is set by default to 1.5*IQR (IQR = Q3 - Q1) from the edges of the
1292
+ box. Outlier points are those past the end of the whiskers.
1293
+
1294
+ For further details see Wikipedia's
1295
+ entry for `boxplot <https://en.wikipedia.org/wiki/Box_plot>`__.
1296
+
1297
+ A consideration when using this chart is that the box and the whiskers
1298
+ can overlap, which is very common when plotting small sets of data.
1299
+
1300
+ Parameters
1301
+ ----------
1302
+ by : str or sequence
1303
+ Column in the DataFrame to group by.
1304
+
1305
+ .. versionchanged:: 1.4.0
1306
+
1307
+ Previously, `by` was silently ignored and made no groupings.
1308
+
1309
+ **kwargs
1310
+ Additional keywords are documented in
1311
+ :meth:`DataFrame.plot`.
1312
+
1313
+ Returns
1314
+ -------
1315
+ :class:`matplotlib.axes.Axes` or numpy.ndarray of them
1316
+
1317
+ See Also
1318
+ --------
1319
+ DataFrame.boxplot: Another method to draw a box plot.
1320
+ Series.plot.box: Draw a box plot from a Series object.
1321
+ matplotlib.pyplot.boxplot: Draw a box plot in matplotlib.
1322
+
1323
+ Examples
1324
+ --------
1325
+ Draw a box plot from a DataFrame with four columns of randomly
1326
+ generated data.
1327
+
1328
+ .. plot::
1329
+ :context: close-figs
1330
+
1331
+ >>> data = np.random.randn(25, 4)
1332
+ >>> df = pd.DataFrame(data, columns=list('ABCD'))
1333
+ >>> ax = df.plot.box()
1334
+
1335
+ You can also generate groupings if you specify the `by` parameter (which
1336
+ can take a column name, or a list or tuple of column names):
1337
+
1338
+ .. versionchanged:: 1.4.0
1339
+
1340
+ .. plot::
1341
+ :context: close-figs
1342
+
1343
+ >>> age_list = [8, 10, 12, 14, 72, 74, 76, 78, 20, 25, 30, 35, 60, 85]
1344
+ >>> df = pd.DataFrame({"gender": list("MMMMMMMMFFFFFF"), "age": age_list})
1345
+ >>> ax = df.plot.box(column="age", by="gender", figsize=(10, 8))
1346
+ """
1347
+ return self(kind="box", by=by, **kwargs)
1348
+
1349
+ def hist(
1350
+ self, by: IndexLabel | None = None, bins: int = 10, **kwargs
1351
+ ) -> PlotAccessor:
1352
+ """
1353
+ Draw one histogram of the DataFrame's columns.
1354
+
1355
+ A histogram is a representation of the distribution of data.
1356
+ This function groups the values of all given Series in the DataFrame
1357
+ into bins and draws all bins in one :class:`matplotlib.axes.Axes`.
1358
+ This is useful when the DataFrame's Series are in a similar scale.
1359
+
1360
+ Parameters
1361
+ ----------
1362
+ by : str or sequence, optional
1363
+ Column in the DataFrame to group by.
1364
+
1365
+ .. versionchanged:: 1.4.0
1366
+
1367
+ Previously, `by` was silently ignored and made no groupings.
1368
+
1369
+ bins : int, default 10
1370
+ Number of histogram bins to be used.
1371
+ **kwargs
1372
+ Additional keyword arguments are documented in
1373
+ :meth:`DataFrame.plot`.
1374
+
1375
+ Returns
1376
+ -------
1377
+ :class:`matplotlib.axes.Axes`
1378
+ Return a histogram plot.
1379
+
1380
+ See Also
1381
+ --------
1382
+ DataFrame.hist : Draw histograms per DataFrame's Series.
1383
+ Series.hist : Draw a histogram with Series' data.
1384
+
1385
+ Examples
1386
+ --------
1387
+ When we roll a die 6000 times, we expect to get each value around 1000
1388
+ times. But when we roll two dice and sum the result, the distribution
1389
+ is going to be quite different. A histogram illustrates those
1390
+ distributions.
1391
+
1392
+ .. plot::
1393
+ :context: close-figs
1394
+
1395
+ >>> df = pd.DataFrame(np.random.randint(1, 7, 6000), columns=['one'])
1396
+ >>> df['two'] = df['one'] + np.random.randint(1, 7, 6000)
1397
+ >>> ax = df.plot.hist(bins=12, alpha=0.5)
1398
+
1399
+ A grouped histogram can be generated by providing the parameter `by` (which
1400
+ can be a column name, or a list of column names):
1401
+
1402
+ .. plot::
1403
+ :context: close-figs
1404
+
1405
+ >>> age_list = [8, 10, 12, 14, 72, 74, 76, 78, 20, 25, 30, 35, 60, 85]
1406
+ >>> df = pd.DataFrame({"gender": list("MMMMMMMMFFFFFF"), "age": age_list})
1407
+ >>> ax = df.plot.hist(column=["age"], by="gender", figsize=(10, 8))
1408
+ """
1409
+ return self(kind="hist", by=by, bins=bins, **kwargs)
1410
+
1411
+ def kde(
1412
+ self,
1413
+ bw_method: Literal["scott", "silverman"] | float | Callable | None = None,
1414
+ ind: np.ndarray | int | None = None,
1415
+ **kwargs,
1416
+ ) -> PlotAccessor:
1417
+ """
1418
+ Generate Kernel Density Estimate plot using Gaussian kernels.
1419
+
1420
+ In statistics, `kernel density estimation`_ (KDE) is a non-parametric
1421
+ way to estimate the probability density function (PDF) of a random
1422
+ variable. This function uses Gaussian kernels and includes automatic
1423
+ bandwidth determination.
1424
+
1425
+ .. _kernel density estimation:
1426
+ https://en.wikipedia.org/wiki/Kernel_density_estimation
1427
+
1428
+ Parameters
1429
+ ----------
1430
+ bw_method : str, scalar or callable, optional
1431
+ The method used to calculate the estimator bandwidth. This can be
1432
+ 'scott', 'silverman', a scalar constant or a callable.
1433
+ If None (default), 'scott' is used.
1434
+ See :class:`scipy.stats.gaussian_kde` for more information.
1435
+ ind : NumPy array or int, optional
1436
+ Evaluation points for the estimated PDF. If None (default),
1437
+ 1000 equally spaced points are used. If `ind` is a NumPy array, the
1438
+ KDE is evaluated at the points passed. If `ind` is an integer,
1439
+ `ind` number of equally spaced points are used.
1440
+ **kwargs
1441
+ Additional keyword arguments are documented in
1442
+ :meth:`DataFrame.plot`.
1443
+
1444
+ Returns
1445
+ -------
1446
+ matplotlib.axes.Axes or numpy.ndarray of them
1447
+
1448
+ See Also
1449
+ --------
1450
+ scipy.stats.gaussian_kde : Representation of a kernel-density
1451
+ estimate using Gaussian kernels. This is the function used
1452
+ internally to estimate the PDF.
1453
+
1454
+ Examples
1455
+ --------
1456
+ Given a Series of points randomly sampled from an unknown
1457
+ distribution, estimate its PDF using KDE with automatic
1458
+ bandwidth determination and plot the results, evaluating them at
1459
+ 1000 equally spaced points (default):
1460
+
1461
+ .. plot::
1462
+ :context: close-figs
1463
+
1464
+ >>> s = pd.Series([1, 2, 2.5, 3, 3.5, 4, 5])
1465
+ >>> ax = s.plot.kde()
1466
+
1467
+ A scalar bandwidth can be specified. Using a small bandwidth value can
1468
+ lead to over-fitting, while using a large bandwidth value may result
1469
+ in under-fitting:
1470
+
1471
+ .. plot::
1472
+ :context: close-figs
1473
+
1474
+ >>> ax = s.plot.kde(bw_method=0.3)
1475
+
1476
+ .. plot::
1477
+ :context: close-figs
1478
+
1479
+ >>> ax = s.plot.kde(bw_method=3)
1480
+
1481
+ Finally, the `ind` parameter determines the evaluation points for the
1482
+ plot of the estimated PDF:
1483
+
1484
+ .. plot::
1485
+ :context: close-figs
1486
+
1487
+ >>> ax = s.plot.kde(ind=[1, 2, 3, 4, 5])
1488
+
1489
+ For DataFrame, it works in the same way:
1490
+
1491
+ .. plot::
1492
+ :context: close-figs
1493
+
1494
+ >>> df = pd.DataFrame({
1495
+ ... 'x': [1, 2, 2.5, 3, 3.5, 4, 5],
1496
+ ... 'y': [4, 4, 4.5, 5, 5.5, 6, 6],
1497
+ ... })
1498
+ >>> ax = df.plot.kde()
1499
+
1500
+ A scalar bandwidth can be specified. Using a small bandwidth value can
1501
+ lead to over-fitting, while using a large bandwidth value may result
1502
+ in under-fitting:
1503
+
1504
+ .. plot::
1505
+ :context: close-figs
1506
+
1507
+ >>> ax = df.plot.kde(bw_method=0.3)
1508
+
1509
+ .. plot::
1510
+ :context: close-figs
1511
+
1512
+ >>> ax = df.plot.kde(bw_method=3)
1513
+
1514
+ Finally, the `ind` parameter determines the evaluation points for the
1515
+ plot of the estimated PDF:
1516
+
1517
+ .. plot::
1518
+ :context: close-figs
1519
+
1520
+ >>> ax = df.plot.kde(ind=[1, 2, 3, 4, 5, 6])
1521
+ """
1522
+ return self(kind="kde", bw_method=bw_method, ind=ind, **kwargs)
1523
+
1524
+ density = kde
1525
+
1526
+ def area(
1527
+ self,
1528
+ x: Hashable | None = None,
1529
+ y: Hashable | None = None,
1530
+ stacked: bool = True,
1531
+ **kwargs,
1532
+ ) -> PlotAccessor:
1533
+ """
1534
+ Draw a stacked area plot.
1535
+
1536
+ An area plot displays quantitative data visually.
1537
+ This function wraps the matplotlib area function.
1538
+
1539
+ Parameters
1540
+ ----------
1541
+ x : label or position, optional
1542
+ Coordinates for the X axis. By default uses the index.
1543
+ y : label or position, optional
1544
+ Column to plot. By default uses all columns.
1545
+ stacked : bool, default True
1546
+ Area plots are stacked by default. Set to False to create a
1547
+ unstacked plot.
1548
+ **kwargs
1549
+ Additional keyword arguments are documented in
1550
+ :meth:`DataFrame.plot`.
1551
+
1552
+ Returns
1553
+ -------
1554
+ matplotlib.axes.Axes or numpy.ndarray
1555
+ Area plot, or array of area plots if subplots is True.
1556
+
1557
+ See Also
1558
+ --------
1559
+ DataFrame.plot : Make plots of DataFrame using matplotlib / pylab.
1560
+
1561
+ Examples
1562
+ --------
1563
+ Draw an area plot based on basic business metrics:
1564
+
1565
+ .. plot::
1566
+ :context: close-figs
1567
+
1568
+ >>> df = pd.DataFrame({
1569
+ ... 'sales': [3, 2, 3, 9, 10, 6],
1570
+ ... 'signups': [5, 5, 6, 12, 14, 13],
1571
+ ... 'visits': [20, 42, 28, 62, 81, 50],
1572
+ ... }, index=pd.date_range(start='2018/01/01', end='2018/07/01',
1573
+ ... freq='ME'))
1574
+ >>> ax = df.plot.area()
1575
+
1576
+ Area plots are stacked by default. To produce an unstacked plot,
1577
+ pass ``stacked=False``:
1578
+
1579
+ .. plot::
1580
+ :context: close-figs
1581
+
1582
+ >>> ax = df.plot.area(stacked=False)
1583
+
1584
+ Draw an area plot for a single column:
1585
+
1586
+ .. plot::
1587
+ :context: close-figs
1588
+
1589
+ >>> ax = df.plot.area(y='sales')
1590
+
1591
+ Draw with a different `x`:
1592
+
1593
+ .. plot::
1594
+ :context: close-figs
1595
+
1596
+ >>> df = pd.DataFrame({
1597
+ ... 'sales': [3, 2, 3],
1598
+ ... 'visits': [20, 42, 28],
1599
+ ... 'day': [1, 2, 3],
1600
+ ... })
1601
+ >>> ax = df.plot.area(x='day')
1602
+ """
1603
+ return self(kind="area", x=x, y=y, stacked=stacked, **kwargs)
1604
+
1605
+ def pie(self, **kwargs) -> PlotAccessor:
1606
+ """
1607
+ Generate a pie plot.
1608
+
1609
+ A pie plot is a proportional representation of the numerical data in a
1610
+ column. This function wraps :meth:`matplotlib.pyplot.pie` for the
1611
+ specified column. If no column reference is passed and
1612
+ ``subplots=True`` a pie plot is drawn for each numerical column
1613
+ independently.
1614
+
1615
+ Parameters
1616
+ ----------
1617
+ y : int or label, optional
1618
+ Label or position of the column to plot.
1619
+ If not provided, ``subplots=True`` argument must be passed.
1620
+ **kwargs
1621
+ Keyword arguments to pass on to :meth:`DataFrame.plot`.
1622
+
1623
+ Returns
1624
+ -------
1625
+ matplotlib.axes.Axes or np.ndarray of them
1626
+ A NumPy array is returned when `subplots` is True.
1627
+
1628
+ See Also
1629
+ --------
1630
+ Series.plot.pie : Generate a pie plot for a Series.
1631
+ DataFrame.plot : Make plots of a DataFrame.
1632
+
1633
+ Examples
1634
+ --------
1635
+ In the example below we have a DataFrame with the information about
1636
+ planets' masses and radii. We pass the 'mass' column to the
1637
+ pie function to get a pie plot.
1638
+
1639
+ .. plot::
1640
+ :context: close-figs
1641
+
1642
+ >>> df = pd.DataFrame({'mass': [0.330, 4.87 , 5.97],
1643
+ ... 'radius': [2439.7, 6051.8, 6378.1]},
1644
+ ... index=['Mercury', 'Venus', 'Earth'])
1645
+ >>> plot = df.plot.pie(y='mass', figsize=(5, 5))
1646
+
1647
+ .. plot::
1648
+ :context: close-figs
1649
+
1650
+ >>> plot = df.plot.pie(subplots=True, figsize=(11, 6))
1651
+ """
1652
+ if (
1653
+ isinstance(self._parent, ABCDataFrame)
1654
+ and kwargs.get("y", None) is None
1655
+ and not kwargs.get("subplots", False)
1656
+ ):
1657
+ raise ValueError("pie requires either y column or 'subplots=True'")
1658
+ return self(kind="pie", **kwargs)
1659
+
1660
+ def scatter(
1661
+ self,
1662
+ x: Hashable,
1663
+ y: Hashable,
1664
+ s: Hashable | Sequence[Hashable] | None = None,
1665
+ c: Hashable | Sequence[Hashable] | None = None,
1666
+ **kwargs,
1667
+ ) -> PlotAccessor:
1668
+ """
1669
+ Create a scatter plot with varying marker point size and color.
1670
+
1671
+ The coordinates of each point are defined by two dataframe columns and
1672
+ filled circles are used to represent each point. This kind of plot is
1673
+ useful to see complex correlations between two variables. Points could
1674
+ be for instance natural 2D coordinates like longitude and latitude in
1675
+ a map or, in general, any pair of metrics that can be plotted against
1676
+ each other.
1677
+
1678
+ Parameters
1679
+ ----------
1680
+ x : int or str
1681
+ The column name or column position to be used as horizontal
1682
+ coordinates for each point.
1683
+ y : int or str
1684
+ The column name or column position to be used as vertical
1685
+ coordinates for each point.
1686
+ s : str, scalar or array-like, optional
1687
+ The size of each point. Possible values are:
1688
+
1689
+ - A string with the name of the column to be used for marker's size.
1690
+
1691
+ - A single scalar so all points have the same size.
1692
+
1693
+ - A sequence of scalars, which will be used for each point's size
1694
+ in turn. For instance, when passing [2, 14], the sizes of the points
1695
+ will alternate between 2 and 14.
1696
+
1697
+ c : str, int or array-like, optional
1698
+ The color of each point. Possible values are:
1699
+
1700
+ - A single color string referred to by name, RGB or RGBA code,
1701
+ for instance 'red' or '#a98d19'.
1702
+
1703
+ - A sequence of color strings referred to by name, RGB or RGBA
1704
+ code, which will be used for each point's color in turn. For
1705
+ instance, with ['green', 'yellow'] the points will be filled in green and
1706
+ yellow alternately.
1707
+
1708
+ - A column name or position whose values will be used to color the
1709
+ marker points according to a colormap.
1710
+
1711
+ **kwargs
1712
+ Keyword arguments to pass on to :meth:`DataFrame.plot`.
1713
+
1714
+ Returns
1715
+ -------
1716
+ :class:`matplotlib.axes.Axes` or numpy.ndarray of them
1717
+
1718
+ See Also
1719
+ --------
1720
+ matplotlib.pyplot.scatter : Scatter plot using multiple input data
1721
+ formats.
1722
+
1723
+ Examples
1724
+ --------
1725
+ Let's see how to draw a scatter plot using coordinates from the values
1726
+ in a DataFrame's columns.
1727
+
1728
+ .. plot::
1729
+ :context: close-figs
1730
+
1731
+ >>> df = pd.DataFrame([[5.1, 3.5, 0], [4.9, 3.0, 0], [7.0, 3.2, 1],
1732
+ ... [6.4, 3.2, 1], [5.9, 3.0, 2]],
1733
+ ... columns=['length', 'width', 'species'])
1734
+ >>> ax1 = df.plot.scatter(x='length',
1735
+ ... y='width',
1736
+ ... c='DarkBlue')
1737
+
1738
+ And now with the color determined by a column as well.
1739
+
1740
+ .. plot::
1741
+ :context: close-figs
1742
+
1743
+ >>> ax2 = df.plot.scatter(x='length',
1744
+ ... y='width',
1745
+ ... c='species',
1746
+ ... colormap='viridis')
1747
+ """
1748
+ return self(kind="scatter", x=x, y=y, s=s, c=c, **kwargs)
1749
+
1750
+ def hexbin(
1751
+ self,
1752
+ x: Hashable,
1753
+ y: Hashable,
1754
+ C: Hashable | None = None,
1755
+ reduce_C_function: Callable | None = None,
1756
+ gridsize: int | tuple[int, int] | None = None,
1757
+ **kwargs,
1758
+ ) -> PlotAccessor:
1759
+ """
1760
+ Generate a hexagonal binning plot.
1761
+
1762
+ Generate a hexagonal binning plot of `x` versus `y`. If `C` is `None`
1763
+ (the default), this is a histogram of the number of occurrences
1764
+ of the observations at ``(x[i], y[i])``.
1765
+
1766
+ If `C` is specified, specifies values at given coordinates
1767
+ ``(x[i], y[i])``. These values are accumulated for each hexagonal
1768
+ bin and then reduced according to `reduce_C_function`,
1769
+ which defaults to NumPy's mean function (:meth:`numpy.mean`).
1770
+ (If `C` is specified, it must also be a 1-D sequence
1771
+ of the same length as `x` and `y`, or a column label.)
1772
+
1773
+ Parameters
1774
+ ----------
1775
+ x : int or str
1776
+ The column label or position for x points.
1777
+ y : int or str
1778
+ The column label or position for y points.
1779
+ C : int or str, optional
1780
+ The column label or position for the value of `(x, y)` point.
1781
+ reduce_C_function : callable, default `np.mean`
1782
+ Function of one argument that reduces all the values in a bin to
1783
+ a single number (e.g. `np.mean`, `np.max`, `np.sum`, `np.std`).
1784
+ gridsize : int or tuple of (int, int), default 100
1785
+ The number of hexagons in the x-direction.
1786
+ The corresponding number of hexagons in the y-direction is
1787
+ chosen in a way that the hexagons are approximately regular.
1788
+ Alternatively, gridsize can be a tuple with two elements
1789
+ specifying the number of hexagons in the x-direction and the
1790
+ y-direction.
1791
+ **kwargs
1792
+ Additional keyword arguments are documented in
1793
+ :meth:`DataFrame.plot`.
1794
+
1795
+ Returns
1796
+ -------
1797
+ matplotlib.AxesSubplot
1798
+ The matplotlib ``Axes`` on which the hexbin is plotted.
1799
+
1800
+ See Also
1801
+ --------
1802
+ DataFrame.plot : Make plots of a DataFrame.
1803
+ matplotlib.pyplot.hexbin : Hexagonal binning plot using matplotlib,
1804
+ the matplotlib function that is used under the hood.
1805
+
1806
+ Examples
1807
+ --------
1808
+ The following examples are generated with random data from
1809
+ a normal distribution.
1810
+
1811
+ .. plot::
1812
+ :context: close-figs
1813
+
1814
+ >>> n = 10000
1815
+ >>> df = pd.DataFrame({'x': np.random.randn(n),
1816
+ ... 'y': np.random.randn(n)})
1817
+ >>> ax = df.plot.hexbin(x='x', y='y', gridsize=20)
1818
+
1819
+ The next example uses `C` and `np.sum` as `reduce_C_function`.
1820
+ Note that the `'observations'` values range from 1 to 5 but the resulting
1821
+ plot shows values well above 25. This is because of the
1822
+ `reduce_C_function`.
1823
+
1824
+ .. plot::
1825
+ :context: close-figs
1826
+
1827
+ >>> n = 500
1828
+ >>> df = pd.DataFrame({
1829
+ ... 'coord_x': np.random.uniform(-3, 3, size=n),
1830
+ ... 'coord_y': np.random.uniform(30, 50, size=n),
1831
+ ... 'observations': np.random.randint(1,5, size=n)
1832
+ ... })
1833
+ >>> ax = df.plot.hexbin(x='coord_x',
1834
+ ... y='coord_y',
1835
+ ... C='observations',
1836
+ ... reduce_C_function=np.sum,
1837
+ ... gridsize=10,
1838
+ ... cmap="viridis")
1839
+ """
1840
+ if reduce_C_function is not None:
1841
+ kwargs["reduce_C_function"] = reduce_C_function
1842
+ if gridsize is not None:
1843
+ kwargs["gridsize"] = gridsize
1844
+
1845
+ return self(kind="hexbin", x=x, y=y, C=C, **kwargs)
1846
+
1847
+
1848
+ _backends: dict[str, types.ModuleType] = {}
1849
+
1850
+
1851
+ def _load_backend(backend: str) -> types.ModuleType:
1852
+ """
1853
+ Load a pandas plotting backend.
1854
+
1855
+ Parameters
1856
+ ----------
1857
+ backend : str
1858
+ The identifier for the backend. Either an entrypoint item registered
1859
+ with importlib.metadata, "matplotlib", or a module name.
1860
+
1861
+ Returns
1862
+ -------
1863
+ types.ModuleType
1864
+ The imported backend.
1865
+ """
1866
+ from importlib.metadata import entry_points
1867
+
1868
+ if backend == "matplotlib":
1869
+ # Because matplotlib is an optional dependency and first-party backend,
1870
+ # we need to attempt an import here to raise an ImportError if needed.
1871
+ try:
1872
+ module = importlib.import_module("pandas.plotting._matplotlib")
1873
+ except ImportError:
1874
+ raise ImportError(
1875
+ "matplotlib is required for plotting when the "
1876
+ 'default backend "matplotlib" is selected.'
1877
+ ) from None
1878
+ return module
1879
+
1880
+ found_backend = False
1881
+
1882
+ eps = entry_points()
1883
+ key = "pandas_plotting_backends"
1884
+ # entry_points lost dict API ~ PY 3.10
1885
+ # https://github.com/python/importlib_metadata/issues/298
1886
+ if hasattr(eps, "select"):
1887
+ entry = eps.select(group=key)
1888
+ else:
1889
+ # Argument 2 to "get" of "dict" has incompatible type "Tuple[]";
1890
+ # expected "EntryPoints" [arg-type]
1891
+ entry = eps.get(key, ()) # type: ignore[arg-type]
1892
+ for entry_point in entry:
1893
+ found_backend = entry_point.name == backend
1894
+ if found_backend:
1895
+ module = entry_point.load()
1896
+ break
1897
+
1898
+ if not found_backend:
1899
+ # Fall back to unregistered, module name approach.
1900
+ try:
1901
+ module = importlib.import_module(backend)
1902
+ found_backend = True
1903
+ except ImportError:
1904
+ # We re-raise later on.
1905
+ pass
1906
+
1907
+ if found_backend:
1908
+ if hasattr(module, "plot"):
1909
+ # Validate that the interface is implemented when the option is set,
1910
+ # rather than at plot time.
1911
+ return module
1912
+
1913
+ raise ValueError(
1914
+ f"Could not find plotting backend '{backend}'. Ensure that you've "
1915
+ f"installed the package providing the '{backend}' entrypoint, or that "
1916
+ "the package has a top-level `.plot` method."
1917
+ )
1918
+
1919
+
1920
+ def _get_plot_backend(backend: str | None = None):
1921
+ """
1922
+ Return the plotting backend to use (e.g. `pandas.plotting._matplotlib`).
1923
+
1924
+ The plotting system of pandas uses matplotlib by default, but the idea here
1925
+ is that it can also work with other third-party backends. This function
1926
+ returns the module which provides a top-level `.plot` method that will
1927
+ actually do the plotting. The backend is specified from a string, which
1928
+ either comes from the keyword argument `backend`, or, if not specified, from
1929
+ the option `pandas.options.plotting.backend`. All the rest of the code in
1930
+ this file uses the backend specified there for the plotting.
1931
+
1932
+ The backend is imported lazily, as matplotlib is a soft dependency, and
1933
+ pandas can be used without it being installed.
1934
+
1935
+ Notes
1936
+ -----
1937
+ Modifies `_backends` with imported backend as a side effect.
1938
+ """
1939
+ backend_str: str = backend or get_option("plotting.backend")
1940
+
1941
+ if backend_str in _backends:
1942
+ return _backends[backend_str]
1943
+
1944
+ module = _load_backend(backend_str)
1945
+ _backends[backend_str] = module
1946
+ return module
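The backend resolution implemented by `_load_backend` and `_get_plot_backend` above can be exercised end to end. A minimal sketch, assuming matplotlib is installed; the DataFrame contents are illustrative, while the option and keyword names come from the code above:

>>> import pandas as pd
>>> df = pd.DataFrame({"a": [1, 2, 3], "b": [3, 2, 1]})
>>> ax = df.plot(kind="line")                        # backend taken from pd.options.plotting.backend
>>> pd.set_option("plotting.backend", "matplotlib")  # session-wide default
>>> ax = df.plot(backend="matplotlib", kind="line")  # per-call override; the module is cached in _backends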
emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/__init__.py ADDED
@@ -0,0 +1,93 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import TYPE_CHECKING
4
+
5
+ from pandas.plotting._matplotlib.boxplot import (
6
+ BoxPlot,
7
+ boxplot,
8
+ boxplot_frame,
9
+ boxplot_frame_groupby,
10
+ )
11
+ from pandas.plotting._matplotlib.converter import (
12
+ deregister,
13
+ register,
14
+ )
15
+ from pandas.plotting._matplotlib.core import (
16
+ AreaPlot,
17
+ BarhPlot,
18
+ BarPlot,
19
+ HexBinPlot,
20
+ LinePlot,
21
+ PiePlot,
22
+ ScatterPlot,
23
+ )
24
+ from pandas.plotting._matplotlib.hist import (
25
+ HistPlot,
26
+ KdePlot,
27
+ hist_frame,
28
+ hist_series,
29
+ )
30
+ from pandas.plotting._matplotlib.misc import (
31
+ andrews_curves,
32
+ autocorrelation_plot,
33
+ bootstrap_plot,
34
+ lag_plot,
35
+ parallel_coordinates,
36
+ radviz,
37
+ scatter_matrix,
38
+ )
39
+ from pandas.plotting._matplotlib.tools import table
40
+
41
+ if TYPE_CHECKING:
42
+ from pandas.plotting._matplotlib.core import MPLPlot
43
+
44
+ PLOT_CLASSES: dict[str, type[MPLPlot]] = {
45
+ "line": LinePlot,
46
+ "bar": BarPlot,
47
+ "barh": BarhPlot,
48
+ "box": BoxPlot,
49
+ "hist": HistPlot,
50
+ "kde": KdePlot,
51
+ "area": AreaPlot,
52
+ "pie": PiePlot,
53
+ "scatter": ScatterPlot,
54
+ "hexbin": HexBinPlot,
55
+ }
56
+
57
+
58
+ def plot(data, kind, **kwargs):
59
+ # Importing pyplot at the top of the file (before the converters are
60
+ # registered) causes problems in matplotlib 2 (converters seem to not
61
+ # work)
62
+ import matplotlib.pyplot as plt
63
+
64
+ if kwargs.pop("reuse_plot", False):
65
+ ax = kwargs.get("ax")
66
+ if ax is None and len(plt.get_fignums()) > 0:
67
+ with plt.rc_context():
68
+ ax = plt.gca()
69
+ kwargs["ax"] = getattr(ax, "left_ax", ax)
70
+ plot_obj = PLOT_CLASSES[kind](data, **kwargs)
71
+ plot_obj.generate()
72
+ plot_obj.draw()
73
+ return plot_obj.result
74
+
75
+
76
+ __all__ = [
77
+ "plot",
78
+ "hist_series",
79
+ "hist_frame",
80
+ "boxplot",
81
+ "boxplot_frame",
82
+ "boxplot_frame_groupby",
83
+ "table",
84
+ "andrews_curves",
85
+ "autocorrelation_plot",
86
+ "bootstrap_plot",
87
+ "lag_plot",
88
+ "parallel_coordinates",
89
+ "radviz",
90
+ "scatter_matrix",
91
+ "register",
92
+ "deregister",
93
+ ]
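A hedged sketch of the dispatch performed by the `plot` entry point defined above: `kind` selects a class from `PLOT_CLASSES`, the instance is generated and drawn, and its `result` (an Axes, or an array of Axes when `subplots=True`) is returned. The DataFrame is illustrative and matplotlib must be installed:

>>> import pandas as pd
>>> from pandas.plotting._matplotlib import PLOT_CLASSES, plot
>>> PLOT_CLASSES["line"].__name__
'LinePlot'
>>> df = pd.DataFrame({"a": [1, 2, 3], "b": [3, 2, 1]})
>>> ax = plot(df, kind="line")  # the path DataFrame.plot takes under the default backend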
emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (1.87 kB). View file
 
emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/__pycache__/boxplot.cpython-310.pyc ADDED
Binary file (13.4 kB). View file
 
emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/__pycache__/converter.cpython-310.pyc ADDED
Binary file (29.1 kB). View file
 
emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/__pycache__/core.cpython-310.pyc ADDED
Binary file (50.1 kB). View file
 
emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/__pycache__/groupby.cpython-310.pyc ADDED
Binary file (4.34 kB). View file
 
emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/__pycache__/hist.cpython-310.pyc ADDED
Binary file (12.7 kB). View file
 
emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/__pycache__/misc.cpython-310.pyc ADDED
Binary file (11.5 kB). View file
 
emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/__pycache__/style.cpython-310.pyc ADDED
Binary file (8.8 kB). View file
 
emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/__pycache__/timeseries.cpython-310.pyc ADDED
Binary file (8.03 kB). View file
 
emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/__pycache__/tools.cpython-310.pyc ADDED
Binary file (11.8 kB). View file
 
emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/boxplot.py ADDED
@@ -0,0 +1,572 @@
 
1
+ from __future__ import annotations
2
+
3
+ from typing import (
4
+ TYPE_CHECKING,
5
+ Literal,
6
+ NamedTuple,
7
+ )
8
+ import warnings
9
+
10
+ from matplotlib.artist import setp
11
+ import numpy as np
12
+
13
+ from pandas._libs import lib
14
+ from pandas.util._decorators import cache_readonly
15
+ from pandas.util._exceptions import find_stack_level
16
+
17
+ from pandas.core.dtypes.common import is_dict_like
18
+ from pandas.core.dtypes.generic import ABCSeries
19
+ from pandas.core.dtypes.missing import remove_na_arraylike
20
+
21
+ import pandas as pd
22
+ import pandas.core.common as com
23
+
24
+ from pandas.io.formats.printing import pprint_thing
25
+ from pandas.plotting._matplotlib.core import (
26
+ LinePlot,
27
+ MPLPlot,
28
+ )
29
+ from pandas.plotting._matplotlib.groupby import create_iter_data_given_by
30
+ from pandas.plotting._matplotlib.style import get_standard_colors
31
+ from pandas.plotting._matplotlib.tools import (
32
+ create_subplots,
33
+ flatten_axes,
34
+ maybe_adjust_figure,
35
+ )
36
+
37
+ if TYPE_CHECKING:
38
+ from collections.abc import Collection
39
+
40
+ from matplotlib.axes import Axes
41
+ from matplotlib.figure import Figure
42
+ from matplotlib.lines import Line2D
43
+
44
+ from pandas._typing import MatplotlibColor
45
+
46
+
47
+ def _set_ticklabels(ax: Axes, labels: list[str], is_vertical: bool, **kwargs) -> None:
48
+ """Set the tick labels of a given axis.
49
+
50
+ Due to https://github.com/matplotlib/matplotlib/pull/17266, we need to handle the
51
+ case of repeated ticks (due to `FixedLocator`) and thus we duplicate the number of
52
+ labels.
53
+ """
54
+ ticks = ax.get_xticks() if is_vertical else ax.get_yticks()
55
+ if len(ticks) != len(labels):
56
+ i, remainder = divmod(len(ticks), len(labels))
57
+ assert remainder == 0, remainder
58
+ labels *= i
59
+ if is_vertical:
60
+ ax.set_xticklabels(labels, **kwargs)
61
+ else:
62
+ ax.set_yticklabels(labels, **kwargs)
63
+
64
+
65
+ class BoxPlot(LinePlot):
66
+ @property
67
+ def _kind(self) -> Literal["box"]:
68
+ return "box"
69
+
70
+ _layout_type = "horizontal"
71
+
72
+ _valid_return_types = (None, "axes", "dict", "both")
73
+
74
+ class BP(NamedTuple):
75
+ # namedtuple to hold results
76
+ ax: Axes
77
+ lines: dict[str, list[Line2D]]
78
+
79
+ def __init__(self, data, return_type: str = "axes", **kwargs) -> None:
80
+ if return_type not in self._valid_return_types:
81
+ raise ValueError("return_type must be {None, 'axes', 'dict', 'both'}")
82
+
83
+ self.return_type = return_type
84
+ # Do not call LinePlot.__init__ which may fill nan
85
+ MPLPlot.__init__(self, data, **kwargs) # pylint: disable=non-parent-init-called
86
+
87
+ if self.subplots:
88
+ # Disable label ax sharing. Otherwise, all subplots shows last
89
+ # column label
90
+ if self.orientation == "vertical":
91
+ self.sharex = False
92
+ else:
93
+ self.sharey = False
94
+
95
+ # error: Signature of "_plot" incompatible with supertype "MPLPlot"
96
+ @classmethod
97
+ def _plot( # type: ignore[override]
98
+ cls, ax: Axes, y: np.ndarray, column_num=None, return_type: str = "axes", **kwds
99
+ ):
100
+ ys: np.ndarray | list[np.ndarray]
101
+ if y.ndim == 2:
102
+ ys = [remove_na_arraylike(v) for v in y]
103
+ # Boxplot fails with empty arrays, so need to add a NaN
104
+ # if any cols are empty
105
+ # GH 8181
106
+ ys = [v if v.size > 0 else np.array([np.nan]) for v in ys]
107
+ else:
108
+ ys = remove_na_arraylike(y)
109
+ bp = ax.boxplot(ys, **kwds)
110
+
111
+ if return_type == "dict":
112
+ return bp, bp
113
+ elif return_type == "both":
114
+ return cls.BP(ax=ax, lines=bp), bp
115
+ else:
116
+ return ax, bp
117
+
118
+ def _validate_color_args(self, color, colormap):
119
+ if color is lib.no_default:
120
+ return None
121
+
122
+ if colormap is not None:
123
+ warnings.warn(
124
+ "'color' and 'colormap' cannot be used "
125
+ "simultaneously. Using 'color'",
126
+ stacklevel=find_stack_level(),
127
+ )
128
+
129
+ if isinstance(color, dict):
130
+ valid_keys = ["boxes", "whiskers", "medians", "caps"]
131
+ for key in color:
132
+ if key not in valid_keys:
133
+ raise ValueError(
134
+ f"color dict contains invalid key '{key}'. "
135
+ f"The key must be either {valid_keys}"
136
+ )
137
+ return color
138
+
139
+ @cache_readonly
140
+ def _color_attrs(self):
141
+ # get standard colors for default
142
+ # use 2 colors by default, for box/whisker and median
143
+ # flier colors isn't needed here
144
+ # because it can be specified by ``sym`` kw
145
+ return get_standard_colors(num_colors=3, colormap=self.colormap, color=None)
146
+
147
+ @cache_readonly
148
+ def _boxes_c(self):
149
+ return self._color_attrs[0]
150
+
151
+ @cache_readonly
152
+ def _whiskers_c(self):
153
+ return self._color_attrs[0]
154
+
155
+ @cache_readonly
156
+ def _medians_c(self):
157
+ return self._color_attrs[2]
158
+
159
+ @cache_readonly
160
+ def _caps_c(self):
161
+ return self._color_attrs[0]
162
+
163
+ def _get_colors(
164
+ self,
165
+ num_colors=None,
166
+ color_kwds: dict[str, MatplotlibColor]
167
+ | MatplotlibColor
168
+ | Collection[MatplotlibColor]
169
+ | None = "color",
170
+ ) -> None:
171
+ pass
172
+
173
+ def maybe_color_bp(self, bp) -> None:
174
+ if isinstance(self.color, dict):
175
+ boxes = self.color.get("boxes", self._boxes_c)
176
+ whiskers = self.color.get("whiskers", self._whiskers_c)
177
+ medians = self.color.get("medians", self._medians_c)
178
+ caps = self.color.get("caps", self._caps_c)
179
+ else:
180
+ # Other types are forwarded to matplotlib
181
+ # If None, use default colors
182
+ boxes = self.color or self._boxes_c
183
+ whiskers = self.color or self._whiskers_c
184
+ medians = self.color or self._medians_c
185
+ caps = self.color or self._caps_c
186
+
187
+ color_tup = (boxes, whiskers, medians, caps)
188
+ maybe_color_bp(bp, color_tup=color_tup, **self.kwds)
189
+
190
+ def _make_plot(self, fig: Figure) -> None:
191
+ if self.subplots:
192
+ self._return_obj = pd.Series(dtype=object)
193
+
194
+ # Re-create iterated data if `by` is assigned by users
195
+ data = (
196
+ create_iter_data_given_by(self.data, self._kind)
197
+ if self.by is not None
198
+ else self.data
199
+ )
200
+
201
+ # error: Argument "data" to "_iter_data" of "MPLPlot" has
202
+ # incompatible type "object"; expected "DataFrame |
203
+ # dict[Hashable, Series | DataFrame]"
204
+ for i, (label, y) in enumerate(self._iter_data(data=data)): # type: ignore[arg-type]
205
+ ax = self._get_ax(i)
206
+ kwds = self.kwds.copy()
207
+
208
+ # When by is applied, show title for subplots to know which group it is
209
+ # just like df.boxplot, and need to apply T on y to provide right input
210
+ if self.by is not None:
211
+ y = y.T
212
+ ax.set_title(pprint_thing(label))
213
+
214
+ # When `by` is assigned, the ticklabels will become unique grouped
215
+ # values, instead of label which is used as subtitle in this case.
216
+ # error: "Index" has no attribute "levels"; maybe "nlevels"?
217
+ levels = self.data.columns.levels # type: ignore[attr-defined]
218
+ ticklabels = [pprint_thing(col) for col in levels[0]]
219
+ else:
220
+ ticklabels = [pprint_thing(label)]
221
+
222
+ ret, bp = self._plot(
223
+ ax, y, column_num=i, return_type=self.return_type, **kwds
224
+ )
225
+ self.maybe_color_bp(bp)
226
+ self._return_obj[label] = ret
227
+ _set_ticklabels(
228
+ ax=ax, labels=ticklabels, is_vertical=self.orientation == "vertical"
229
+ )
230
+ else:
231
+ y = self.data.values.T
232
+ ax = self._get_ax(0)
233
+ kwds = self.kwds.copy()
234
+
235
+ ret, bp = self._plot(
236
+ ax, y, column_num=0, return_type=self.return_type, **kwds
237
+ )
238
+ self.maybe_color_bp(bp)
239
+ self._return_obj = ret
240
+
241
+ labels = [pprint_thing(left) for left in self.data.columns]
242
+ if not self.use_index:
243
+ labels = [pprint_thing(key) for key in range(len(labels))]
244
+ _set_ticklabels(
245
+ ax=ax, labels=labels, is_vertical=self.orientation == "vertical"
246
+ )
247
+
248
+ def _make_legend(self) -> None:
249
+ pass
250
+
251
+ def _post_plot_logic(self, ax: Axes, data) -> None:
252
+ # GH 45465: make sure that the boxplot doesn't ignore xlabel/ylabel
253
+ if self.xlabel:
254
+ ax.set_xlabel(pprint_thing(self.xlabel))
255
+ if self.ylabel:
256
+ ax.set_ylabel(pprint_thing(self.ylabel))
257
+
258
+ @property
259
+ def orientation(self) -> Literal["horizontal", "vertical"]:
260
+ if self.kwds.get("vert", True):
261
+ return "vertical"
262
+ else:
263
+ return "horizontal"
264
+
265
+ @property
266
+ def result(self):
267
+ if self.return_type is None:
268
+ return super().result
269
+ else:
270
+ return self._return_obj
271
+
272
+
273
+ def maybe_color_bp(bp, color_tup, **kwds) -> None:
274
+ # GH#30346, when users specifying those arguments explicitly, our defaults
275
+ # for these four kwargs should be overridden; if not, use Pandas settings
276
+ if not kwds.get("boxprops"):
277
+ setp(bp["boxes"], color=color_tup[0], alpha=1)
278
+ if not kwds.get("whiskerprops"):
279
+ setp(bp["whiskers"], color=color_tup[1], alpha=1)
280
+ if not kwds.get("medianprops"):
281
+ setp(bp["medians"], color=color_tup[2], alpha=1)
282
+ if not kwds.get("capprops"):
283
+ setp(bp["caps"], color=color_tup[3], alpha=1)
284
+
285
+
286
+ def _grouped_plot_by_column(
287
+ plotf,
288
+ data,
289
+ columns=None,
290
+ by=None,
291
+ numeric_only: bool = True,
292
+ grid: bool = False,
293
+ figsize: tuple[float, float] | None = None,
294
+ ax=None,
295
+ layout=None,
296
+ return_type=None,
297
+ **kwargs,
298
+ ):
299
+ grouped = data.groupby(by, observed=False)
300
+ if columns is None:
301
+ if not isinstance(by, (list, tuple)):
302
+ by = [by]
303
+ columns = data._get_numeric_data().columns.difference(by)
304
+ naxes = len(columns)
305
+ fig, axes = create_subplots(
306
+ naxes=naxes,
307
+ sharex=kwargs.pop("sharex", True),
308
+ sharey=kwargs.pop("sharey", True),
309
+ figsize=figsize,
310
+ ax=ax,
311
+ layout=layout,
312
+ )
313
+
314
+ _axes = flatten_axes(axes)
315
+
316
+ # GH 45465: move the "by" label based on "vert"
317
+ xlabel, ylabel = kwargs.pop("xlabel", None), kwargs.pop("ylabel", None)
318
+ if kwargs.get("vert", True):
319
+ xlabel = xlabel or by
320
+ else:
321
+ ylabel = ylabel or by
322
+
323
+ ax_values = []
324
+
325
+ for i, col in enumerate(columns):
326
+ ax = _axes[i]
327
+ gp_col = grouped[col]
328
+ keys, values = zip(*gp_col)
329
+ re_plotf = plotf(keys, values, ax, xlabel=xlabel, ylabel=ylabel, **kwargs)
330
+ ax.set_title(col)
331
+ ax_values.append(re_plotf)
332
+ ax.grid(grid)
333
+
334
+ result = pd.Series(ax_values, index=columns, copy=False)
335
+
336
+ # Return axes in multiplot case, maybe revisit later # 985
337
+ if return_type is None:
338
+ result = axes
339
+
340
+ byline = by[0] if len(by) == 1 else by
341
+ fig.suptitle(f"Boxplot grouped by {byline}")
342
+ maybe_adjust_figure(fig, bottom=0.15, top=0.9, left=0.1, right=0.9, wspace=0.2)
343
+
344
+ return result
345
+
346
+
347
+ def boxplot(
348
+ data,
349
+ column=None,
350
+ by=None,
351
+ ax=None,
352
+ fontsize: int | None = None,
353
+ rot: int = 0,
354
+ grid: bool = True,
355
+ figsize: tuple[float, float] | None = None,
356
+ layout=None,
357
+ return_type=None,
358
+ **kwds,
359
+ ):
360
+ import matplotlib.pyplot as plt
361
+
362
+ # validate return_type:
363
+ if return_type not in BoxPlot._valid_return_types:
364
+ raise ValueError("return_type must be {'axes', 'dict', 'both'}")
365
+
366
+ if isinstance(data, ABCSeries):
367
+ data = data.to_frame("x")
368
+ column = "x"
369
+
370
+ def _get_colors():
371
+ # num_colors=3 is required as method maybe_color_bp takes the colors
372
+ # in positions 0 and 2.
373
+ # if colors not provided, use same defaults as DataFrame.plot.box
374
+ result = get_standard_colors(num_colors=3)
375
+ result = np.take(result, [0, 0, 2])
376
+ result = np.append(result, "k")
377
+
378
+ colors = kwds.pop("color", None)
379
+ if colors:
380
+ if is_dict_like(colors):
381
+ # replace colors in result array with user-specified colors
382
+ # taken from the colors dict parameter
383
+ # "boxes" value placed in position 0, "whiskers" in 1, etc.
384
+ valid_keys = ["boxes", "whiskers", "medians", "caps"]
385
+ key_to_index = dict(zip(valid_keys, range(4)))
386
+ for key, value in colors.items():
387
+ if key in valid_keys:
388
+ result[key_to_index[key]] = value
389
+ else:
390
+ raise ValueError(
391
+ f"color dict contains invalid key '{key}'. "
392
+ f"The key must be either {valid_keys}"
393
+ )
394
+ else:
395
+ result.fill(colors)
396
+
397
+ return result
398
+
399
+ def plot_group(keys, values, ax: Axes, **kwds):
400
+ # GH 45465: xlabel/ylabel need to be popped out before plotting happens
401
+ xlabel, ylabel = kwds.pop("xlabel", None), kwds.pop("ylabel", None)
402
+ if xlabel:
403
+ ax.set_xlabel(pprint_thing(xlabel))
404
+ if ylabel:
405
+ ax.set_ylabel(pprint_thing(ylabel))
406
+
407
+ keys = [pprint_thing(x) for x in keys]
408
+ values = [np.asarray(remove_na_arraylike(v), dtype=object) for v in values]
409
+ bp = ax.boxplot(values, **kwds)
410
+ if fontsize is not None:
411
+ ax.tick_params(axis="both", labelsize=fontsize)
412
+
413
+ # GH 45465: x/y are flipped when "vert" changes
414
+ _set_ticklabels(
415
+ ax=ax, labels=keys, is_vertical=kwds.get("vert", True), rotation=rot
416
+ )
417
+ maybe_color_bp(bp, color_tup=colors, **kwds)
418
+
419
+ # Return axes in multiplot case, maybe revisit later # 985
420
+ if return_type == "dict":
421
+ return bp
422
+ elif return_type == "both":
423
+ return BoxPlot.BP(ax=ax, lines=bp)
424
+ else:
425
+ return ax
426
+
427
+ colors = _get_colors()
428
+ if column is None:
429
+ columns = None
430
+ elif isinstance(column, (list, tuple)):
431
+ columns = column
432
+ else:
433
+ columns = [column]
434
+
435
+ if by is not None:
436
+ # Prefer array return type for 2-D plots to match the subplot layout
437
+ # https://github.com/pandas-dev/pandas/pull/12216#issuecomment-241175580
438
+ result = _grouped_plot_by_column(
439
+ plot_group,
440
+ data,
441
+ columns=columns,
442
+ by=by,
443
+ grid=grid,
444
+ figsize=figsize,
445
+ ax=ax,
446
+ layout=layout,
447
+ return_type=return_type,
448
+ **kwds,
449
+ )
450
+ else:
451
+ if return_type is None:
452
+ return_type = "axes"
453
+ if layout is not None:
454
+ raise ValueError("The 'layout' keyword is not supported when 'by' is None")
455
+
456
+ if ax is None:
457
+ rc = {"figure.figsize": figsize} if figsize is not None else {}
458
+ with plt.rc_context(rc):
459
+ ax = plt.gca()
460
+ data = data._get_numeric_data()
461
+ naxes = len(data.columns)
462
+ if naxes == 0:
463
+ raise ValueError(
464
+ "boxplot method requires numerical columns, nothing to plot."
465
+ )
466
+ if columns is None:
467
+ columns = data.columns
468
+ else:
469
+ data = data[columns]
470
+
471
+ result = plot_group(columns, data.values.T, ax, **kwds)
472
+ ax.grid(grid)
473
+
474
+ return result
475
+
476
+
477
+ def boxplot_frame(
478
+ self,
479
+ column=None,
480
+ by=None,
481
+ ax=None,
482
+ fontsize: int | None = None,
483
+ rot: int = 0,
484
+ grid: bool = True,
485
+ figsize: tuple[float, float] | None = None,
486
+ layout=None,
487
+ return_type=None,
488
+ **kwds,
489
+ ):
490
+ import matplotlib.pyplot as plt
491
+
492
+ ax = boxplot(
493
+ self,
494
+ column=column,
495
+ by=by,
496
+ ax=ax,
497
+ fontsize=fontsize,
498
+ grid=grid,
499
+ rot=rot,
500
+ figsize=figsize,
501
+ layout=layout,
502
+ return_type=return_type,
503
+ **kwds,
504
+ )
505
+ plt.draw_if_interactive()
506
+ return ax
507
+
508
+
509
+ def boxplot_frame_groupby(
510
+ grouped,
511
+ subplots: bool = True,
512
+ column=None,
513
+ fontsize: int | None = None,
514
+ rot: int = 0,
515
+ grid: bool = True,
516
+ ax=None,
517
+ figsize: tuple[float, float] | None = None,
518
+ layout=None,
519
+ sharex: bool = False,
520
+ sharey: bool = True,
521
+ **kwds,
522
+ ):
523
+ if subplots is True:
524
+ naxes = len(grouped)
525
+ fig, axes = create_subplots(
526
+ naxes=naxes,
527
+ squeeze=False,
528
+ ax=ax,
529
+ sharex=sharex,
530
+ sharey=sharey,
531
+ figsize=figsize,
532
+ layout=layout,
533
+ )
534
+ axes = flatten_axes(axes)
535
+
536
+ ret = pd.Series(dtype=object)
537
+
538
+ for (key, group), ax in zip(grouped, axes):
539
+ d = group.boxplot(
540
+ ax=ax, column=column, fontsize=fontsize, rot=rot, grid=grid, **kwds
541
+ )
542
+ ax.set_title(pprint_thing(key))
543
+ ret.loc[key] = d
544
+ maybe_adjust_figure(fig, bottom=0.15, top=0.9, left=0.1, right=0.9, wspace=0.2)
545
+ else:
546
+ keys, frames = zip(*grouped)
547
+ if grouped.axis == 0:
548
+ df = pd.concat(frames, keys=keys, axis=1)
549
+ elif len(frames) > 1:
550
+ df = frames[0].join(frames[1::])
551
+ else:
552
+ df = frames[0]
553
+
554
+ # GH 16748: DataFrameGroupBy fails when subplots=False and the `column`
555
+ # argument is assigned. Since `df` has MultiIndex columns after the concat
556
+ # above, couple the keys (grouped values) with `column` (the original df
557
+ # columns) to build the subset to plot.
558
+ if column is not None:
559
+ column = com.convert_to_list_like(column)
560
+ multi_key = pd.MultiIndex.from_product([keys, column])
561
+ column = list(multi_key.values)
562
+ ret = df.boxplot(
563
+ column=column,
564
+ fontsize=fontsize,
565
+ rot=rot,
566
+ grid=grid,
567
+ ax=ax,
568
+ figsize=figsize,
569
+ layout=layout,
570
+ **kwds,
571
+ )
572
+ return ret
emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/converter.py ADDED
@@ -0,0 +1,1139 @@
1
+ from __future__ import annotations
2
+
3
+ import contextlib
4
+ import datetime as pydt
5
+ from datetime import (
6
+ datetime,
7
+ timedelta,
8
+ tzinfo,
9
+ )
10
+ import functools
11
+ from typing import (
12
+ TYPE_CHECKING,
13
+ Any,
14
+ cast,
15
+ )
16
+ import warnings
17
+
18
+ import matplotlib.dates as mdates
19
+ from matplotlib.ticker import (
20
+ AutoLocator,
21
+ Formatter,
22
+ Locator,
23
+ )
24
+ from matplotlib.transforms import nonsingular
25
+ import matplotlib.units as munits
26
+ import numpy as np
27
+
28
+ from pandas._libs import lib
29
+ from pandas._libs.tslibs import (
30
+ Timestamp,
31
+ to_offset,
32
+ )
33
+ from pandas._libs.tslibs.dtypes import (
34
+ FreqGroup,
35
+ periods_per_day,
36
+ )
37
+ from pandas._typing import (
38
+ F,
39
+ npt,
40
+ )
41
+
42
+ from pandas.core.dtypes.common import (
43
+ is_float,
44
+ is_float_dtype,
45
+ is_integer,
46
+ is_integer_dtype,
47
+ is_nested_list_like,
48
+ )
49
+
50
+ from pandas import (
51
+ Index,
52
+ Series,
53
+ get_option,
54
+ )
55
+ import pandas.core.common as com
56
+ from pandas.core.indexes.datetimes import date_range
57
+ from pandas.core.indexes.period import (
58
+ Period,
59
+ PeriodIndex,
60
+ period_range,
61
+ )
62
+ import pandas.core.tools.datetimes as tools
63
+
64
+ if TYPE_CHECKING:
65
+ from collections.abc import Generator
66
+
67
+ from matplotlib.axis import Axis
68
+
69
+ from pandas._libs.tslibs.offsets import BaseOffset
70
+
71
+
72
+ _mpl_units = {} # Cache for units overwritten by us
73
+
74
+
75
+ def get_pairs():
76
+ pairs = [
77
+ (Timestamp, DatetimeConverter),
78
+ (Period, PeriodConverter),
79
+ (pydt.datetime, DatetimeConverter),
80
+ (pydt.date, DatetimeConverter),
81
+ (pydt.time, TimeConverter),
82
+ (np.datetime64, DatetimeConverter),
83
+ ]
84
+ return pairs
85
+
86
+
87
+ def register_pandas_matplotlib_converters(func: F) -> F:
88
+ """
89
+ Decorator applying pandas_converters.
90
+ """
91
+
92
+ @functools.wraps(func)
93
+ def wrapper(*args, **kwargs):
94
+ with pandas_converters():
95
+ return func(*args, **kwargs)
96
+
97
+ return cast(F, wrapper)
98
+
99
+
100
+ @contextlib.contextmanager
101
+ def pandas_converters() -> Generator[None, None, None]:
102
+ """
103
+ Context manager registering pandas' converters for a plot.
104
+
105
+ See Also
106
+ --------
107
+ register_pandas_matplotlib_converters : Decorator that applies this.
108
+ """
109
+ value = get_option("plotting.matplotlib.register_converters")
110
+
111
+ if value:
112
+ # register for True or "auto"
113
+ register()
114
+ try:
115
+ yield
116
+ finally:
117
+ if value == "auto":
118
+ # only deregister for "auto"
119
+ deregister()
120
+
121
+
122
+ def register() -> None:
123
+ pairs = get_pairs()
124
+ for type_, cls in pairs:
125
+ # Cache previous converter if present
126
+ if type_ in munits.registry and not isinstance(munits.registry[type_], cls):
127
+ previous = munits.registry[type_]
128
+ _mpl_units[type_] = previous
129
+ # Replace with pandas converter
130
+ munits.registry[type_] = cls()
131
+
132
+
133
+ def deregister() -> None:
134
+ # Renamed in pandas.plotting.__init__
135
+ for type_, cls in get_pairs():
136
+ # We use type to catch our classes directly, no inheritance
137
+ if type(munits.registry.get(type_)) is cls:
138
+ munits.registry.pop(type_)
139
+
140
+ # restore the old keys
141
+ for unit, formatter in _mpl_units.items():
142
+ if type(formatter) not in {DatetimeConverter, PeriodConverter, TimeConverter}:
143
+ # make it idempotent by excluding ours.
144
+ munits.registry[unit] = formatter
145
+
146
+
147
+ def _to_ordinalf(tm: pydt.time) -> float:
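+ # encode a time of day as (fractional) seconds since midnight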
148
+ tot_sec = tm.hour * 3600 + tm.minute * 60 + tm.second + tm.microsecond / 10**6
149
+ return tot_sec
150
+
151
+
152
+ def time2num(d):
153
+ if isinstance(d, str):
154
+ parsed = Timestamp(d)
155
+ return _to_ordinalf(parsed.time())
156
+ if isinstance(d, pydt.time):
157
+ return _to_ordinalf(d)
158
+ return d
159
+
160
+
161
+ class TimeConverter(munits.ConversionInterface):
162
+ @staticmethod
163
+ def convert(value, unit, axis):
164
+ valid_types = (str, pydt.time)
165
+ if isinstance(value, valid_types) or is_integer(value) or is_float(value):
166
+ return time2num(value)
167
+ if isinstance(value, Index):
168
+ return value.map(time2num)
169
+ if isinstance(value, (list, tuple, np.ndarray, Index)):
170
+ return [time2num(x) for x in value]
171
+ return value
172
+
173
+ @staticmethod
174
+ def axisinfo(unit, axis) -> munits.AxisInfo | None:
175
+ if unit != "time":
176
+ return None
177
+
178
+ majloc = AutoLocator()
179
+ majfmt = TimeFormatter(majloc)
180
+ return munits.AxisInfo(majloc=majloc, majfmt=majfmt, label="time")
181
+
182
+ @staticmethod
183
+ def default_units(x, axis) -> str:
184
+ return "time"
185
+
186
+
187
+ # time formatter
188
+ class TimeFormatter(Formatter):
189
+ def __init__(self, locs) -> None:
190
+ self.locs = locs
191
+
192
+ def __call__(self, x, pos: int | None = 0) -> str:
193
+ """
194
+ Return the time of day as a formatted string.
195
+
196
+ Parameters
197
+ ----------
198
+ x : float
199
+ The time of day specified as seconds since 00:00 (midnight),
200
+ with up to microsecond precision.
201
+ pos
202
+ Unused
203
+
204
+ Returns
205
+ -------
206
+ str
207
+ A string in HH:MM:SS.mmmuuu format. Microseconds,
208
+ milliseconds and seconds are only displayed if non-zero.
209
+ """
210
+ fmt = "%H:%M:%S.%f"
211
+ s = int(x)
212
+ msus = round((x - s) * 10**6)
213
+ ms = msus // 1000
214
+ us = msus % 1000
215
+ m, s = divmod(s, 60)
216
+ h, m = divmod(m, 60)
217
+ _, h = divmod(h, 24)
218
+ if us != 0:
219
+ return pydt.time(h, m, s, msus).strftime(fmt)
220
+ elif ms != 0:
221
+ return pydt.time(h, m, s, msus).strftime(fmt)[:-3]
222
+ elif s != 0:
223
+ return pydt.time(h, m, s).strftime("%H:%M:%S")
224
+
225
+ return pydt.time(h, m).strftime("%H:%M")
226
+
227
+
228
+ # Period Conversion
229
+
230
+
231
+ class PeriodConverter(mdates.DateConverter):
232
+ @staticmethod
233
+ def convert(values, units, axis):
234
+ if is_nested_list_like(values):
235
+ values = [PeriodConverter._convert_1d(v, units, axis) for v in values]
236
+ else:
237
+ values = PeriodConverter._convert_1d(values, units, axis)
238
+ return values
239
+
240
+ @staticmethod
241
+ def _convert_1d(values, units, axis):
242
+ if not hasattr(axis, "freq"):
243
+ raise TypeError("Axis must have `freq` set to convert to Periods")
244
+ valid_types = (str, datetime, Period, pydt.date, pydt.time, np.datetime64)
245
+ with warnings.catch_warnings():
246
+ warnings.filterwarnings(
247
+ "ignore", "Period with BDay freq is deprecated", category=FutureWarning
248
+ )
249
+ warnings.filterwarnings(
250
+ "ignore", r"PeriodDtype\[B\] is deprecated", category=FutureWarning
251
+ )
252
+ if (
253
+ isinstance(values, valid_types)
254
+ or is_integer(values)
255
+ or is_float(values)
256
+ ):
257
+ return get_datevalue(values, axis.freq)
258
+ elif isinstance(values, PeriodIndex):
259
+ return values.asfreq(axis.freq).asi8
260
+ elif isinstance(values, Index):
261
+ return values.map(lambda x: get_datevalue(x, axis.freq))
262
+ elif lib.infer_dtype(values, skipna=False) == "period":
263
+ # https://github.com/pandas-dev/pandas/issues/24304
264
+ # convert ndarray[period] -> PeriodIndex
265
+ return PeriodIndex(values, freq=axis.freq).asi8
266
+ elif isinstance(values, (list, tuple, np.ndarray, Index)):
267
+ return [get_datevalue(x, axis.freq) for x in values]
268
+ return values
269
+
270
+
271
+ def get_datevalue(date, freq):
272
+ if isinstance(date, Period):
273
+ return date.asfreq(freq).ordinal
274
+ elif isinstance(date, (str, datetime, pydt.date, pydt.time, np.datetime64)):
275
+ return Period(date, freq).ordinal
276
+ elif (
277
+ is_integer(date)
278
+ or is_float(date)
279
+ or (isinstance(date, (np.ndarray, Index)) and (date.size == 1))
280
+ ):
281
+ return date
282
+ elif date is None:
283
+ return None
284
+ raise ValueError(f"Unrecognizable date '{date}'")
285
+
286
+
287
+ # Datetime Conversion
288
+ class DatetimeConverter(mdates.DateConverter):
289
+ @staticmethod
290
+ def convert(values, unit, axis):
291
+ # values might be a 1-d array, or a list-like of arrays.
292
+ if is_nested_list_like(values):
293
+ values = [DatetimeConverter._convert_1d(v, unit, axis) for v in values]
294
+ else:
295
+ values = DatetimeConverter._convert_1d(values, unit, axis)
296
+ return values
297
+
298
+ @staticmethod
299
+ def _convert_1d(values, unit, axis):
300
+ def try_parse(values):
301
+ try:
302
+ return mdates.date2num(tools.to_datetime(values))
303
+ except Exception:
304
+ return values
305
+
306
+ if isinstance(values, (datetime, pydt.date, np.datetime64, pydt.time)):
307
+ return mdates.date2num(values)
308
+ elif is_integer(values) or is_float(values):
309
+ return values
310
+ elif isinstance(values, str):
311
+ return try_parse(values)
312
+ elif isinstance(values, (list, tuple, np.ndarray, Index, Series)):
313
+ if isinstance(values, Series):
314
+ # https://github.com/matplotlib/matplotlib/issues/11391
315
+ # Series was skipped. Convert to DatetimeIndex to get asi8
316
+ values = Index(values)
317
+ if isinstance(values, Index):
318
+ values = values.values
319
+ if not isinstance(values, np.ndarray):
320
+ values = com.asarray_tuplesafe(values)
321
+
322
+ if is_integer_dtype(values) or is_float_dtype(values):
323
+ return values
324
+
325
+ try:
326
+ values = tools.to_datetime(values)
327
+ except Exception:
328
+ pass
329
+
330
+ values = mdates.date2num(values)
331
+
332
+ return values
333
+
334
+ @staticmethod
335
+ def axisinfo(unit: tzinfo | None, axis) -> munits.AxisInfo:
336
+ """
337
+ Return the :class:`~matplotlib.units.AxisInfo` for *unit*.
338
+
339
+ *unit* is a tzinfo instance or None.
340
+ The *axis* argument is required but not used.
341
+ """
342
+ tz = unit
343
+
344
+ majloc = PandasAutoDateLocator(tz=tz)
345
+ majfmt = PandasAutoDateFormatter(majloc, tz=tz)
346
+ datemin = pydt.date(2000, 1, 1)
347
+ datemax = pydt.date(2010, 1, 1)
348
+
349
+ return munits.AxisInfo(
350
+ majloc=majloc, majfmt=majfmt, label="", default_limits=(datemin, datemax)
351
+ )
352
+
353
+
354
+ class PandasAutoDateFormatter(mdates.AutoDateFormatter):
355
+ def __init__(self, locator, tz=None, defaultfmt: str = "%Y-%m-%d") -> None:
356
+ mdates.AutoDateFormatter.__init__(self, locator, tz, defaultfmt)
357
+
358
+
359
+ class PandasAutoDateLocator(mdates.AutoDateLocator):
360
+ def get_locator(self, dmin, dmax):
361
+ """Pick the best locator based on a distance."""
362
+ tot_sec = (dmax - dmin).total_seconds()
363
+
364
+ if abs(tot_sec) < self.minticks:
365
+ self._freq = -1
366
+ locator = MilliSecondLocator(self.tz)
367
+ locator.set_axis(self.axis)
368
+
369
+ # error: Item "None" of "Axis | _DummyAxis | _AxisWrapper | None"
370
+ # has no attribute "get_data_interval"
371
+ locator.axis.set_view_interval( # type: ignore[union-attr]
372
+ *self.axis.get_view_interval() # type: ignore[union-attr]
373
+ )
374
+ locator.axis.set_data_interval( # type: ignore[union-attr]
375
+ *self.axis.get_data_interval() # type: ignore[union-attr]
376
+ )
377
+ return locator
378
+
379
+ return mdates.AutoDateLocator.get_locator(self, dmin, dmax)
380
+
381
+ def _get_unit(self):
382
+ return MilliSecondLocator.get_unit_generic(self._freq)
383
+
384
+
385
+ class MilliSecondLocator(mdates.DateLocator):
386
+ UNIT = 1.0 / (24 * 3600 * 1000)
387
+
388
+ def __init__(self, tz) -> None:
389
+ mdates.DateLocator.__init__(self, tz)
390
+ self._interval = 1.0
391
+
392
+ def _get_unit(self):
393
+ return self.get_unit_generic(-1)
394
+
395
+ @staticmethod
396
+ def get_unit_generic(freq):
397
+ unit = mdates.RRuleLocator.get_unit_generic(freq)
398
+ if unit < 0:
399
+ return MilliSecondLocator.UNIT
400
+ return unit
401
+
402
+ def __call__(self):
403
+ # if no data have been set, this will tank with a ValueError
404
+ try:
405
+ dmin, dmax = self.viewlim_to_dt()
406
+ except ValueError:
407
+ return []
408
+
409
+ # We need to cap at the endpoints of valid datetime
410
+ nmax, nmin = mdates.date2num((dmax, dmin))
411
+
412
+ num = (nmax - nmin) * 86400 * 1000
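+ # view-interval width in milliseconds (date2num values are in days)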
413
+ max_millis_ticks = 6
414
+ for interval in [1, 10, 50, 100, 200, 500]:
415
+ if num <= interval * (max_millis_ticks - 1):
416
+ self._interval = interval
417
+ break
418
+ # We went through the whole loop without breaking, default to 1
419
+ self._interval = 1000.0
420
+
421
+ estimate = (nmax - nmin) / (self._get_unit() * self._get_interval())
422
+
423
+ if estimate > self.MAXTICKS * 2:
424
+ raise RuntimeError(
425
+ "MillisecondLocator estimated to generate "
426
+ f"{estimate:d} ticks from {dmin} to {dmax}: exceeds Locator.MAXTICKS"
427
+ f"* 2 ({self.MAXTICKS * 2:d}) "
428
+ )
429
+
430
+ interval = self._get_interval()
431
+ freq = f"{interval}ms"
432
+ tz = self.tz.tzname(None)
433
+ st = dmin.replace(tzinfo=None)
434
+ ed = dmax.replace(tzinfo=None)
435
+ all_dates = date_range(start=st, end=ed, freq=freq, tz=tz).astype(object)
436
+
437
+ try:
438
+ if len(all_dates) > 0:
439
+ locs = self.raise_if_exceeds(mdates.date2num(all_dates))
440
+ return locs
441
+ except Exception: # pragma: no cover
442
+ pass
443
+
444
+ lims = mdates.date2num([dmin, dmax])
445
+ return lims
446
+
447
+ def _get_interval(self):
448
+ return self._interval
449
+
450
+ def autoscale(self):
451
+ """
452
+ Set the view limits to include the data range.
453
+ """
454
+ # We need to cap at the endpoints of valid datetime
455
+ dmin, dmax = self.datalim_to_dt()
456
+
457
+ vmin = mdates.date2num(dmin)
458
+ vmax = mdates.date2num(dmax)
459
+
460
+ return self.nonsingular(vmin, vmax)
461
+
462
+
463
+ def _from_ordinal(x, tz: tzinfo | None = None) -> datetime:
464
+ ix = int(x)
465
+ dt = datetime.fromordinal(ix)
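+ # integer part of x is a proleptic Gregorian ordinal; the fraction is the time of day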
466
+ remainder = float(x) - ix
467
+ hour, remainder = divmod(24 * remainder, 1)
468
+ minute, remainder = divmod(60 * remainder, 1)
469
+ second, remainder = divmod(60 * remainder, 1)
470
+ microsecond = int(1_000_000 * remainder)
471
+ if microsecond < 10:
472
+ microsecond = 0 # compensate for rounding errors
473
+ dt = datetime(
474
+ dt.year, dt.month, dt.day, int(hour), int(minute), int(second), microsecond
475
+ )
476
+ if tz is not None:
477
+ dt = dt.astimezone(tz)
478
+
479
+ if microsecond > 999990: # compensate for rounding errors
480
+ dt += timedelta(microseconds=1_000_000 - microsecond)
481
+
482
+ return dt
483
+
484
+
485
+ # Fixed frequency dynamic tick locators and formatters
486
+
487
+ # -------------------------------------------------------------------------
488
+ # --- Locators ---
489
+ # -------------------------------------------------------------------------
490
+
491
+
492
+ def _get_default_annual_spacing(nyears) -> tuple[int, int]:
493
+ """
494
+ Returns a default spacing between consecutive ticks for annual data.
495
+ """
496
+ if nyears < 11:
497
+ (min_spacing, maj_spacing) = (1, 1)
498
+ elif nyears < 20:
499
+ (min_spacing, maj_spacing) = (1, 2)
500
+ elif nyears < 50:
501
+ (min_spacing, maj_spacing) = (1, 5)
502
+ elif nyears < 100:
503
+ (min_spacing, maj_spacing) = (5, 10)
504
+ elif nyears < 200:
505
+ (min_spacing, maj_spacing) = (5, 25)
506
+ elif nyears < 600:
507
+ (min_spacing, maj_spacing) = (10, 50)
508
+ else:
509
+ factor = nyears // 1000 + 1
510
+ (min_spacing, maj_spacing) = (factor * 20, factor * 100)
511
+ return (min_spacing, maj_spacing)
512
+
513
+
514
+ def _period_break(dates: PeriodIndex, period: str) -> npt.NDArray[np.intp]:
515
+ """
516
+ Returns the indices where the given period changes.
517
+
518
+ Parameters
519
+ ----------
520
+ dates : PeriodIndex
521
+ Array of intervals to monitor.
522
+ period : str
523
+ Name of the period to monitor.
524
+ """
525
+ mask = _period_break_mask(dates, period)
526
+ return np.nonzero(mask)[0]
527
+
528
+
529
+ def _period_break_mask(dates: PeriodIndex, period: str) -> npt.NDArray[np.bool_]:
530
+ current = getattr(dates, period)
531
+ previous = getattr(dates - 1 * dates.freq, period)
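+ # True wherever the attribute (e.g. "year") differs from the preceding period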
532
+ return current != previous
533
+
534
+
535
+ def has_level_label(label_flags: npt.NDArray[np.intp], vmin: float) -> bool:
536
+ """
537
+ Returns true if the ``label_flags`` indicate there is at least one label
538
+ for this level.
539
+
540
+ if the minimum view limit is not an exact integer, then the first tick
541
+ label won't be shown, so we must adjust for that.
542
+ """
543
+ if label_flags.size == 0 or (
544
+ label_flags.size == 1 and label_flags[0] == 0 and vmin % 1 > 0.0
545
+ ):
546
+ return False
547
+ else:
548
+ return True
549
+
550
+
551
+ def _get_periods_per_ymd(freq: BaseOffset) -> tuple[int, int, int]:
552
+ # error: "BaseOffset" has no attribute "_period_dtype_code"
553
+ dtype_code = freq._period_dtype_code # type: ignore[attr-defined]
554
+ freq_group = FreqGroup.from_period_dtype_code(dtype_code)
555
+
556
+ ppd = -1 # placeholder for above-day freqs
557
+
558
+ if dtype_code >= FreqGroup.FR_HR.value:
559
+ # error: "BaseOffset" has no attribute "_creso"
560
+ ppd = periods_per_day(freq._creso) # type: ignore[attr-defined]
561
+ ppm = 28 * ppd
562
+ ppy = 365 * ppd
563
+ elif freq_group == FreqGroup.FR_BUS:
564
+ ppm = 19
565
+ ppy = 261
566
+ elif freq_group == FreqGroup.FR_DAY:
567
+ ppm = 28
568
+ ppy = 365
569
+ elif freq_group == FreqGroup.FR_WK:
570
+ ppm = 3
571
+ ppy = 52
572
+ elif freq_group == FreqGroup.FR_MTH:
573
+ ppm = 1
574
+ ppy = 12
575
+ elif freq_group == FreqGroup.FR_QTR:
576
+ ppm = -1 # placeholder
577
+ ppy = 4
578
+ elif freq_group == FreqGroup.FR_ANN:
579
+ ppm = -1 # placeholder
580
+ ppy = 1
581
+ else:
582
+ raise NotImplementedError(f"Unsupported frequency: {dtype_code}")
583
+
584
+ return ppd, ppm, ppy
585
+
586
+
587
+ @functools.cache
588
+ def _daily_finder(vmin: float, vmax: float, freq: BaseOffset) -> np.ndarray:
589
+ # error: "BaseOffset" has no attribute "_period_dtype_code"
590
+ dtype_code = freq._period_dtype_code # type: ignore[attr-defined]
591
+
592
+ periodsperday, periodspermonth, periodsperyear = _get_periods_per_ymd(freq)
593
+
594
+ # save this for later usage
595
+ vmin_orig = vmin
596
+ (vmin, vmax) = (int(vmin), int(vmax))
597
+ span = vmax - vmin + 1
598
+
599
+ with warnings.catch_warnings():
600
+ warnings.filterwarnings(
601
+ "ignore", "Period with BDay freq is deprecated", category=FutureWarning
602
+ )
603
+ warnings.filterwarnings(
604
+ "ignore", r"PeriodDtype\[B\] is deprecated", category=FutureWarning
605
+ )
606
+ dates_ = period_range(
607
+ start=Period(ordinal=vmin, freq=freq),
608
+ end=Period(ordinal=vmax, freq=freq),
609
+ freq=freq,
610
+ )
611
+
612
+ # Initialize the output
613
+ info = np.zeros(
614
+ span, dtype=[("val", np.int64), ("maj", bool), ("min", bool), ("fmt", "|S20")]
615
+ )
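+ # one record per period in the span: ordinal value, major/minor tick flags, strftime format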
616
+ info["val"][:] = dates_.asi8
617
+ info["fmt"][:] = ""
618
+ info["maj"][[0, -1]] = True
619
+ # .. and set some shortcuts
620
+ info_maj = info["maj"]
621
+ info_min = info["min"]
622
+ info_fmt = info["fmt"]
623
+
624
+ def first_label(label_flags):
625
+ if (label_flags[0] == 0) and (label_flags.size > 1) and ((vmin_orig % 1) > 0.0):
626
+ return label_flags[1]
627
+ else:
628
+ return label_flags[0]
629
+
630
+ # Case 1. Less than a month
631
+ if span <= periodspermonth:
632
+ day_start = _period_break(dates_, "day")
633
+ month_start = _period_break(dates_, "month")
634
+ year_start = _period_break(dates_, "year")
635
+
636
+ def _hour_finder(label_interval: int, force_year_start: bool) -> None:
637
+ target = dates_.hour
638
+ mask = _period_break_mask(dates_, "hour")
639
+ info_maj[day_start] = True
640
+ info_min[mask & (target % label_interval == 0)] = True
641
+ info_fmt[mask & (target % label_interval == 0)] = "%H:%M"
642
+ info_fmt[day_start] = "%H:%M\n%d-%b"
643
+ info_fmt[year_start] = "%H:%M\n%d-%b\n%Y"
644
+ if force_year_start and not has_level_label(year_start, vmin_orig):
645
+ info_fmt[first_label(day_start)] = "%H:%M\n%d-%b\n%Y"
646
+
647
+ def _minute_finder(label_interval: int) -> None:
648
+ target = dates_.minute
649
+ hour_start = _period_break(dates_, "hour")
650
+ mask = _period_break_mask(dates_, "minute")
651
+ info_maj[hour_start] = True
652
+ info_min[mask & (target % label_interval == 0)] = True
653
+ info_fmt[mask & (target % label_interval == 0)] = "%H:%M"
654
+ info_fmt[day_start] = "%H:%M\n%d-%b"
655
+ info_fmt[year_start] = "%H:%M\n%d-%b\n%Y"
656
+
657
+ def _second_finder(label_interval: int) -> None:
658
+ target = dates_.second
659
+ minute_start = _period_break(dates_, "minute")
660
+ mask = _period_break_mask(dates_, "second")
661
+ info_maj[minute_start] = True
662
+ info_min[mask & (target % label_interval == 0)] = True
663
+ info_fmt[mask & (target % label_interval == 0)] = "%H:%M:%S"
664
+ info_fmt[day_start] = "%H:%M:%S\n%d-%b"
665
+ info_fmt[year_start] = "%H:%M:%S\n%d-%b\n%Y"
666
+
667
+ if span < periodsperday / 12000:
668
+ _second_finder(1)
669
+ elif span < periodsperday / 6000:
670
+ _second_finder(2)
671
+ elif span < periodsperday / 2400:
672
+ _second_finder(5)
673
+ elif span < periodsperday / 1200:
674
+ _second_finder(10)
675
+ elif span < periodsperday / 800:
676
+ _second_finder(15)
677
+ elif span < periodsperday / 400:
678
+ _second_finder(30)
679
+ elif span < periodsperday / 150:
680
+ _minute_finder(1)
681
+ elif span < periodsperday / 70:
682
+ _minute_finder(2)
683
+ elif span < periodsperday / 24:
684
+ _minute_finder(5)
685
+ elif span < periodsperday / 12:
686
+ _minute_finder(15)
687
+ elif span < periodsperday / 6:
688
+ _minute_finder(30)
689
+ elif span < periodsperday / 2.5:
690
+ _hour_finder(1, False)
691
+ elif span < periodsperday / 1.5:
692
+ _hour_finder(2, False)
693
+ elif span < periodsperday * 1.25:
694
+ _hour_finder(3, False)
695
+ elif span < periodsperday * 2.5:
696
+ _hour_finder(6, True)
697
+ elif span < periodsperday * 4:
698
+ _hour_finder(12, True)
699
+ else:
700
+ info_maj[month_start] = True
701
+ info_min[day_start] = True
702
+ info_fmt[day_start] = "%d"
703
+ info_fmt[month_start] = "%d\n%b"
704
+ info_fmt[year_start] = "%d\n%b\n%Y"
705
+ if not has_level_label(year_start, vmin_orig):
706
+ if not has_level_label(month_start, vmin_orig):
707
+ info_fmt[first_label(day_start)] = "%d\n%b\n%Y"
708
+ else:
709
+ info_fmt[first_label(month_start)] = "%d\n%b\n%Y"
710
+
711
+ # Case 2. Less than three months
712
+ elif span <= periodsperyear // 4:
713
+ month_start = _period_break(dates_, "month")
714
+ info_maj[month_start] = True
715
+ if dtype_code < FreqGroup.FR_HR.value:
716
+ info["min"] = True
717
+ else:
718
+ day_start = _period_break(dates_, "day")
719
+ info["min"][day_start] = True
720
+ week_start = _period_break(dates_, "week")
721
+ year_start = _period_break(dates_, "year")
722
+ info_fmt[week_start] = "%d"
723
+ info_fmt[month_start] = "\n\n%b"
724
+ info_fmt[year_start] = "\n\n%b\n%Y"
725
+ if not has_level_label(year_start, vmin_orig):
726
+ if not has_level_label(month_start, vmin_orig):
727
+ info_fmt[first_label(week_start)] = "\n\n%b\n%Y"
728
+ else:
729
+ info_fmt[first_label(month_start)] = "\n\n%b\n%Y"
730
+ # Case 3. Less than 14 months ...............
731
+ elif span <= 1.15 * periodsperyear:
732
+ year_start = _period_break(dates_, "year")
733
+ month_start = _period_break(dates_, "month")
734
+ week_start = _period_break(dates_, "week")
735
+ info_maj[month_start] = True
736
+ info_min[week_start] = True
737
+ info_min[year_start] = False
738
+ info_min[month_start] = False
739
+ info_fmt[month_start] = "%b"
740
+ info_fmt[year_start] = "%b\n%Y"
741
+ if not has_level_label(year_start, vmin_orig):
742
+ info_fmt[first_label(month_start)] = "%b\n%Y"
743
+ # Case 4. Less than 2.5 years ...............
744
+ elif span <= 2.5 * periodsperyear:
745
+ year_start = _period_break(dates_, "year")
746
+ quarter_start = _period_break(dates_, "quarter")
747
+ month_start = _period_break(dates_, "month")
748
+ info_maj[quarter_start] = True
749
+ info_min[month_start] = True
750
+ info_fmt[quarter_start] = "%b"
751
+ info_fmt[year_start] = "%b\n%Y"
752
+ # Case 4. Less than 4 years .................
753
+ elif span <= 4 * periodsperyear:
754
+ year_start = _period_break(dates_, "year")
755
+ month_start = _period_break(dates_, "month")
756
+ info_maj[year_start] = True
757
+ info_min[month_start] = True
758
+ info_min[year_start] = False
759
+
760
+ month_break = dates_[month_start].month
761
+ jan_or_jul = month_start[(month_break == 1) | (month_break == 7)]
762
+ info_fmt[jan_or_jul] = "%b"
763
+ info_fmt[year_start] = "%b\n%Y"
764
+ # Case 5. Less than 11 years ................
765
+ elif span <= 11 * periodsperyear:
766
+ year_start = _period_break(dates_, "year")
767
+ quarter_start = _period_break(dates_, "quarter")
768
+ info_maj[year_start] = True
769
+ info_min[quarter_start] = True
770
+ info_min[year_start] = False
771
+ info_fmt[year_start] = "%Y"
772
+ # Case 6. More than 12 years ................
773
+ else:
774
+ year_start = _period_break(dates_, "year")
775
+ year_break = dates_[year_start].year
776
+ nyears = span / periodsperyear
777
+ (min_anndef, maj_anndef) = _get_default_annual_spacing(nyears)
778
+ major_idx = year_start[(year_break % maj_anndef == 0)]
779
+ info_maj[major_idx] = True
780
+ minor_idx = year_start[(year_break % min_anndef == 0)]
781
+ info_min[minor_idx] = True
782
+ info_fmt[major_idx] = "%Y"
783
+
784
+ return info
785
+
786
+
787
+ @functools.cache
788
+ def _monthly_finder(vmin: float, vmax: float, freq: BaseOffset) -> np.ndarray:
789
+ _, _, periodsperyear = _get_periods_per_ymd(freq)
790
+
791
+ vmin_orig = vmin
792
+ (vmin, vmax) = (int(vmin), int(vmax))
793
+ span = vmax - vmin + 1
794
+
795
+ # Initialize the output
796
+ info = np.zeros(
797
+ span, dtype=[("val", int), ("maj", bool), ("min", bool), ("fmt", "|S8")]
798
+ )
799
+ info["val"] = np.arange(vmin, vmax + 1)
800
+ dates_ = info["val"]
801
+ info["fmt"] = ""
802
+ year_start = (dates_ % 12 == 0).nonzero()[0]
803
+ info_maj = info["maj"]
804
+ info_fmt = info["fmt"]
805
+
806
+ if span <= 1.15 * periodsperyear:
807
+ info_maj[year_start] = True
808
+ info["min"] = True
809
+
810
+ info_fmt[:] = "%b"
811
+ info_fmt[year_start] = "%b\n%Y"
812
+
813
+ if not has_level_label(year_start, vmin_orig):
814
+ if dates_.size > 1:
815
+ idx = 1
816
+ else:
817
+ idx = 0
818
+ info_fmt[idx] = "%b\n%Y"
819
+
820
+ elif span <= 2.5 * periodsperyear:
821
+ quarter_start = (dates_ % 3 == 0).nonzero()
822
+ info_maj[year_start] = True
823
+ # TODO: Check the following : is it really info['fmt'] ?
824
+ # 2023-09-15 this is reached in test_finder_monthly
825
+ info["fmt"][quarter_start] = True
826
+ info["min"] = True
827
+
828
+ info_fmt[quarter_start] = "%b"
829
+ info_fmt[year_start] = "%b\n%Y"
830
+
831
+ elif span <= 4 * periodsperyear:
832
+ info_maj[year_start] = True
833
+ info["min"] = True
834
+
835
+ jan_or_jul = (dates_ % 12 == 0) | (dates_ % 12 == 6)
836
+ info_fmt[jan_or_jul] = "%b"
837
+ info_fmt[year_start] = "%b\n%Y"
838
+
839
+ elif span <= 11 * periodsperyear:
840
+ quarter_start = (dates_ % 3 == 0).nonzero()
841
+ info_maj[year_start] = True
842
+ info["min"][quarter_start] = True
843
+
844
+ info_fmt[year_start] = "%Y"
845
+
846
+ else:
847
+ nyears = span / periodsperyear
848
+ (min_anndef, maj_anndef) = _get_default_annual_spacing(nyears)
849
+ years = dates_[year_start] // 12 + 1
850
+ major_idx = year_start[(years % maj_anndef == 0)]
851
+ info_maj[major_idx] = True
852
+ info["min"][year_start[(years % min_anndef == 0)]] = True
853
+
854
+ info_fmt[major_idx] = "%Y"
855
+
856
+ return info
857
+
858
+
859
+ @functools.cache
860
+ def _quarterly_finder(vmin: float, vmax: float, freq: BaseOffset) -> np.ndarray:
861
+ _, _, periodsperyear = _get_periods_per_ymd(freq)
862
+ vmin_orig = vmin
863
+ (vmin, vmax) = (int(vmin), int(vmax))
864
+ span = vmax - vmin + 1
865
+
866
+ info = np.zeros(
867
+ span, dtype=[("val", int), ("maj", bool), ("min", bool), ("fmt", "|S8")]
868
+ )
869
+ info["val"] = np.arange(vmin, vmax + 1)
870
+ info["fmt"] = ""
871
+ dates_ = info["val"]
872
+ info_maj = info["maj"]
873
+ info_fmt = info["fmt"]
874
+ year_start = (dates_ % 4 == 0).nonzero()[0]
875
+
876
+ if span <= 3.5 * periodsperyear:
877
+ info_maj[year_start] = True
878
+ info["min"] = True
879
+
880
+ info_fmt[:] = "Q%q"
881
+ info_fmt[year_start] = "Q%q\n%F"
882
+ if not has_level_label(year_start, vmin_orig):
883
+ if dates_.size > 1:
884
+ idx = 1
885
+ else:
886
+ idx = 0
887
+ info_fmt[idx] = "Q%q\n%F"
888
+
889
+ elif span <= 11 * periodsperyear:
890
+ info_maj[year_start] = True
891
+ info["min"] = True
892
+ info_fmt[year_start] = "%F"
893
+
894
+ else:
895
+ # https://github.com/pandas-dev/pandas/pull/47602
896
+ years = dates_[year_start] // 4 + 1970
897
+ nyears = span / periodsperyear
898
+ (min_anndef, maj_anndef) = _get_default_annual_spacing(nyears)
899
+ major_idx = year_start[(years % maj_anndef == 0)]
900
+ info_maj[major_idx] = True
901
+ info["min"][year_start[(years % min_anndef == 0)]] = True
902
+ info_fmt[major_idx] = "%F"
903
+
904
+ return info
905
+
906
+
907
+ @functools.cache
908
+ def _annual_finder(vmin: float, vmax: float, freq: BaseOffset) -> np.ndarray:
909
+ # Note: small difference here vs other finders in adding 1 to vmax
910
+ (vmin, vmax) = (int(vmin), int(vmax + 1))
911
+ span = vmax - vmin + 1
912
+
913
+ info = np.zeros(
914
+ span, dtype=[("val", int), ("maj", bool), ("min", bool), ("fmt", "|S8")]
915
+ )
916
+ info["val"] = np.arange(vmin, vmax + 1)
917
+ info["fmt"] = ""
918
+ dates_ = info["val"]
919
+
920
+ (min_anndef, maj_anndef) = _get_default_annual_spacing(span)
921
+ major_idx = dates_ % maj_anndef == 0
922
+ minor_idx = dates_ % min_anndef == 0
923
+ info["maj"][major_idx] = True
924
+ info["min"][minor_idx] = True
925
+ info["fmt"][major_idx] = "%Y"
926
+
927
+ return info
928
+
929
+
930
+ def get_finder(freq: BaseOffset):
931
+ # error: "BaseOffset" has no attribute "_period_dtype_code"
932
+ dtype_code = freq._period_dtype_code # type: ignore[attr-defined]
933
+ fgroup = FreqGroup.from_period_dtype_code(dtype_code)
934
+
935
+ if fgroup == FreqGroup.FR_ANN:
936
+ return _annual_finder
937
+ elif fgroup == FreqGroup.FR_QTR:
938
+ return _quarterly_finder
939
+ elif fgroup == FreqGroup.FR_MTH:
940
+ return _monthly_finder
941
+ elif (dtype_code >= FreqGroup.FR_BUS.value) or fgroup == FreqGroup.FR_WK:
942
+ return _daily_finder
943
+ else: # pragma: no cover
944
+ raise NotImplementedError(f"Unsupported frequency: {dtype_code}")
945
+
946
+
947
+ class TimeSeries_DateLocator(Locator):
948
+ """
949
+ Locates the ticks along an axis controlled by a :class:`Series`.
950
+
951
+ Parameters
952
+ ----------
953
+ freq : BaseOffset
954
+ Valid frequency specifier.
955
+ minor_locator : {False, True}, optional
956
+ Whether the locator is for minor ticks (True) or not.
957
+ dynamic_mode : {True, False}, optional
958
+ Whether the locator should work in dynamic mode.
959
+ base : {int}, optional
960
+ quarter : {int}, optional
961
+ month : {int}, optional
962
+ day : {int}, optional
963
+ """
964
+
965
+ axis: Axis
966
+
967
+ def __init__(
968
+ self,
969
+ freq: BaseOffset,
970
+ minor_locator: bool = False,
971
+ dynamic_mode: bool = True,
972
+ base: int = 1,
973
+ quarter: int = 1,
974
+ month: int = 1,
975
+ day: int = 1,
976
+ plot_obj=None,
977
+ ) -> None:
978
+ freq = to_offset(freq, is_period=True)
979
+ self.freq = freq
980
+ self.base = base
981
+ (self.quarter, self.month, self.day) = (quarter, month, day)
982
+ self.isminor = minor_locator
983
+ self.isdynamic = dynamic_mode
984
+ self.offset = 0
985
+ self.plot_obj = plot_obj
986
+ self.finder = get_finder(freq)
987
+
988
+ def _get_default_locs(self, vmin, vmax):
989
+ """Returns the default locations of ticks."""
990
+ locator = self.finder(vmin, vmax, self.freq)
991
+
992
+ if self.isminor:
993
+ return np.compress(locator["min"], locator["val"])
994
+ return np.compress(locator["maj"], locator["val"])
995
+
996
+ def __call__(self):
997
+ """Return the locations of the ticks."""
998
+ # axis calls Locator.set_axis inside set_m<xxxx>_formatter
999
+
1000
+ vi = tuple(self.axis.get_view_interval())
1001
+ vmin, vmax = vi
1002
+ if vmax < vmin:
1003
+ vmin, vmax = vmax, vmin
1004
+ if self.isdynamic:
1005
+ locs = self._get_default_locs(vmin, vmax)
1006
+ else: # pragma: no cover
1007
+ base = self.base
1008
+ (d, m) = divmod(vmin, base)
1009
+ vmin = (d + 1) * base
1010
+ # error: No overload variant of "range" matches argument types "float",
1011
+ # "float", "int"
1012
+ locs = list(range(vmin, vmax + 1, base)) # type: ignore[call-overload]
1013
+ return locs
1014
+
1015
+ def autoscale(self):
1016
+ """
1017
+ Sets the view limits to the nearest multiples of base that contain the
1018
+ data.
1019
+ """
1020
+ # requires matplotlib >= 0.98.0
1021
+ (vmin, vmax) = self.axis.get_data_interval()
1022
+
1023
+ locs = self._get_default_locs(vmin, vmax)
1024
+ (vmin, vmax) = locs[[0, -1]]
1025
+ if vmin == vmax:
1026
+ vmin -= 1
1027
+ vmax += 1
1028
+ return nonsingular(vmin, vmax)
1029
+
1030
+
1031
+ # -------------------------------------------------------------------------
1032
+ # --- Formatter ---
1033
+ # -------------------------------------------------------------------------
1034
+
1035
+
1036
+ class TimeSeries_DateFormatter(Formatter):
1037
+ """
1038
+ Formats the ticks along an axis controlled by a :class:`PeriodIndex`.
1039
+
1040
+ Parameters
1041
+ ----------
1042
+ freq : BaseOffset
1043
+ Valid frequency specifier.
1044
+ minor_locator : bool, default False
1045
+ Whether the current formatter should apply to minor ticks (True) or
1046
+ major ticks (False).
1047
+ dynamic_mode : bool, default True
1048
+ Whether the formatter works in dynamic mode or not.
1049
+ """
1050
+
1051
+ axis: Axis
1052
+
1053
+ def __init__(
1054
+ self,
1055
+ freq: BaseOffset,
1056
+ minor_locator: bool = False,
1057
+ dynamic_mode: bool = True,
1058
+ plot_obj=None,
1059
+ ) -> None:
1060
+ freq = to_offset(freq, is_period=True)
1061
+ self.format = None
1062
+ self.freq = freq
1063
+ self.locs: list[Any] = [] # unused, for matplotlib compat
1064
+ self.formatdict: dict[Any, Any] | None = None
1065
+ self.isminor = minor_locator
1066
+ self.isdynamic = dynamic_mode
1067
+ self.offset = 0
1068
+ self.plot_obj = plot_obj
1069
+ self.finder = get_finder(freq)
1070
+
1071
+ def _set_default_format(self, vmin, vmax):
1072
+ """Returns the default ticks spacing."""
1073
+ info = self.finder(vmin, vmax, self.freq)
1074
+
1075
+ if self.isminor:
1076
+ format = np.compress(info["min"] & np.logical_not(info["maj"]), info)
1077
+ else:
1078
+ format = np.compress(info["maj"], info)
1079
+ self.formatdict = {x: f for (x, _, _, f) in format}
1080
+ return self.formatdict
1081
+
1082
+ def set_locs(self, locs) -> None:
1083
+ """Sets the locations of the ticks"""
1084
+ # don't actually use the locs. This is just needed to work with
1085
+ # matplotlib. Force to use vmin, vmax
1086
+
1087
+ self.locs = locs
1088
+
1089
+ (vmin, vmax) = tuple(self.axis.get_view_interval())
1090
+ if vmax < vmin:
1091
+ (vmin, vmax) = (vmax, vmin)
1092
+ self._set_default_format(vmin, vmax)
1093
+
1094
+ def __call__(self, x, pos: int | None = 0) -> str:
1095
+ if self.formatdict is None:
1096
+ return ""
1097
+ else:
1098
+ fmt = self.formatdict.pop(x, "")
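+ # pop() so each tick location is formatted at most once; unknown locations fall back to ""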
1099
+ if isinstance(fmt, np.bytes_):
1100
+ fmt = fmt.decode("utf-8")
1101
+ with warnings.catch_warnings():
1102
+ warnings.filterwarnings(
1103
+ "ignore",
1104
+ "Period with BDay freq is deprecated",
1105
+ category=FutureWarning,
1106
+ )
1107
+ period = Period(ordinal=int(x), freq=self.freq)
1108
+ assert isinstance(period, Period)
1109
+ return period.strftime(fmt)
1110
+
1111
+
1112
+ class TimeSeries_TimedeltaFormatter(Formatter):
1113
+ """
1114
+ Formats the ticks along an axis controlled by a :class:`TimedeltaIndex`.
1115
+ """
1116
+
1117
+ axis: Axis
1118
+
1119
+ @staticmethod
1120
+ def format_timedelta_ticks(x, pos, n_decimals: int) -> str:
1121
+ """
1122
+ Convert seconds to 'D days HH:MM:SS.F'
1123
+ """
1124
+ s, ns = divmod(x, 10**9) # TODO(non-nano): this looks like it assumes ns
1125
+ m, s = divmod(s, 60)
1126
+ h, m = divmod(m, 60)
1127
+ d, h = divmod(h, 24)
1128
+ decimals = int(ns * 10 ** (n_decimals - 9))
1129
+ s = f"{int(h):02d}:{int(m):02d}:{int(s):02d}"
1130
+ if n_decimals > 0:
1131
+ s += f".{decimals:0{n_decimals}d}"
1132
+ if d != 0:
1133
+ s = f"{int(d):d} days {s}"
1134
+ return s
1135
+
1136
+ def __call__(self, x, pos: int | None = 0) -> str:
1137
+ (vmin, vmax) = tuple(self.axis.get_view_interval())
1138
+ n_decimals = min(int(np.ceil(np.log10(100 * 10**9 / abs(vmax - vmin)))), 9)
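+ # enough fractional-second digits to resolve ~1/100 of the visible range, capped at nanoseconds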
1139
+ return self.format_timedelta_ticks(x, pos, n_decimals)
emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/core.py ADDED
@@ -0,0 +1,2125 @@
1
+ from __future__ import annotations
2
+
3
+ from abc import (
4
+ ABC,
5
+ abstractmethod,
6
+ )
7
+ from collections.abc import (
8
+ Hashable,
9
+ Iterable,
10
+ Iterator,
11
+ Sequence,
12
+ )
13
+ from typing import (
14
+ TYPE_CHECKING,
15
+ Any,
16
+ Literal,
17
+ cast,
18
+ final,
19
+ )
20
+ import warnings
21
+
22
+ import matplotlib as mpl
23
+ import numpy as np
24
+
25
+ from pandas._libs import lib
26
+ from pandas.errors import AbstractMethodError
27
+ from pandas.util._decorators import cache_readonly
28
+ from pandas.util._exceptions import find_stack_level
29
+
30
+ from pandas.core.dtypes.common import (
31
+ is_any_real_numeric_dtype,
32
+ is_bool,
33
+ is_float,
34
+ is_float_dtype,
35
+ is_hashable,
36
+ is_integer,
37
+ is_integer_dtype,
38
+ is_iterator,
39
+ is_list_like,
40
+ is_number,
41
+ is_numeric_dtype,
42
+ )
43
+ from pandas.core.dtypes.dtypes import (
44
+ CategoricalDtype,
45
+ ExtensionDtype,
46
+ )
47
+ from pandas.core.dtypes.generic import (
48
+ ABCDataFrame,
49
+ ABCDatetimeIndex,
50
+ ABCIndex,
51
+ ABCMultiIndex,
52
+ ABCPeriodIndex,
53
+ ABCSeries,
54
+ )
55
+ from pandas.core.dtypes.missing import isna
56
+
57
+ import pandas.core.common as com
58
+ from pandas.core.frame import DataFrame
59
+ from pandas.util.version import Version
60
+
61
+ from pandas.io.formats.printing import pprint_thing
62
+ from pandas.plotting._matplotlib import tools
63
+ from pandas.plotting._matplotlib.converter import register_pandas_matplotlib_converters
64
+ from pandas.plotting._matplotlib.groupby import reconstruct_data_with_by
65
+ from pandas.plotting._matplotlib.misc import unpack_single_str_list
66
+ from pandas.plotting._matplotlib.style import get_standard_colors
67
+ from pandas.plotting._matplotlib.timeseries import (
68
+ decorate_axes,
69
+ format_dateaxis,
70
+ maybe_convert_index,
71
+ maybe_resample,
72
+ use_dynamic_x,
73
+ )
74
+ from pandas.plotting._matplotlib.tools import (
75
+ create_subplots,
76
+ flatten_axes,
77
+ format_date_labels,
78
+ get_all_lines,
79
+ get_xlim,
80
+ handle_shared_axes,
81
+ )
82
+
83
+ if TYPE_CHECKING:
84
+ from matplotlib.artist import Artist
85
+ from matplotlib.axes import Axes
86
+ from matplotlib.axis import Axis
87
+ from matplotlib.figure import Figure
88
+
89
+ from pandas._typing import (
90
+ IndexLabel,
91
+ NDFrameT,
92
+ PlottingOrientation,
93
+ npt,
94
+ )
95
+
96
+ from pandas import Series
97
+
98
+
99
+ def _color_in_style(style: str) -> bool:
100
+ """
101
+ Check if there is a color letter in the style string.
102
+ """
103
+ from matplotlib.colors import BASE_COLORS
104
+
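+ # BASE_COLORS are matplotlib's single-letter colors ("b", "g", "r", "c", "m", "y", "k", "w")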
105
+ return not set(BASE_COLORS).isdisjoint(style)
106
+
107
+
108
+ class MPLPlot(ABC):
109
+ """
110
+ Base class for assembling a pandas plot using matplotlib
111
+
112
+ Parameters
113
+ ----------
114
+ data :
115
+
116
+ """
117
+
118
+ @property
119
+ @abstractmethod
120
+ def _kind(self) -> str:
121
+ """Specify kind str. Must be overridden in child class"""
122
+ raise NotImplementedError
123
+
124
+ _layout_type = "vertical"
125
+ _default_rot = 0
126
+
127
+ @property
128
+ def orientation(self) -> str | None:
129
+ return None
130
+
131
+ data: DataFrame
132
+
133
+ def __init__(
134
+ self,
135
+ data,
136
+ kind=None,
137
+ by: IndexLabel | None = None,
138
+ subplots: bool | Sequence[Sequence[str]] = False,
139
+ sharex: bool | None = None,
140
+ sharey: bool = False,
141
+ use_index: bool = True,
142
+ figsize: tuple[float, float] | None = None,
143
+ grid=None,
144
+ legend: bool | str = True,
145
+ rot=None,
146
+ ax=None,
147
+ fig=None,
148
+ title=None,
149
+ xlim=None,
150
+ ylim=None,
151
+ xticks=None,
152
+ yticks=None,
153
+ xlabel: Hashable | None = None,
154
+ ylabel: Hashable | None = None,
155
+ fontsize: int | None = None,
156
+ secondary_y: bool | tuple | list | np.ndarray = False,
157
+ colormap=None,
158
+ table: bool = False,
159
+ layout=None,
160
+ include_bool: bool = False,
161
+ column: IndexLabel | None = None,
162
+ *,
163
+ logx: bool | None | Literal["sym"] = False,
164
+ logy: bool | None | Literal["sym"] = False,
165
+ loglog: bool | None | Literal["sym"] = False,
166
+ mark_right: bool = True,
167
+ stacked: bool = False,
168
+ label: Hashable | None = None,
169
+ style=None,
170
+ **kwds,
171
+ ) -> None:
172
+ import matplotlib.pyplot as plt
173
+
174
+ # if users assign an empty list or tuple, raise `ValueError`
175
+ # similar to current `df.box` and `df.hist` APIs.
176
+ if by in ([], ()):
177
+ raise ValueError("No group keys passed!")
178
+ self.by = com.maybe_make_list(by)
179
+
180
+ # Assign the rest of columns into self.columns if by is explicitly defined
181
+ # while column is not, only need `columns` in hist/box plot when it's DF
182
+ # TODO: Might deprecate `column` argument in future PR (#28373)
183
+ if isinstance(data, DataFrame):
184
+ if column:
185
+ self.columns = com.maybe_make_list(column)
186
+ elif self.by is None:
187
+ self.columns = [
188
+ col for col in data.columns if is_numeric_dtype(data[col])
189
+ ]
190
+ else:
191
+ self.columns = [
192
+ col
193
+ for col in data.columns
194
+ if col not in self.by and is_numeric_dtype(data[col])
195
+ ]
196
+
197
+ # For `hist` plot, need to get grouped original data before `self.data` is
198
+ # updated later
199
+ if self.by is not None and self._kind == "hist":
200
+ self._grouped = data.groupby(unpack_single_str_list(self.by))
201
+
202
+ self.kind = kind
203
+
204
+ self.subplots = type(self)._validate_subplots_kwarg(
205
+ subplots, data, kind=self._kind
206
+ )
207
+
208
+ self.sharex = type(self)._validate_sharex(sharex, ax, by)
209
+ self.sharey = sharey
210
+ self.figsize = figsize
211
+ self.layout = layout
212
+
213
+ self.xticks = xticks
214
+ self.yticks = yticks
215
+ self.xlim = xlim
216
+ self.ylim = ylim
217
+ self.title = title
218
+ self.use_index = use_index
219
+ self.xlabel = xlabel
220
+ self.ylabel = ylabel
221
+
222
+ self.fontsize = fontsize
223
+
224
+ if rot is not None:
225
+ self.rot = rot
226
+ # need to know for format_date_labels since it's rotated to 30 by
227
+ # default
228
+ self._rot_set = True
229
+ else:
230
+ self._rot_set = False
231
+ self.rot = self._default_rot
232
+
233
+ if grid is None:
234
+ grid = False if secondary_y else plt.rcParams["axes.grid"]
235
+
236
+ self.grid = grid
237
+ self.legend = legend
238
+ self.legend_handles: list[Artist] = []
239
+ self.legend_labels: list[Hashable] = []
240
+
241
+ self.logx = type(self)._validate_log_kwd("logx", logx)
242
+ self.logy = type(self)._validate_log_kwd("logy", logy)
243
+ self.loglog = type(self)._validate_log_kwd("loglog", loglog)
244
+ self.label = label
245
+ self.style = style
246
+ self.mark_right = mark_right
247
+ self.stacked = stacked
248
+
249
+ # ax may be an Axes object or (if self.subplots) an ndarray of
250
+ # Axes objects
251
+ self.ax = ax
252
+ # TODO: deprecate fig keyword as it is ignored, not passed in tests
253
+ # as of 2023-11-05
254
+
255
+ # parse errorbar input if given
256
+ xerr = kwds.pop("xerr", None)
257
+ yerr = kwds.pop("yerr", None)
258
+ nseries = self._get_nseries(data)
259
+ xerr, data = type(self)._parse_errorbars("xerr", xerr, data, nseries)
260
+ yerr, data = type(self)._parse_errorbars("yerr", yerr, data, nseries)
261
+ self.errors = {"xerr": xerr, "yerr": yerr}
262
+ self.data = data
263
+
264
+ if not isinstance(secondary_y, (bool, tuple, list, np.ndarray, ABCIndex)):
265
+ secondary_y = [secondary_y]
266
+ self.secondary_y = secondary_y
267
+
268
+ # ugly TypeError if user passes matplotlib's `cmap` name.
269
+ # Probably better to accept either.
270
+ if "cmap" in kwds and colormap:
271
+ raise TypeError("Only specify one of `cmap` and `colormap`.")
272
+ if "cmap" in kwds:
273
+ self.colormap = kwds.pop("cmap")
274
+ else:
275
+ self.colormap = colormap
276
+
277
+ self.table = table
278
+ self.include_bool = include_bool
279
+
280
+ self.kwds = kwds
281
+
282
+ color = kwds.pop("color", lib.no_default)
283
+ self.color = self._validate_color_args(color, self.colormap)
284
+ assert "color" not in self.kwds
285
+
286
+ self.data = self._ensure_frame(self.data)
287
+
288
+ @final
289
+ @staticmethod
290
+ def _validate_sharex(sharex: bool | None, ax, by) -> bool:
291
+ if sharex is None:
292
+ # if by is defined, subplots are used and sharex should be False
293
+ if ax is None and by is None: # pylint: disable=simplifiable-if-statement
294
+ sharex = True
295
+ else:
296
+ # if we get an axis, the users should do the visibility
297
+ # setting...
298
+ sharex = False
299
+ elif not is_bool(sharex):
300
+ raise TypeError("sharex must be a bool or None")
301
+ return bool(sharex)
302
+
303
+ @classmethod
304
+ def _validate_log_kwd(
305
+ cls,
306
+ kwd: str,
307
+ value: bool | None | Literal["sym"],
308
+ ) -> bool | None | Literal["sym"]:
309
+ if (
310
+ value is None
311
+ or isinstance(value, bool)
312
+ or (isinstance(value, str) and value == "sym")
313
+ ):
314
+ return value
315
+ raise ValueError(
316
+ f"keyword '{kwd}' should be bool, None, or 'sym', not '{value}'"
317
+ )
318
+
319
+ @final
320
+ @staticmethod
321
+ def _validate_subplots_kwarg(
322
+ subplots: bool | Sequence[Sequence[str]], data: Series | DataFrame, kind: str
323
+ ) -> bool | list[tuple[int, ...]]:
324
+ """
325
+ Validate the subplots parameter
326
+
327
+ - check type and content
328
+ - check for duplicate columns
329
+ - check for invalid column names
330
+ - convert column names into indices
331
+ - add missing columns in a group of their own
332
+ See comments in code below for more details.
333
+
334
+ Parameters
335
+ ----------
336
+ subplots : subplots parameters as passed to PlotAccessor
337
+
338
+ Returns
339
+ -------
340
+ validated subplots : a bool or a list of tuples of column indices. Columns
341
+ in the same tuple will be grouped together in the resulting plot.
342
+ """
343
+
344
+ if isinstance(subplots, bool):
345
+ return subplots
346
+ elif not isinstance(subplots, Iterable):
347
+ raise ValueError("subplots should be a bool or an iterable")
348
+
349
+ supported_kinds = (
350
+ "line",
351
+ "bar",
352
+ "barh",
353
+ "hist",
354
+ "kde",
355
+ "density",
356
+ "area",
357
+ "pie",
358
+ )
359
+ if kind not in supported_kinds:
360
+ raise ValueError(
361
+ "When subplots is an iterable, kind must be "
362
+ f"one of {', '.join(supported_kinds)}. Got {kind}."
363
+ )
364
+
365
+ if isinstance(data, ABCSeries):
366
+ raise NotImplementedError(
367
+ "An iterable subplots for a Series is not supported."
368
+ )
369
+
370
+ columns = data.columns
371
+ if isinstance(columns, ABCMultiIndex):
372
+ raise NotImplementedError(
373
+ "An iterable subplots for a DataFrame with a MultiIndex column "
374
+ "is not supported."
375
+ )
376
+
377
+ if columns.nunique() != len(columns):
378
+ raise NotImplementedError(
379
+ "An iterable subplots for a DataFrame with non-unique column "
380
+ "labels is not supported."
381
+ )
382
+
383
+ # subplots is a list of tuples where each tuple is a group of
384
+ # columns to be grouped together (one ax per group).
385
+ # we consolidate the subplots list such that:
386
+ # - the tuples contain indices instead of column names
387
+ # - the columns that aren't yet in the list are added in a group
388
+ # of their own.
389
+ # For example with columns from a to g, and
390
+ # subplots = [(a, c), (b, f, e)],
391
+ # we end up with [(ai, ci), (bi, fi, ei), (di,), (gi,)]
392
+ # This way, we can handle self.subplots in a homogeneous manner
393
+ # later.
394
+ # TODO: also accept indices instead of just names?
395
+
396
+ out = []
397
+ seen_columns: set[Hashable] = set()
398
+ for group in subplots:
399
+ if not is_list_like(group):
400
+ raise ValueError(
401
+ "When subplots is an iterable, each entry "
402
+ "should be a list/tuple of column names."
403
+ )
404
+ idx_locs = columns.get_indexer_for(group)
405
+ if (idx_locs == -1).any():
406
+ bad_labels = np.extract(idx_locs == -1, group)
407
+ raise ValueError(
408
+ f"Column label(s) {list(bad_labels)} not found in the DataFrame."
409
+ )
410
+ unique_columns = set(group)
411
+ duplicates = seen_columns.intersection(unique_columns)
412
+ if duplicates:
413
+ raise ValueError(
414
+ "Each column should be in only one subplot. "
415
+ f"Columns {duplicates} were found in multiple subplots."
416
+ )
417
+ seen_columns = seen_columns.union(unique_columns)
418
+ out.append(tuple(idx_locs))
419
+
420
+ unseen_columns = columns.difference(seen_columns)
421
+ for column in unseen_columns:
422
+ idx_loc = columns.get_loc(column)
423
+ out.append((idx_loc,))
424
+ return out
425
+
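Illustrative aside (not part of the pandas source above): a sketch of the iterable `subplots` form validated here, assuming pandas >= 1.5 and a small throwaway DataFrame. Named columns are grouped onto shared Axes and any unmentioned column gets an Axes of its own.

    import pandas as pd

    df = pd.DataFrame({"a": [1, 2, 3], "b": [3, 2, 1], "c": [2, 2, 2], "d": [0, 1, 0]})
    # "a" and "c" share one Axes; "b" and "d" are appended as single-column
    # groups, so three Axes come back in total.
    axes = df.plot(subplots=[("a", "c")])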
426
+ def _validate_color_args(self, color, colormap):
427
+ if color is lib.no_default:
428
+ # It was not provided by the user
429
+ if "colors" in self.kwds and colormap is not None:
430
+ warnings.warn(
431
+ "'color' and 'colormap' cannot be used simultaneously. "
432
+ "Using 'color'",
433
+ stacklevel=find_stack_level(),
434
+ )
435
+ return None
436
+ if self.nseries == 1 and color is not None and not is_list_like(color):
437
+ # support series.plot(color='green')
438
+ color = [color]
439
+
440
+ if isinstance(color, tuple) and self.nseries == 1 and len(color) in (3, 4):
441
+ # support RGB and RGBA tuples in series plot
442
+ color = [color]
443
+
444
+ if colormap is not None:
445
+ warnings.warn(
446
+ "'color' and 'colormap' cannot be used simultaneously. Using 'color'",
447
+ stacklevel=find_stack_level(),
448
+ )
449
+
450
+ if self.style is not None:
451
+ if is_list_like(self.style):
452
+ styles = self.style
453
+ else:
454
+ styles = [self.style]
455
+ # need only a single match
456
+ for s in styles:
457
+ if _color_in_style(s):
458
+ raise ValueError(
459
+ "Cannot pass 'style' string with a color symbol and "
460
+ "'color' keyword argument. Please use one or the "
461
+ "other or pass 'style' without a color symbol"
462
+ )
463
+ return color
464
+
465
+ @final
466
+ @staticmethod
467
+ def _iter_data(
468
+ data: DataFrame | dict[Hashable, Series | DataFrame]
469
+ ) -> Iterator[tuple[Hashable, np.ndarray]]:
470
+ for col, values in data.items():
471
+ # This was originally written to use values.values before EAs
472
+ # were implemented; adding np.asarray(...) to keep consistent
473
+ # typing.
474
+ yield col, np.asarray(values.values)
475
+
476
+ def _get_nseries(self, data: Series | DataFrame) -> int:
477
+ # When `by` is explicitly assigned, grouped data size will be defined, and
478
+ # this will determine number of subplots to have, aka `self.nseries`
479
+ if data.ndim == 1:
480
+ return 1
481
+ elif self.by is not None and self._kind == "hist":
482
+ return len(self._grouped)
483
+ elif self.by is not None and self._kind == "box":
484
+ return len(self.columns)
485
+ else:
486
+ return data.shape[1]
487
+
488
+ @final
489
+ @property
490
+ def nseries(self) -> int:
491
+ return self._get_nseries(self.data)
492
+
493
+ @final
494
+ def draw(self) -> None:
495
+ self.plt.draw_if_interactive()
496
+
497
+ @final
498
+ def generate(self) -> None:
499
+ self._compute_plot_data()
500
+ fig = self.fig
501
+ self._make_plot(fig)
502
+ self._add_table()
503
+ self._make_legend()
504
+ self._adorn_subplots(fig)
505
+
506
+ for ax in self.axes:
507
+ self._post_plot_logic_common(ax)
508
+ self._post_plot_logic(ax, self.data)
509
+
510
+ @final
511
+ @staticmethod
512
+ def _has_plotted_object(ax: Axes) -> bool:
513
+ """check whether ax has data"""
514
+ return len(ax.lines) != 0 or len(ax.artists) != 0 or len(ax.containers) != 0
515
+
516
+ @final
517
+ def _maybe_right_yaxis(self, ax: Axes, axes_num: int) -> Axes:
518
+ if not self.on_right(axes_num):
519
+ # secondary axes may be passed via ax kw
520
+ return self._get_ax_layer(ax)
521
+
522
+ if hasattr(ax, "right_ax"):
523
+ # if it has right_ax property, ``ax`` must be left axes
524
+ return ax.right_ax
525
+ elif hasattr(ax, "left_ax"):
526
+ # if it has left_ax property, ``ax`` must be right axes
527
+ return ax
528
+ else:
529
+ # otherwise, create twin axes
530
+ orig_ax, new_ax = ax, ax.twinx()
531
+ # TODO: use Matplotlib public API when available
532
+ new_ax._get_lines = orig_ax._get_lines # type: ignore[attr-defined]
533
+ # TODO #54485
534
+ new_ax._get_patches_for_fill = ( # type: ignore[attr-defined]
535
+ orig_ax._get_patches_for_fill # type: ignore[attr-defined]
536
+ )
537
+ # TODO #54485
538
+ orig_ax.right_ax, new_ax.left_ax = ( # type: ignore[attr-defined]
539
+ new_ax,
540
+ orig_ax,
541
+ )
542
+
543
+ if not self._has_plotted_object(orig_ax): # no data on left y
544
+ orig_ax.get_yaxis().set_visible(False)
545
+
546
+ if self.logy is True or self.loglog is True:
547
+ new_ax.set_yscale("log")
548
+ elif self.logy == "sym" or self.loglog == "sym":
549
+ new_ax.set_yscale("symlog")
550
+ return new_ax
551
+
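Illustrative aside (not part of the pandas source above): a sketch of the secondary-axis path handled here, with made-up data. Columns listed in `secondary_y` are drawn on a twinned right-hand y-axis (exposed as `right_ax`), and `mark_right` appends "(right)" to their legend labels.

    import pandas as pd

    df = pd.DataFrame({"a": [1, 2, 3], "b": [100, 200, 300]})
    # "b" is plotted on a twinx of the primary Axes.
    ax = df.plot(secondary_y=["b"])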
552
+ @final
553
+ @cache_readonly
554
+ def fig(self) -> Figure:
555
+ return self._axes_and_fig[1]
556
+
557
+ @final
558
+ @cache_readonly
559
+ # TODO: can we annotate this as both a Sequence[Axes] and ndarray[object]?
560
+ def axes(self) -> Sequence[Axes]:
561
+ return self._axes_and_fig[0]
562
+
563
+ @final
564
+ @cache_readonly
565
+ def _axes_and_fig(self) -> tuple[Sequence[Axes], Figure]:
566
+ if self.subplots:
567
+ naxes = (
568
+ self.nseries if isinstance(self.subplots, bool) else len(self.subplots)
569
+ )
570
+ fig, axes = create_subplots(
571
+ naxes=naxes,
572
+ sharex=self.sharex,
573
+ sharey=self.sharey,
574
+ figsize=self.figsize,
575
+ ax=self.ax,
576
+ layout=self.layout,
577
+ layout_type=self._layout_type,
578
+ )
579
+ elif self.ax is None:
580
+ fig = self.plt.figure(figsize=self.figsize)
581
+ axes = fig.add_subplot(111)
582
+ else:
583
+ fig = self.ax.get_figure()
584
+ if self.figsize is not None:
585
+ fig.set_size_inches(self.figsize)
586
+ axes = self.ax
587
+
588
+ axes = flatten_axes(axes)
589
+
590
+ if self.logx is True or self.loglog is True:
591
+ [a.set_xscale("log") for a in axes]
592
+ elif self.logx == "sym" or self.loglog == "sym":
593
+ [a.set_xscale("symlog") for a in axes]
594
+
595
+ if self.logy is True or self.loglog is True:
596
+ [a.set_yscale("log") for a in axes]
597
+ elif self.logy == "sym" or self.loglog == "sym":
598
+ [a.set_yscale("symlog") for a in axes]
599
+
600
+ axes_seq = cast(Sequence["Axes"], axes)
601
+ return axes_seq, fig
602
+
603
+ @property
604
+ def result(self):
605
+ """
606
+ Return result axes
607
+ """
608
+ if self.subplots:
609
+ if self.layout is not None and not is_list_like(self.ax):
610
+ # error: "Sequence[Any]" has no attribute "reshape"
611
+ return self.axes.reshape(*self.layout) # type: ignore[attr-defined]
612
+ else:
613
+ return self.axes
614
+ else:
615
+ sec_true = isinstance(self.secondary_y, bool) and self.secondary_y
616
+ # error: Argument 1 to "len" has incompatible type "Union[bool,
617
+ # Tuple[Any, ...], List[Any], ndarray[Any, Any]]"; expected "Sized"
618
+ all_sec = (
619
+ is_list_like(self.secondary_y)
620
+ and len(self.secondary_y) == self.nseries # type: ignore[arg-type]
621
+ )
622
+ if sec_true or all_sec:
623
+ # if all data is plotted on secondary, return right axes
624
+ return self._get_ax_layer(self.axes[0], primary=False)
625
+ else:
626
+ return self.axes[0]
627
+
628
+ @final
629
+ @staticmethod
630
+ def _convert_to_ndarray(data):
631
+ # GH31357: categorical columns are processed separately
632
+ if isinstance(data.dtype, CategoricalDtype):
633
+ return data
634
+
635
+ # GH32073: cast to float if values contain nulled integers
636
+ if (is_integer_dtype(data.dtype) or is_float_dtype(data.dtype)) and isinstance(
637
+ data.dtype, ExtensionDtype
638
+ ):
639
+ return data.to_numpy(dtype="float", na_value=np.nan)
640
+
641
+ # GH25587: cast ExtensionArray of pandas (IntegerArray, etc.) to
642
+ # np.ndarray before plot.
643
+ if len(data) > 0:
644
+ return np.asarray(data)
645
+
646
+ return data
647
+
648
+ @final
649
+ def _ensure_frame(self, data) -> DataFrame:
650
+ if isinstance(data, ABCSeries):
651
+ label = self.label
652
+ if label is None and data.name is None:
653
+ label = ""
654
+ if label is None:
655
+ # We'll end up with columns of [0] instead of [None]
656
+ data = data.to_frame()
657
+ else:
658
+ data = data.to_frame(name=label)
659
+ elif self._kind in ("hist", "box"):
660
+ cols = self.columns if self.by is None else self.columns + self.by
661
+ data = data.loc[:, cols]
662
+ return data
663
+
664
+ @final
665
+ def _compute_plot_data(self) -> None:
666
+ data = self.data
667
+
668
+ # GH15079 reconstruct data if by is defined
669
+ if self.by is not None:
670
+ self.subplots = True
671
+ data = reconstruct_data_with_by(self.data, by=self.by, cols=self.columns)
672
+
673
+ # GH16953, infer_objects is needed as fallback, for ``Series``
674
+ # with ``dtype == object``
675
+ data = data.infer_objects(copy=False)
676
+ include_type = [np.number, "datetime", "datetimetz", "timedelta"]
677
+
678
+ # GH23719, allow plotting boolean
679
+ if self.include_bool is True:
680
+ include_type.append(np.bool_)
681
+
682
+ # GH22799, exclude datetime-like type for boxplot
683
+ exclude_type = None
684
+ if self._kind == "box":
685
+ # TODO: change after solving issue 27881
686
+ include_type = [np.number]
687
+ exclude_type = ["timedelta"]
688
+
689
+ # GH 18755, include object and category type for scatter plot
690
+ if self._kind == "scatter":
691
+ include_type.extend(["object", "category", "string"])
692
+
693
+ numeric_data = data.select_dtypes(include=include_type, exclude=exclude_type)
694
+
695
+ is_empty = numeric_data.shape[-1] == 0
696
+ # no non-numeric frames or series allowed
697
+ if is_empty:
698
+ raise TypeError("no numeric data to plot")
699
+
700
+ self.data = numeric_data.apply(type(self)._convert_to_ndarray)
701
+
702
+ def _make_plot(self, fig: Figure) -> None:
703
+ raise AbstractMethodError(self)
704
+
705
+ @final
706
+ def _add_table(self) -> None:
707
+ if self.table is False:
708
+ return
709
+ elif self.table is True:
710
+ data = self.data.transpose()
711
+ else:
712
+ data = self.table
713
+ ax = self._get_ax(0)
714
+ tools.table(ax, data)
715
+
716
+ @final
717
+ def _post_plot_logic_common(self, ax: Axes) -> None:
718
+ """Common post process for each axes"""
719
+ if self.orientation == "vertical" or self.orientation is None:
720
+ type(self)._apply_axis_properties(
721
+ ax.xaxis, rot=self.rot, fontsize=self.fontsize
722
+ )
723
+ type(self)._apply_axis_properties(ax.yaxis, fontsize=self.fontsize)
724
+
725
+ if hasattr(ax, "right_ax"):
726
+ type(self)._apply_axis_properties(
727
+ ax.right_ax.yaxis, fontsize=self.fontsize
728
+ )
729
+
730
+ elif self.orientation == "horizontal":
731
+ type(self)._apply_axis_properties(
732
+ ax.yaxis, rot=self.rot, fontsize=self.fontsize
733
+ )
734
+ type(self)._apply_axis_properties(ax.xaxis, fontsize=self.fontsize)
735
+
736
+ if hasattr(ax, "right_ax"):
737
+ type(self)._apply_axis_properties(
738
+ ax.right_ax.yaxis, fontsize=self.fontsize
739
+ )
740
+ else: # pragma no cover
741
+ raise ValueError
742
+
743
+ @abstractmethod
744
+ def _post_plot_logic(self, ax: Axes, data) -> None:
745
+ """Post process for each axes. Overridden in child classes"""
746
+
747
+ @final
748
+ def _adorn_subplots(self, fig: Figure) -> None:
749
+ """Common post process unrelated to data"""
750
+ if len(self.axes) > 0:
751
+ all_axes = self._get_subplots(fig)
752
+ nrows, ncols = self._get_axes_layout(fig)
753
+ handle_shared_axes(
754
+ axarr=all_axes,
755
+ nplots=len(all_axes),
756
+ naxes=nrows * ncols,
757
+ nrows=nrows,
758
+ ncols=ncols,
759
+ sharex=self.sharex,
760
+ sharey=self.sharey,
761
+ )
762
+
763
+ for ax in self.axes:
764
+ ax = getattr(ax, "right_ax", ax)
765
+ if self.yticks is not None:
766
+ ax.set_yticks(self.yticks)
767
+
768
+ if self.xticks is not None:
769
+ ax.set_xticks(self.xticks)
770
+
771
+ if self.ylim is not None:
772
+ ax.set_ylim(self.ylim)
773
+
774
+ if self.xlim is not None:
775
+ ax.set_xlim(self.xlim)
776
+
777
+ # GH9093: pandas does not show a ylabel by default, so if the user
778
+ # provides one, set it as the ylabel of the plot.
779
+ if self.ylabel is not None:
780
+ ax.set_ylabel(pprint_thing(self.ylabel))
781
+
782
+ ax.grid(self.grid)
783
+
784
+ if self.title:
785
+ if self.subplots:
786
+ if is_list_like(self.title):
787
+ if len(self.title) != self.nseries:
788
+ raise ValueError(
789
+ "The length of `title` must equal the number "
790
+ "of columns if using `title` of type `list` "
791
+ "and `subplots=True`.\n"
792
+ f"length of title = {len(self.title)}\n"
793
+ f"number of columns = {self.nseries}"
794
+ )
795
+
796
+ for ax, title in zip(self.axes, self.title):
797
+ ax.set_title(title)
798
+ else:
799
+ fig.suptitle(self.title)
800
+ else:
801
+ if is_list_like(self.title):
802
+ msg = (
803
+ "Using `title` of type `list` is not supported "
804
+ "unless `subplots=True` is passed"
805
+ )
806
+ raise ValueError(msg)
807
+ self.axes[0].set_title(self.title)
808
+
809
+ @final
810
+ @staticmethod
811
+ def _apply_axis_properties(
812
+ axis: Axis, rot=None, fontsize: int | None = None
813
+ ) -> None:
814
+ """
815
+ Tick creation within matplotlib is reasonably expensive and is
816
+ internally deferred until accessed as Ticks are created/destroyed
817
+ multiple times per draw. It's therefore beneficial for us to avoid
818
+ accessing unless we will act on the Tick.
819
+ """
820
+ if rot is not None or fontsize is not None:
821
+ # rot=0 is a valid setting, hence the explicit None check
822
+ labels = axis.get_majorticklabels() + axis.get_minorticklabels()
823
+ for label in labels:
824
+ if rot is not None:
825
+ label.set_rotation(rot)
826
+ if fontsize is not None:
827
+ label.set_fontsize(fontsize)
828
+
829
+ @final
830
+ @property
831
+ def legend_title(self) -> str | None:
832
+ if not isinstance(self.data.columns, ABCMultiIndex):
833
+ name = self.data.columns.name
834
+ if name is not None:
835
+ name = pprint_thing(name)
836
+ return name
837
+ else:
838
+ stringified = map(pprint_thing, self.data.columns.names)
839
+ return ",".join(stringified)
840
+
841
+ @final
842
+ def _mark_right_label(self, label: str, index: int) -> str:
843
+ """
844
+ Append ``(right)`` to the label of a line if it's plotted on the right axis.
845
+
846
+ Note that ``(right)`` is only appended when ``subplots=False``.
847
+ """
848
+ if not self.subplots and self.mark_right and self.on_right(index):
849
+ label += " (right)"
850
+ return label
851
+
852
+ @final
853
+ def _append_legend_handles_labels(self, handle: Artist, label: str) -> None:
854
+ """
855
+ Append current handle and label to ``legend_handles`` and ``legend_labels``.
856
+
857
+ These will be used to make the legend.
858
+ """
859
+ self.legend_handles.append(handle)
860
+ self.legend_labels.append(label)
861
+
862
+ def _make_legend(self) -> None:
863
+ ax, leg = self._get_ax_legend(self.axes[0])
864
+
865
+ handles = []
866
+ labels = []
867
+ title = ""
868
+
869
+ if not self.subplots:
870
+ if leg is not None:
871
+ title = leg.get_title().get_text()
872
+ # Replace leg.legend_handles because it misses marker info
873
+ if Version(mpl.__version__) < Version("3.7"):
874
+ handles = leg.legendHandles
875
+ else:
876
+ handles = leg.legend_handles
877
+ labels = [x.get_text() for x in leg.get_texts()]
878
+
879
+ if self.legend:
880
+ if self.legend == "reverse":
881
+ handles += reversed(self.legend_handles)
882
+ labels += reversed(self.legend_labels)
883
+ else:
884
+ handles += self.legend_handles
885
+ labels += self.legend_labels
886
+
887
+ if self.legend_title is not None:
888
+ title = self.legend_title
889
+
890
+ if len(handles) > 0:
891
+ ax.legend(handles, labels, loc="best", title=title)
892
+
893
+ elif self.subplots and self.legend:
894
+ for ax in self.axes:
895
+ if ax.get_visible():
896
+ with warnings.catch_warnings():
897
+ warnings.filterwarnings(
898
+ "ignore",
899
+ "No artists with labels found to put in legend.",
900
+ UserWarning,
901
+ )
902
+ ax.legend(loc="best")
903
+
904
+ @final
905
+ @staticmethod
906
+ def _get_ax_legend(ax: Axes):
907
+ """
908
+ Take in axes and return ax and legend under different scenarios
909
+ """
910
+ leg = ax.get_legend()
911
+
912
+ other_ax = getattr(ax, "left_ax", None) or getattr(ax, "right_ax", None)
913
+ other_leg = None
914
+ if other_ax is not None:
915
+ other_leg = other_ax.get_legend()
916
+ if leg is None and other_leg is not None:
917
+ leg = other_leg
918
+ ax = other_ax
919
+ return ax, leg
920
+
921
+ @final
922
+ @cache_readonly
923
+ def plt(self):
924
+ import matplotlib.pyplot as plt
925
+
926
+ return plt
927
+
928
+ _need_to_set_index = False
929
+
930
+ @final
931
+ def _get_xticks(self):
932
+ index = self.data.index
933
+ is_datetype = index.inferred_type in ("datetime", "date", "datetime64", "time")
934
+
935
+ # TODO: be stricter about x?
936
+ x: list[int] | np.ndarray
937
+ if self.use_index:
938
+ if isinstance(index, ABCPeriodIndex):
939
+ # test_mixed_freq_irreg_period
940
+ x = index.to_timestamp()._mpl_repr()
941
+ # TODO: why do we need to do to_timestamp() here but not other
942
+ # places where we call mpl_repr?
943
+ elif is_any_real_numeric_dtype(index.dtype):
944
+ # Matplotlib supports numeric values or datetime objects as
945
+ # xaxis values. Taking LBYL approach here, by the time
946
+ # matplotlib raises exception when using non numeric/datetime
947
+ # values for xaxis, several actions are already taken by plt.
948
+ x = index._mpl_repr()
949
+ elif isinstance(index, ABCDatetimeIndex) or is_datetype:
950
+ x = index._mpl_repr()
951
+ else:
952
+ self._need_to_set_index = True
953
+ x = list(range(len(index)))
954
+ else:
955
+ x = list(range(len(index)))
956
+
957
+ return x
958
+
959
+ @classmethod
960
+ @register_pandas_matplotlib_converters
961
+ def _plot(
962
+ cls, ax: Axes, x, y: np.ndarray, style=None, is_errorbar: bool = False, **kwds
963
+ ):
964
+ mask = isna(y)
965
+ if mask.any():
966
+ y = np.ma.array(y)
967
+ y = np.ma.masked_where(mask, y)
968
+
969
+ if isinstance(x, ABCIndex):
970
+ x = x._mpl_repr()
971
+
972
+ if is_errorbar:
973
+ if "xerr" in kwds:
974
+ kwds["xerr"] = np.array(kwds.get("xerr"))
975
+ if "yerr" in kwds:
976
+ kwds["yerr"] = np.array(kwds.get("yerr"))
977
+ return ax.errorbar(x, y, **kwds)
978
+ else:
979
+ # prevent style kwarg from going to errorbar, where it is unsupported
980
+ args = (x, y, style) if style is not None else (x, y)
981
+ return ax.plot(*args, **kwds)
982
+
983
+ def _get_custom_index_name(self):
984
+ """Specify whether xlabel/ylabel should be used to override index name"""
985
+ return self.xlabel
986
+
987
+ @final
988
+ def _get_index_name(self) -> str | None:
989
+ if isinstance(self.data.index, ABCMultiIndex):
990
+ name = self.data.index.names
991
+ if com.any_not_none(*name):
992
+ name = ",".join([pprint_thing(x) for x in name])
993
+ else:
994
+ name = None
995
+ else:
996
+ name = self.data.index.name
997
+ if name is not None:
998
+ name = pprint_thing(name)
999
+
1000
+ # GH 45145, override the default axis label if one is provided.
1001
+ index_name = self._get_custom_index_name()
1002
+ if index_name is not None:
1003
+ name = pprint_thing(index_name)
1004
+
1005
+ return name
1006
+
1007
+ @final
1008
+ @classmethod
1009
+ def _get_ax_layer(cls, ax, primary: bool = True):
1010
+ """get left (primary) or right (secondary) axes"""
1011
+ if primary:
1012
+ return getattr(ax, "left_ax", ax)
1013
+ else:
1014
+ return getattr(ax, "right_ax", ax)
1015
+
1016
+ @final
1017
+ def _col_idx_to_axis_idx(self, col_idx: int) -> int:
1018
+ """Return the index of the axis where the column at col_idx should be plotted"""
1019
+ if isinstance(self.subplots, list):
1020
+ # Subplots is a list: some columns will be grouped together in the same ax
1021
+ return next(
1022
+ group_idx
1023
+ for (group_idx, group) in enumerate(self.subplots)
1024
+ if col_idx in group
1025
+ )
1026
+ else:
1027
+ # subplots is True: one ax per column
1028
+ return col_idx
1029
+
1030
+ @final
1031
+ def _get_ax(self, i: int):
1032
+ # get the twinx ax if appropriate
1033
+ if self.subplots:
1034
+ i = self._col_idx_to_axis_idx(i)
1035
+ ax = self.axes[i]
1036
+ ax = self._maybe_right_yaxis(ax, i)
1037
+ # error: Unsupported target for indexed assignment ("Sequence[Any]")
1038
+ self.axes[i] = ax # type: ignore[index]
1039
+ else:
1040
+ ax = self.axes[0]
1041
+ ax = self._maybe_right_yaxis(ax, i)
1042
+
1043
+ ax.get_yaxis().set_visible(True)
1044
+ return ax
1045
+
1046
+ @final
1047
+ def on_right(self, i: int):
1048
+ if isinstance(self.secondary_y, bool):
1049
+ return self.secondary_y
1050
+
1051
+ if isinstance(self.secondary_y, (tuple, list, np.ndarray, ABCIndex)):
1052
+ return self.data.columns[i] in self.secondary_y
1053
+
1054
+ @final
1055
+ def _apply_style_colors(
1056
+ self, colors, kwds: dict[str, Any], col_num: int, label: str
1057
+ ):
1058
+ """
1059
+ Manage style and color based on column number and its label.
1060
+ Returns tuple of appropriate style and kwds which "color" may be added.
1061
+ """
1062
+ style = None
1063
+ if self.style is not None:
1064
+ if isinstance(self.style, list):
1065
+ try:
1066
+ style = self.style[col_num]
1067
+ except IndexError:
1068
+ pass
1069
+ elif isinstance(self.style, dict):
1070
+ style = self.style.get(label, style)
1071
+ else:
1072
+ style = self.style
1073
+
1074
+ has_color = "color" in kwds or self.colormap is not None
1075
+ nocolor_style = style is None or not _color_in_style(style)
1076
+ if (has_color or self.subplots) and nocolor_style:
1077
+ if isinstance(colors, dict):
1078
+ kwds["color"] = colors[label]
1079
+ else:
1080
+ kwds["color"] = colors[col_num % len(colors)]
1081
+ return style, kwds
1082
+
1083
+ def _get_colors(
1084
+ self,
1085
+ num_colors: int | None = None,
1086
+ color_kwds: str = "color",
1087
+ ):
1088
+ if num_colors is None:
1089
+ num_colors = self.nseries
1090
+ if color_kwds == "color":
1091
+ color = self.color
1092
+ else:
1093
+ color = self.kwds.get(color_kwds)
1094
+ return get_standard_colors(
1095
+ num_colors=num_colors,
1096
+ colormap=self.colormap,
1097
+ color=color,
1098
+ )
1099
+
1100
+ # TODO: tighter typing for first return?
1101
+ @final
1102
+ @staticmethod
1103
+ def _parse_errorbars(
1104
+ label: str, err, data: NDFrameT, nseries: int
1105
+ ) -> tuple[Any, NDFrameT]:
1106
+ """
1107
+ Look for error keyword arguments and return the actual errorbar data
1108
+ or return the error DataFrame/dict
1109
+
1110
+ Error bars can be specified in several ways:
1111
+ Series: the user provides a pandas.Series object of the same
1112
+ length as the data
1113
+ ndarray: provides a np.ndarray of the same length as the data
1114
+ DataFrame/dict: error values are paired with keys matching the
1115
+ key in the plotted DataFrame
1116
+ str: the name of the column within the plotted DataFrame
1117
+
1118
+ Asymmetrical error bars are also supported, however raw error values
1119
+ must be provided in this case. For a ``N`` length :class:`Series`, a
1120
+ ``2xN`` array should be provided indicating lower and upper (or left
1121
+ and right) errors. For a ``MxN`` :class:`DataFrame`, asymmetrical errors
1122
+ should be in a ``Mx2xN`` array.
1123
+ """
1124
+ if err is None:
1125
+ return None, data
1126
+
1127
+ def match_labels(data, e):
1128
+ e = e.reindex(data.index)
1129
+ return e
1130
+
1131
+ # key-matched DataFrame
1132
+ if isinstance(err, ABCDataFrame):
1133
+ err = match_labels(data, err)
1134
+ # key-matched dict
1135
+ elif isinstance(err, dict):
1136
+ pass
1137
+
1138
+ # Series of error values
1139
+ elif isinstance(err, ABCSeries):
1140
+ # broadcast error series across data
1141
+ err = match_labels(data, err)
1142
+ err = np.atleast_2d(err)
1143
+ err = np.tile(err, (nseries, 1))
1144
+
1145
+ # errors are a column in the dataframe
1146
+ elif isinstance(err, str):
1147
+ evalues = data[err].values
1148
+ data = data[data.columns.drop(err)]
1149
+ err = np.atleast_2d(evalues)
1150
+ err = np.tile(err, (nseries, 1))
1151
+
1152
+ elif is_list_like(err):
1153
+ if is_iterator(err):
1154
+ err = np.atleast_2d(list(err))
1155
+ else:
1156
+ # raw error values
1157
+ err = np.atleast_2d(err)
1158
+
1159
+ err_shape = err.shape
1160
+
1161
+ # asymmetrical error bars
1162
+ if isinstance(data, ABCSeries) and err_shape[0] == 2:
1163
+ err = np.expand_dims(err, 0)
1164
+ err_shape = err.shape
1165
+ if err_shape[2] != len(data):
1166
+ raise ValueError(
1167
+ "Asymmetrical error bars should be provided "
1168
+ f"with the shape (2, {len(data)})"
1169
+ )
1170
+ elif isinstance(data, ABCDataFrame) and err.ndim == 3:
1171
+ if (
1172
+ (err_shape[0] != nseries)
1173
+ or (err_shape[1] != 2)
1174
+ or (err_shape[2] != len(data))
1175
+ ):
1176
+ raise ValueError(
1177
+ "Asymmetrical error bars should be provided "
1178
+ f"with the shape ({nseries}, 2, {len(data)})"
1179
+ )
1180
+
1181
+ # broadcast errors to each data series
1182
+ if len(err) == 1:
1183
+ err = np.tile(err, (nseries, 1))
1184
+
1185
+ elif is_number(err):
1186
+ err = np.tile(
1187
+ [err],
1188
+ (nseries, len(data)),
1189
+ )
1190
+
1191
+ else:
1192
+ msg = f"No valid {label} detected"
1193
+ raise ValueError(msg)
1194
+
1195
+ return err, data
1196
+
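Illustrative aside (not part of the pandas source above): a sketch of the errorbar inputs parsed here, with throwaway data. A dict or DataFrame is matched by column label, and a 2xN array gives asymmetric (lower, upper) errors for a single series.

    import numpy as np
    import pandas as pd

    df = pd.DataFrame({"height": [1.5, 2.0, 2.5]})
    # dict keyed by the plotted column label
    df.plot.bar(yerr={"height": np.array([0.1, 0.2, 0.1])})
    # 2xN array: asymmetric lower/upper errors for the single series
    df["height"].plot.bar(yerr=np.array([[0.1, 0.2, 0.1], [0.2, 0.1, 0.2]]))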
1197
+ @final
1198
+ def _get_errorbars(
1199
+ self, label=None, index=None, xerr: bool = True, yerr: bool = True
1200
+ ) -> dict[str, Any]:
1201
+ errors = {}
1202
+
1203
+ for kw, flag in zip(["xerr", "yerr"], [xerr, yerr]):
1204
+ if flag:
1205
+ err = self.errors[kw]
1206
+ # user provided label-matched dataframe of errors
1207
+ if isinstance(err, (ABCDataFrame, dict)):
1208
+ if label is not None and label in err.keys():
1209
+ err = err[label]
1210
+ else:
1211
+ err = None
1212
+ elif index is not None and err is not None:
1213
+ err = err[index]
1214
+
1215
+ if err is not None:
1216
+ errors[kw] = err
1217
+ return errors
1218
+
1219
+ @final
1220
+ def _get_subplots(self, fig: Figure):
1221
+ if Version(mpl.__version__) < Version("3.8"):
1222
+ from matplotlib.axes import Subplot as Klass
1223
+ else:
1224
+ from matplotlib.axes import Axes as Klass
1225
+
1226
+ return [
1227
+ ax
1228
+ for ax in fig.get_axes()
1229
+ if (isinstance(ax, Klass) and ax.get_subplotspec() is not None)
1230
+ ]
1231
+
1232
+ @final
1233
+ def _get_axes_layout(self, fig: Figure) -> tuple[int, int]:
1234
+ axes = self._get_subplots(fig)
1235
+ x_set = set()
1236
+ y_set = set()
1237
+ for ax in axes:
1238
+ # check axes coordinates to estimate layout
1239
+ points = ax.get_position().get_points()
1240
+ x_set.add(points[0][0])
1241
+ y_set.add(points[0][1])
1242
+ return (len(y_set), len(x_set))
1243
+
1244
+
1245
+ class PlanePlot(MPLPlot, ABC):
1246
+ """
1247
+ Abstract class for plotting on plane, currently scatter and hexbin.
1248
+ """
1249
+
1250
+ _layout_type = "single"
1251
+
1252
+ def __init__(self, data, x, y, **kwargs) -> None:
1253
+ MPLPlot.__init__(self, data, **kwargs)
1254
+ if x is None or y is None:
1255
+ raise ValueError(self._kind + " requires an x and y column")
1256
+ if is_integer(x) and not self.data.columns._holds_integer():
1257
+ x = self.data.columns[x]
1258
+ if is_integer(y) and not self.data.columns._holds_integer():
1259
+ y = self.data.columns[y]
1260
+
1261
+ self.x = x
1262
+ self.y = y
1263
+
1264
+ @final
1265
+ def _get_nseries(self, data: Series | DataFrame) -> int:
1266
+ return 1
1267
+
1268
+ @final
1269
+ def _post_plot_logic(self, ax: Axes, data) -> None:
1270
+ x, y = self.x, self.y
1271
+ xlabel = self.xlabel if self.xlabel is not None else pprint_thing(x)
1272
+ ylabel = self.ylabel if self.ylabel is not None else pprint_thing(y)
1273
+ # error: Argument 1 to "set_xlabel" of "_AxesBase" has incompatible
1274
+ # type "Hashable"; expected "str"
1275
+ ax.set_xlabel(xlabel) # type: ignore[arg-type]
1276
+ ax.set_ylabel(ylabel) # type: ignore[arg-type]
1277
+
1278
+ @final
1279
+ def _plot_colorbar(self, ax: Axes, *, fig: Figure, **kwds):
1280
+ # Addresses issues #10611 and #10678:
1281
+ # When plotting scatterplots and hexbinplots in IPython
1282
+ # inline backend the colorbar axis height tends not to
1283
+ # exactly match the parent axis height.
1284
+ # The difference is due to small fractional differences
1285
+ # in floating points with similar representation.
1286
+ # To deal with this, this method forces the colorbar
1287
+ # height to take the height of the parent axes.
1288
+ # For a more detailed description of the issue
1289
+ # see the following link:
1290
+ # https://github.com/ipython/ipython/issues/11215
1291
+
1292
+ # GH33389, if ax is used multiple times, we should always
1293
+ # use the last one which contains the latest information
1294
+ # about the ax
1295
+ img = ax.collections[-1]
1296
+ return fig.colorbar(img, ax=ax, **kwds)
1297
+
1298
+
1299
+ class ScatterPlot(PlanePlot):
1300
+ @property
1301
+ def _kind(self) -> Literal["scatter"]:
1302
+ return "scatter"
1303
+
1304
+ def __init__(
1305
+ self,
1306
+ data,
1307
+ x,
1308
+ y,
1309
+ s=None,
1310
+ c=None,
1311
+ *,
1312
+ colorbar: bool | lib.NoDefault = lib.no_default,
1313
+ norm=None,
1314
+ **kwargs,
1315
+ ) -> None:
1316
+ if s is None:
1317
+ # hide the matplotlib default for size, in case we want to change
1318
+ # the handling of this argument later
1319
+ s = 20
1320
+ elif is_hashable(s) and s in data.columns:
1321
+ s = data[s]
1322
+ self.s = s
1323
+
1324
+ self.colorbar = colorbar
1325
+ self.norm = norm
1326
+
1327
+ super().__init__(data, x, y, **kwargs)
1328
+ if is_integer(c) and not self.data.columns._holds_integer():
1329
+ c = self.data.columns[c]
1330
+ self.c = c
1331
+
1332
+ def _make_plot(self, fig: Figure) -> None:
1333
+ x, y, c, data = self.x, self.y, self.c, self.data
1334
+ ax = self.axes[0]
1335
+
1336
+ c_is_column = is_hashable(c) and c in self.data.columns
1337
+
1338
+ color_by_categorical = c_is_column and isinstance(
1339
+ self.data[c].dtype, CategoricalDtype
1340
+ )
1341
+
1342
+ color = self.color
1343
+ c_values = self._get_c_values(color, color_by_categorical, c_is_column)
1344
+ norm, cmap = self._get_norm_and_cmap(c_values, color_by_categorical)
1345
+ cb = self._get_colorbar(c_values, c_is_column)
1346
+
1347
+ if self.legend:
1348
+ label = self.label
1349
+ else:
1350
+ label = None
1351
+ scatter = ax.scatter(
1352
+ data[x].values,
1353
+ data[y].values,
1354
+ c=c_values,
1355
+ label=label,
1356
+ cmap=cmap,
1357
+ norm=norm,
1358
+ s=self.s,
1359
+ **self.kwds,
1360
+ )
1361
+ if cb:
1362
+ cbar_label = c if c_is_column else ""
1363
+ cbar = self._plot_colorbar(ax, fig=fig, label=cbar_label)
1364
+ if color_by_categorical:
1365
+ n_cats = len(self.data[c].cat.categories)
1366
+ cbar.set_ticks(np.linspace(0.5, n_cats - 0.5, n_cats))
1367
+ cbar.ax.set_yticklabels(self.data[c].cat.categories)
1368
+
1369
+ if label is not None:
1370
+ self._append_legend_handles_labels(
1371
+ # error: Argument 2 to "_append_legend_handles_labels" of
1372
+ # "MPLPlot" has incompatible type "Hashable"; expected "str"
1373
+ scatter,
1374
+ label, # type: ignore[arg-type]
1375
+ )
1376
+
1377
+ errors_x = self._get_errorbars(label=x, index=0, yerr=False)
1378
+ errors_y = self._get_errorbars(label=y, index=0, xerr=False)
1379
+ if len(errors_x) > 0 or len(errors_y) > 0:
1380
+ err_kwds = dict(errors_x, **errors_y)
1381
+ err_kwds["ecolor"] = scatter.get_facecolor()[0]
1382
+ ax.errorbar(data[x].values, data[y].values, linestyle="none", **err_kwds)
1383
+
1384
+ def _get_c_values(self, color, color_by_categorical: bool, c_is_column: bool):
1385
+ c = self.c
1386
+ if c is not None and color is not None:
1387
+ raise TypeError("Specify exactly one of `c` and `color`")
1388
+ if c is None and color is None:
1389
+ c_values = self.plt.rcParams["patch.facecolor"]
1390
+ elif color is not None:
1391
+ c_values = color
1392
+ elif color_by_categorical:
1393
+ c_values = self.data[c].cat.codes
1394
+ elif c_is_column:
1395
+ c_values = self.data[c].values
1396
+ else:
1397
+ c_values = c
1398
+ return c_values
1399
+
1400
+ def _get_norm_and_cmap(self, c_values, color_by_categorical: bool):
1401
+ c = self.c
1402
+ if self.colormap is not None:
1403
+ cmap = mpl.colormaps.get_cmap(self.colormap)
1404
+ # cmap is only used if c_values are integers, otherwise UserWarning.
1405
+ # GH-53908: additionally call isinstance() because is_integer_dtype
1406
+ # returns True for "b" (meaning "blue" and not int8 in this context)
1407
+ elif not isinstance(c_values, str) and is_integer_dtype(c_values):
1408
+ # pandas uses colormap, matplotlib uses cmap.
1409
+ cmap = mpl.colormaps["Greys"]
1410
+ else:
1411
+ cmap = None
1412
+
1413
+ if color_by_categorical and cmap is not None:
1414
+ from matplotlib import colors
1415
+
1416
+ n_cats = len(self.data[c].cat.categories)
1417
+ cmap = colors.ListedColormap([cmap(i) for i in range(cmap.N)])
1418
+ bounds = np.linspace(0, n_cats, n_cats + 1)
1419
+ norm = colors.BoundaryNorm(bounds, cmap.N)
1420
+ # TODO: warn that we are ignoring self.norm if user specified it?
1421
+ # Doesn't happen in any tests 2023-11-09
1422
+ else:
1423
+ norm = self.norm
1424
+ return norm, cmap
1425
+
1426
+ def _get_colorbar(self, c_values, c_is_column: bool) -> bool:
1427
+ # plot colorbar if
1428
+ # 1. colormap is assigned, and
1429
+ # 2.`c` is a column containing only numeric values
1430
+ plot_colorbar = self.colormap or c_is_column
1431
+ cb = self.colorbar
1432
+ if cb is lib.no_default:
1433
+ return is_numeric_dtype(c_values) and plot_colorbar
1434
+ return cb
1435
+
1436
+
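Illustrative aside (not part of the pandas source above): a sketch of ScatterPlot's colour handling with made-up data. A numeric `c` column selects a colormap and draws a colorbar by default; `colorbar=False` opts out.

    import pandas as pd

    df = pd.DataFrame({"x": [1, 2, 3, 4], "y": [4, 3, 2, 1], "z": [10, 20, 30, 40]})
    df.plot.scatter(x="x", y="y", c="z", colormap="viridis")  # colorbar added
    df.plot.scatter(x="x", y="y", c="z", colorbar=False)      # no colorbar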
1437
+ class HexBinPlot(PlanePlot):
1438
+ @property
1439
+ def _kind(self) -> Literal["hexbin"]:
1440
+ return "hexbin"
1441
+
1442
+ def __init__(self, data, x, y, C=None, *, colorbar: bool = True, **kwargs) -> None:
1443
+ super().__init__(data, x, y, **kwargs)
1444
+ if is_integer(C) and not self.data.columns._holds_integer():
1445
+ C = self.data.columns[C]
1446
+ self.C = C
1447
+
1448
+ self.colorbar = colorbar
1449
+
1450
+ # Scatter plot allows plotting object data; hexbin requires numeric columns
1451
+ if len(self.data[self.x]._get_numeric_data()) == 0:
1452
+ raise ValueError(self._kind + " requires x column to be numeric")
1453
+ if len(self.data[self.y]._get_numeric_data()) == 0:
1454
+ raise ValueError(self._kind + " requires y column to be numeric")
1455
+
1456
+ def _make_plot(self, fig: Figure) -> None:
1457
+ x, y, data, C = self.x, self.y, self.data, self.C
1458
+ ax = self.axes[0]
1459
+ # pandas uses colormap, matplotlib uses cmap.
1460
+ cmap = self.colormap or "BuGn"
1461
+ cmap = mpl.colormaps.get_cmap(cmap)
1462
+ cb = self.colorbar
1463
+
1464
+ if C is None:
1465
+ c_values = None
1466
+ else:
1467
+ c_values = data[C].values
1468
+
1469
+ ax.hexbin(data[x].values, data[y].values, C=c_values, cmap=cmap, **self.kwds)
1470
+ if cb:
1471
+ self._plot_colorbar(ax, fig=fig)
1472
+
1473
+ def _make_legend(self) -> None:
1474
+ pass
1475
+
1476
+
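Illustrative aside (not part of the pandas source above): a usage sketch for HexBinPlot with random made-up data. `C` names a value column aggregated per hexagon, `gridsize` sets the bin resolution, and a colorbar is drawn unless `colorbar=False`.

    import numpy as np
    import pandas as pd

    rng = np.random.default_rng(0)
    df = pd.DataFrame(
        {"x": rng.normal(size=500), "y": rng.normal(size=500), "v": rng.uniform(size=500)}
    )
    df.plot.hexbin(x="x", y="y", C="v", gridsize=15)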
1477
+ class LinePlot(MPLPlot):
1478
+ _default_rot = 0
1479
+
1480
+ @property
1481
+ def orientation(self) -> PlottingOrientation:
1482
+ return "vertical"
1483
+
1484
+ @property
1485
+ def _kind(self) -> Literal["line", "area", "hist", "kde", "box"]:
1486
+ return "line"
1487
+
1488
+ def __init__(self, data, **kwargs) -> None:
1489
+ from pandas.plotting import plot_params
1490
+
1491
+ MPLPlot.__init__(self, data, **kwargs)
1492
+ if self.stacked:
1493
+ self.data = self.data.fillna(value=0)
1494
+ self.x_compat = plot_params["x_compat"]
1495
+ if "x_compat" in self.kwds:
1496
+ self.x_compat = bool(self.kwds.pop("x_compat"))
1497
+
1498
+ @final
1499
+ def _is_ts_plot(self) -> bool:
1500
+ # this is slightly deceptive
1501
+ return not self.x_compat and self.use_index and self._use_dynamic_x()
1502
+
1503
+ @final
1504
+ def _use_dynamic_x(self) -> bool:
1505
+ return use_dynamic_x(self._get_ax(0), self.data)
1506
+
1507
+ def _make_plot(self, fig: Figure) -> None:
1508
+ if self._is_ts_plot():
1509
+ data = maybe_convert_index(self._get_ax(0), self.data)
1510
+
1511
+ x = data.index # dummy, not used
1512
+ plotf = self._ts_plot
1513
+ it = data.items()
1514
+ else:
1515
+ x = self._get_xticks()
1516
+ # error: Incompatible types in assignment (expression has type
1517
+ # "Callable[[Any, Any, Any, Any, Any, Any, KwArg(Any)], Any]", variable has
1518
+ # type "Callable[[Any, Any, Any, Any, KwArg(Any)], Any]")
1519
+ plotf = self._plot # type: ignore[assignment]
1520
+ # error: Incompatible types in assignment (expression has type
1521
+ # "Iterator[tuple[Hashable, ndarray[Any, Any]]]", variable has
1522
+ # type "Iterable[tuple[Hashable, Series]]")
1523
+ it = self._iter_data(data=self.data) # type: ignore[assignment]
1524
+
1525
+ stacking_id = self._get_stacking_id()
1526
+ is_errorbar = com.any_not_none(*self.errors.values())
1527
+
1528
+ colors = self._get_colors()
1529
+ for i, (label, y) in enumerate(it):
1530
+ ax = self._get_ax(i)
1531
+ kwds = self.kwds.copy()
1532
+ if self.color is not None:
1533
+ kwds["color"] = self.color
1534
+ style, kwds = self._apply_style_colors(
1535
+ colors,
1536
+ kwds,
1537
+ i,
1538
+ # error: Argument 4 to "_apply_style_colors" of "MPLPlot" has
1539
+ # incompatible type "Hashable"; expected "str"
1540
+ label, # type: ignore[arg-type]
1541
+ )
1542
+
1543
+ errors = self._get_errorbars(label=label, index=i)
1544
+ kwds = dict(kwds, **errors)
1545
+
1546
+ label = pprint_thing(label)
1547
+ label = self._mark_right_label(label, index=i)
1548
+ kwds["label"] = label
1549
+
1550
+ newlines = plotf(
1551
+ ax,
1552
+ x,
1553
+ y,
1554
+ style=style,
1555
+ column_num=i,
1556
+ stacking_id=stacking_id,
1557
+ is_errorbar=is_errorbar,
1558
+ **kwds,
1559
+ )
1560
+ self._append_legend_handles_labels(newlines[0], label)
1561
+
1562
+ if self._is_ts_plot():
1563
+ # reset of xlim should be used for ts data
1564
+ # TODO: GH28021, should find a way to change view limit on xaxis
1565
+ lines = get_all_lines(ax)
1566
+ left, right = get_xlim(lines)
1567
+ ax.set_xlim(left, right)
1568
+
1569
+ # error: Signature of "_plot" incompatible with supertype "MPLPlot"
1570
+ @classmethod
1571
+ def _plot( # type: ignore[override]
1572
+ cls,
1573
+ ax: Axes,
1574
+ x,
1575
+ y: np.ndarray,
1576
+ style=None,
1577
+ column_num=None,
1578
+ stacking_id=None,
1579
+ **kwds,
1580
+ ):
1581
+ # column_num is used to get the target column from plotf in line and
1582
+ # area plots
1583
+ if column_num == 0:
1584
+ cls._initialize_stacker(ax, stacking_id, len(y))
1585
+ y_values = cls._get_stacked_values(ax, stacking_id, y, kwds["label"])
1586
+ lines = MPLPlot._plot(ax, x, y_values, style=style, **kwds)
1587
+ cls._update_stacker(ax, stacking_id, y)
1588
+ return lines
1589
+
1590
+ @final
1591
+ def _ts_plot(self, ax: Axes, x, data: Series, style=None, **kwds):
1592
+ # accept x to be consistent with normal plot func,
1593
+ # x is not passed to tsplot as it uses data.index as x coordinate
1594
+ # column_num must be in kwds for stacking purpose
1595
+ freq, data = maybe_resample(data, ax, kwds)
1596
+
1597
+ # Set ax with freq info
1598
+ decorate_axes(ax, freq)
1599
+ # digging deeper
1600
+ if hasattr(ax, "left_ax"):
1601
+ decorate_axes(ax.left_ax, freq)
1602
+ if hasattr(ax, "right_ax"):
1603
+ decorate_axes(ax.right_ax, freq)
1604
+ # TODO #54485
1605
+ ax._plot_data.append((data, self._kind, kwds)) # type: ignore[attr-defined]
1606
+
1607
+ lines = self._plot(ax, data.index, np.asarray(data.values), style=style, **kwds)
1608
+ # set date formatter, locators and rescale limits
1609
+ # TODO #54485
1610
+ format_dateaxis(ax, ax.freq, data.index) # type: ignore[arg-type, attr-defined]
1611
+ return lines
1612
+
1613
+ @final
1614
+ def _get_stacking_id(self) -> int | None:
1615
+ if self.stacked:
1616
+ return id(self.data)
1617
+ else:
1618
+ return None
1619
+
1620
+ @final
1621
+ @classmethod
1622
+ def _initialize_stacker(cls, ax: Axes, stacking_id, n: int) -> None:
1623
+ if stacking_id is None:
1624
+ return
1625
+ if not hasattr(ax, "_stacker_pos_prior"):
1626
+ # TODO #54485
1627
+ ax._stacker_pos_prior = {} # type: ignore[attr-defined]
1628
+ if not hasattr(ax, "_stacker_neg_prior"):
1629
+ # TODO #54485
1630
+ ax._stacker_neg_prior = {} # type: ignore[attr-defined]
1631
+ # TODO #54485
1632
+ ax._stacker_pos_prior[stacking_id] = np.zeros(n) # type: ignore[attr-defined]
1633
+ # TODO #54485
1634
+ ax._stacker_neg_prior[stacking_id] = np.zeros(n) # type: ignore[attr-defined]
1635
+
1636
+ @final
1637
+ @classmethod
1638
+ def _get_stacked_values(
1639
+ cls, ax: Axes, stacking_id: int | None, values: np.ndarray, label
1640
+ ) -> np.ndarray:
1641
+ if stacking_id is None:
1642
+ return values
1643
+ if not hasattr(ax, "_stacker_pos_prior"):
1644
+ # stacker may not be initialized for subplots
1645
+ cls._initialize_stacker(ax, stacking_id, len(values))
1646
+
1647
+ if (values >= 0).all():
1648
+ # TODO #54485
1649
+ return (
1650
+ ax._stacker_pos_prior[stacking_id] # type: ignore[attr-defined]
1651
+ + values
1652
+ )
1653
+ elif (values <= 0).all():
1654
+ # TODO #54485
1655
+ return (
1656
+ ax._stacker_neg_prior[stacking_id] # type: ignore[attr-defined]
1657
+ + values
1658
+ )
1659
+
1660
+ raise ValueError(
1661
+ "When stacked is True, each column must be either "
1662
+ "all positive or all negative. "
1663
+ f"Column '{label}' contains both positive and negative values"
1664
+ )
1665
+
1666
+ @final
1667
+ @classmethod
1668
+ def _update_stacker(cls, ax: Axes, stacking_id: int | None, values) -> None:
1669
+ if stacking_id is None:
1670
+ return
1671
+ if (values >= 0).all():
1672
+ # TODO #54485
1673
+ ax._stacker_pos_prior[stacking_id] += values # type: ignore[attr-defined]
1674
+ elif (values <= 0).all():
1675
+ # TODO #54485
1676
+ ax._stacker_neg_prior[stacking_id] += values # type: ignore[attr-defined]
1677
+
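Illustrative aside (not part of the pandas source above): a sketch of the stacking rule these helpers enforce, with throwaway data. Under stacked=True every column must be entirely non-negative or entirely non-positive, otherwise a ValueError is raised.

    import pandas as pd

    ok = pd.DataFrame({"a": [1, 2, 3], "b": [2, 1, 0]})
    ok.plot.line(stacked=True)       # fine: every column is non-negative

    mixed = pd.DataFrame({"a": [1, -2, 3]})
    # mixed.plot.line(stacked=True)  # would raise: column "a" mixes signs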
1678
+ def _post_plot_logic(self, ax: Axes, data) -> None:
1679
+ from matplotlib.ticker import FixedLocator
1680
+
1681
+ def get_label(i):
1682
+ if is_float(i) and i.is_integer():
1683
+ i = int(i)
1684
+ try:
1685
+ return pprint_thing(data.index[i])
1686
+ except Exception:
1687
+ return ""
1688
+
1689
+ if self._need_to_set_index:
1690
+ xticks = ax.get_xticks()
1691
+ xticklabels = [get_label(x) for x in xticks]
1692
+ # error: Argument 1 to "FixedLocator" has incompatible type "ndarray[Any,
1693
+ # Any]"; expected "Sequence[float]"
1694
+ ax.xaxis.set_major_locator(FixedLocator(xticks)) # type: ignore[arg-type]
1695
+ ax.set_xticklabels(xticklabels)
1696
+
1697
+ # If the index is an irregular time series, then by default
1698
+ # we rotate the tick labels. The exception is if there are
1699
+ # subplots which don't share their x-axes, in which case
1700
+ # we don't rotate the ticklabels as by default the subplots
1701
+ # would be too close together.
1702
+ condition = (
1703
+ not self._use_dynamic_x()
1704
+ and (data.index._is_all_dates and self.use_index)
1705
+ and (not self.subplots or (self.subplots and self.sharex))
1706
+ )
1707
+
1708
+ index_name = self._get_index_name()
1709
+
1710
+ if condition:
1711
+ # irregular TS rotated 30 deg. by default
1712
+ # probably a better place to check / set this.
1713
+ if not self._rot_set:
1714
+ self.rot = 30
1715
+ format_date_labels(ax, rot=self.rot)
1716
+
1717
+ if index_name is not None and self.use_index:
1718
+ ax.set_xlabel(index_name)
1719
+
1720
+
1721
+ class AreaPlot(LinePlot):
1722
+ @property
1723
+ def _kind(self) -> Literal["area"]:
1724
+ return "area"
1725
+
1726
+ def __init__(self, data, **kwargs) -> None:
1727
+ kwargs.setdefault("stacked", True)
1728
+ with warnings.catch_warnings():
1729
+ warnings.filterwarnings(
1730
+ "ignore",
1731
+ "Downcasting object dtype arrays",
1732
+ category=FutureWarning,
1733
+ )
1734
+ data = data.fillna(value=0)
1735
+ LinePlot.__init__(self, data, **kwargs)
1736
+
1737
+ if not self.stacked:
1738
+ # use smaller alpha to distinguish overlap
1739
+ self.kwds.setdefault("alpha", 0.5)
1740
+
1741
+ if self.logy or self.loglog:
1742
+ raise ValueError("Log-y scales are not supported in area plot")
1743
+
1744
+ # error: Signature of "_plot" incompatible with supertype "MPLPlot"
1745
+ @classmethod
1746
+ def _plot( # type: ignore[override]
1747
+ cls,
1748
+ ax: Axes,
1749
+ x,
1750
+ y: np.ndarray,
1751
+ style=None,
1752
+ column_num=None,
1753
+ stacking_id=None,
1754
+ is_errorbar: bool = False,
1755
+ **kwds,
1756
+ ):
1757
+ if column_num == 0:
1758
+ cls._initialize_stacker(ax, stacking_id, len(y))
1759
+ y_values = cls._get_stacked_values(ax, stacking_id, y, kwds["label"])
1760
+
1761
+ # need to remove label, because subplots uses mpl legend as it is
1762
+ line_kwds = kwds.copy()
1763
+ line_kwds.pop("label")
1764
+ lines = MPLPlot._plot(ax, x, y_values, style=style, **line_kwds)
1765
+
1766
+ # get data from the line to get coordinates for fill_between
1767
+ xdata, y_values = lines[0].get_data(orig=False)
1768
+
1769
+ # unable to use ``_get_stacked_values`` here to get starting point
1770
+ if stacking_id is None:
1771
+ start = np.zeros(len(y))
1772
+ elif (y >= 0).all():
1773
+ # TODO #54485
1774
+ start = ax._stacker_pos_prior[stacking_id] # type: ignore[attr-defined]
1775
+ elif (y <= 0).all():
1776
+ # TODO #54485
1777
+ start = ax._stacker_neg_prior[stacking_id] # type: ignore[attr-defined]
1778
+ else:
1779
+ start = np.zeros(len(y))
1780
+
1781
+ if "color" not in kwds:
1782
+ kwds["color"] = lines[0].get_color()
1783
+
1784
+ rect = ax.fill_between(xdata, start, y_values, **kwds)
1785
+ cls._update_stacker(ax, stacking_id, y)
1786
+
1787
+ # LinePlot expects list of artists
1788
+ res = [rect]
1789
+ return res
1790
+
1791
+ def _post_plot_logic(self, ax: Axes, data) -> None:
1792
+ LinePlot._post_plot_logic(self, ax, data)
1793
+
1794
+ is_shared_y = len(list(ax.get_shared_y_axes())) > 0
1795
+ # do not override the default axis behaviour in case of shared y axes
1796
+ if self.ylim is None and not is_shared_y:
1797
+ if (data >= 0).all().all():
1798
+ ax.set_ylim(0, None)
1799
+ elif (data <= 0).all().all():
1800
+ ax.set_ylim(None, 0)
1801
+
1802
+
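Illustrative aside (not part of the pandas source above): a sketch of AreaPlot's defaults with made-up data. Area plots stack by default, stacked=False lowers alpha so overlaps stay visible, and log y-scales are rejected.

    import pandas as pd

    df = pd.DataFrame({"a": [1, 2, 3], "b": [3, 2, 1]})
    df.plot.area()               # stacked; y-limit floored at 0 for all-positive data
    df.plot.area(stacked=False)  # unstacked areas drawn with alpha=0.5 by default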
1803
+ class BarPlot(MPLPlot):
1804
+ @property
1805
+ def _kind(self) -> Literal["bar", "barh"]:
1806
+ return "bar"
1807
+
1808
+ _default_rot = 90
1809
+
1810
+ @property
1811
+ def orientation(self) -> PlottingOrientation:
1812
+ return "vertical"
1813
+
1814
+ def __init__(
1815
+ self,
1816
+ data,
1817
+ *,
1818
+ align="center",
1819
+ bottom=0,
1820
+ left=0,
1821
+ width=0.5,
1822
+ position=0.5,
1823
+ log=False,
1824
+ **kwargs,
1825
+ ) -> None:
1826
+ # we have to treat a series differently than a
1827
+ # 1-column DataFrame w.r.t. color handling
1828
+ self._is_series = isinstance(data, ABCSeries)
1829
+ self.bar_width = width
1830
+ self._align = align
1831
+ self._position = position
1832
+ self.tick_pos = np.arange(len(data))
1833
+
1834
+ if is_list_like(bottom):
1835
+ bottom = np.array(bottom)
1836
+ if is_list_like(left):
1837
+ left = np.array(left)
1838
+ self.bottom = bottom
1839
+ self.left = left
1840
+
1841
+ self.log = log
1842
+
1843
+ MPLPlot.__init__(self, data, **kwargs)
1844
+
1845
+ @cache_readonly
1846
+ def ax_pos(self) -> np.ndarray:
1847
+ return self.tick_pos - self.tickoffset
1848
+
1849
+ @cache_readonly
1850
+ def tickoffset(self):
1851
+ if self.stacked or self.subplots:
1852
+ return self.bar_width * self._position
1853
+ elif self._align == "edge":
1854
+ w = self.bar_width / self.nseries
1855
+ return self.bar_width * (self._position - 0.5) + w * 0.5
1856
+ else:
1857
+ return self.bar_width * self._position
1858
+
1859
+ @cache_readonly
1860
+ def lim_offset(self):
1861
+ if self.stacked or self.subplots:
1862
+ if self._align == "edge":
1863
+ return self.bar_width / 2
1864
+ else:
1865
+ return 0
1866
+ elif self._align == "edge":
1867
+ w = self.bar_width / self.nseries
1868
+ return w * 0.5
1869
+ else:
1870
+ return 0
1871
+
1872
+ # error: Signature of "_plot" incompatible with supertype "MPLPlot"
1873
+ @classmethod
1874
+ def _plot( # type: ignore[override]
1875
+ cls,
1876
+ ax: Axes,
1877
+ x,
1878
+ y: np.ndarray,
1879
+ w,
1880
+ start: int | npt.NDArray[np.intp] = 0,
1881
+ log: bool = False,
1882
+ **kwds,
1883
+ ):
1884
+ return ax.bar(x, y, w, bottom=start, log=log, **kwds)
1885
+
1886
+ @property
1887
+ def _start_base(self):
1888
+ return self.bottom
1889
+
1890
+ def _make_plot(self, fig: Figure) -> None:
1891
+ colors = self._get_colors()
1892
+ ncolors = len(colors)
1893
+
1894
+ pos_prior = neg_prior = np.zeros(len(self.data))
1895
+ K = self.nseries
1896
+
1897
+ data = self.data.fillna(0)
1898
+ for i, (label, y) in enumerate(self._iter_data(data=data)):
1899
+ ax = self._get_ax(i)
1900
+ kwds = self.kwds.copy()
1901
+ if self._is_series:
1902
+ kwds["color"] = colors
1903
+ elif isinstance(colors, dict):
1904
+ kwds["color"] = colors[label]
1905
+ else:
1906
+ kwds["color"] = colors[i % ncolors]
1907
+
1908
+ errors = self._get_errorbars(label=label, index=i)
1909
+ kwds = dict(kwds, **errors)
1910
+
1911
+ label = pprint_thing(label)
1912
+ label = self._mark_right_label(label, index=i)
1913
+
1914
+ if (("yerr" in kwds) or ("xerr" in kwds)) and (kwds.get("ecolor") is None):
1915
+ kwds["ecolor"] = mpl.rcParams["xtick.color"]
1916
+
1917
+ start = 0
1918
+ if self.log and (y >= 1).all():
1919
+ start = 1
1920
+ start = start + self._start_base
1921
+
1922
+ kwds["align"] = self._align
1923
+ if self.subplots:
1924
+ w = self.bar_width / 2
1925
+ rect = self._plot(
1926
+ ax,
1927
+ self.ax_pos + w,
1928
+ y,
1929
+ self.bar_width,
1930
+ start=start,
1931
+ label=label,
1932
+ log=self.log,
1933
+ **kwds,
1934
+ )
1935
+ ax.set_title(label)
1936
+ elif self.stacked:
1937
+ mask = y > 0
1938
+ start = np.where(mask, pos_prior, neg_prior) + self._start_base
1939
+ w = self.bar_width / 2
1940
+ rect = self._plot(
1941
+ ax,
1942
+ self.ax_pos + w,
1943
+ y,
1944
+ self.bar_width,
1945
+ start=start,
1946
+ label=label,
1947
+ log=self.log,
1948
+ **kwds,
1949
+ )
1950
+ pos_prior = pos_prior + np.where(mask, y, 0)
1951
+ neg_prior = neg_prior + np.where(mask, 0, y)
1952
+ else:
1953
+ w = self.bar_width / K
1954
+ rect = self._plot(
1955
+ ax,
1956
+ self.ax_pos + (i + 0.5) * w,
1957
+ y,
1958
+ w,
1959
+ start=start,
1960
+ label=label,
1961
+ log=self.log,
1962
+ **kwds,
1963
+ )
1964
+ self._append_legend_handles_labels(rect, label)
1965
+
1966
+ def _post_plot_logic(self, ax: Axes, data) -> None:
1967
+ if self.use_index:
1968
+ str_index = [pprint_thing(key) for key in data.index]
1969
+ else:
1970
+ str_index = [pprint_thing(key) for key in range(data.shape[0])]
1971
+
1972
+ s_edge = self.ax_pos[0] - 0.25 + self.lim_offset
1973
+ e_edge = self.ax_pos[-1] + 0.25 + self.bar_width + self.lim_offset
1974
+
1975
+ self._decorate_ticks(ax, self._get_index_name(), str_index, s_edge, e_edge)
1976
+
1977
+ def _decorate_ticks(
1978
+ self,
1979
+ ax: Axes,
1980
+ name: str | None,
1981
+ ticklabels: list[str],
1982
+ start_edge: float,
1983
+ end_edge: float,
1984
+ ) -> None:
1985
+ ax.set_xlim((start_edge, end_edge))
1986
+
1987
+ if self.xticks is not None:
1988
+ ax.set_xticks(np.array(self.xticks))
1989
+ else:
1990
+ ax.set_xticks(self.tick_pos)
1991
+ ax.set_xticklabels(ticklabels)
1992
+
1993
+ if name is not None and self.use_index:
1994
+ ax.set_xlabel(name)
1995
+
1996
+
1997
+ class BarhPlot(BarPlot):
1998
+ @property
1999
+ def _kind(self) -> Literal["barh"]:
2000
+ return "barh"
2001
+
2002
+ _default_rot = 0
2003
+
2004
+ @property
2005
+ def orientation(self) -> Literal["horizontal"]:
2006
+ return "horizontal"
2007
+
2008
+ @property
2009
+ def _start_base(self):
2010
+ return self.left
2011
+
2012
+ # error: Signature of "_plot" incompatible with supertype "MPLPlot"
2013
+ @classmethod
2014
+ def _plot( # type: ignore[override]
2015
+ cls,
2016
+ ax: Axes,
2017
+ x,
2018
+ y: np.ndarray,
2019
+ w,
2020
+ start: int | npt.NDArray[np.intp] = 0,
2021
+ log: bool = False,
2022
+ **kwds,
2023
+ ):
2024
+ return ax.barh(x, y, w, left=start, log=log, **kwds)
2025
+
2026
+ def _get_custom_index_name(self):
2027
+ return self.ylabel
2028
+
2029
+ def _decorate_ticks(
2030
+ self,
2031
+ ax: Axes,
2032
+ name: str | None,
2033
+ ticklabels: list[str],
2034
+ start_edge: float,
2035
+ end_edge: float,
2036
+ ) -> None:
2037
+ # horizontal bars
2038
+ ax.set_ylim((start_edge, end_edge))
2039
+ ax.set_yticks(self.tick_pos)
2040
+ ax.set_yticklabels(ticklabels)
2041
+ if name is not None and self.use_index:
2042
+ ax.set_ylabel(name)
2043
+ # error: Argument 1 to "set_xlabel" of "_AxesBase" has incompatible type
2044
+ # "Hashable | None"; expected "str"
2045
+ ax.set_xlabel(self.xlabel) # type: ignore[arg-type]
2046
+
2047
+
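Illustrative aside (not part of the pandas source above): a sketch of BarPlot/BarhPlot usage with made-up data. stacked=True accumulates positive and negative values on separate baselines, and barh is the horizontal variant with the tick handling transposed.

    import pandas as pd

    df = pd.DataFrame({"a": [1, -2, 3], "b": [2, 1, -1]}, index=["x", "y", "z"])
    df.plot.bar(stacked=True)  # positives stack upward, negatives downward
    df.plot.barh()             # horizontal bars, category labels on the y-axis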
2048
+ class PiePlot(MPLPlot):
2049
+ @property
2050
+ def _kind(self) -> Literal["pie"]:
2051
+ return "pie"
2052
+
2053
+ _layout_type = "horizontal"
2054
+
2055
+ def __init__(self, data, kind=None, **kwargs) -> None:
2056
+ data = data.fillna(value=0)
2057
+ if (data < 0).any().any():
2058
+ raise ValueError(f"{self._kind} plot doesn't allow negative values")
2059
+ MPLPlot.__init__(self, data, kind=kind, **kwargs)
2060
+
2061
+ @classmethod
2062
+ def _validate_log_kwd(
2063
+ cls,
2064
+ kwd: str,
2065
+ value: bool | None | Literal["sym"],
2066
+ ) -> bool | None | Literal["sym"]:
2067
+ super()._validate_log_kwd(kwd=kwd, value=value)
2068
+ if value is not False:
2069
+ warnings.warn(
2070
+ f"PiePlot ignores the '{kwd}' keyword",
2071
+ UserWarning,
2072
+ stacklevel=find_stack_level(),
2073
+ )
2074
+ return False
2075
+
2076
+ def _validate_color_args(self, color, colormap) -> None:
2077
+ # TODO: warn if color is passed and ignored?
2078
+ return None
2079
+
2080
+ def _make_plot(self, fig: Figure) -> None:
2081
+ colors = self._get_colors(num_colors=len(self.data), color_kwds="colors")
2082
+ self.kwds.setdefault("colors", colors)
2083
+
2084
+ for i, (label, y) in enumerate(self._iter_data(data=self.data)):
2085
+ ax = self._get_ax(i)
2086
+ if label is not None:
2087
+ label = pprint_thing(label)
2088
+ ax.set_ylabel(label)
2089
+
2090
+ kwds = self.kwds.copy()
2091
+
2092
+ def blank_labeler(label, value):
2093
+ if value == 0:
2094
+ return ""
2095
+ else:
2096
+ return label
2097
+
2098
+ idx = [pprint_thing(v) for v in self.data.index]
2099
+ labels = kwds.pop("labels", idx)
2100
+ # labels is used for each wedge's labels
2101
+ # Blank out labels for values of 0 so they don't overlap
2102
+ # with nonzero wedges
2103
+ if labels is not None:
2104
+ blabels = [blank_labeler(left, value) for left, value in zip(labels, y)]
2105
+ else:
2106
+ blabels = None
2107
+ results = ax.pie(y, labels=blabels, **kwds)
2108
+
2109
+ if kwds.get("autopct", None) is not None:
2110
+ patches, texts, autotexts = results
2111
+ else:
2112
+ patches, texts = results
2113
+ autotexts = []
2114
+
2115
+ if self.fontsize is not None:
2116
+ for t in texts + autotexts:
2117
+ t.set_fontsize(self.fontsize)
2118
+
2119
+ # leglabels is used for legend labels
2120
+ leglabels = labels if labels is not None else idx
2121
+ for _patch, _leglabel in zip(patches, leglabels):
2122
+ self._append_legend_handles_labels(_patch, _leglabel)
2123
+
2124
+ def _post_plot_logic(self, ax: Axes, data) -> None:
2125
+ pass
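The classes above implement the bar, barh, and pie kinds behind DataFrame.plot. A minimal usage sketch through the public pandas API follows; the DataFrame and column names are illustrative and not taken from this diff.

# Hedged sketch: exercising BarPlot, BarhPlot, and PiePlot via the public API.
# The sample data below is made up for illustration only.
import pandas as pd

df = pd.DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]}, index=["x", "y", "z"])

ax_bar = df.plot.bar()       # handled by BarPlot; _decorate_ticks labels the x-axis
ax_barh = df.plot.barh()     # handled by BarhPlot; the same tick logic runs on the y-axis
ax_pie = df["a"].plot.pie()  # handled by PiePlot; negative values would raise ValueError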
emu3/lib/python3.10/site-packages/pandas/plotting/_matplotlib/groupby.py ADDED
@@ -0,0 +1,142 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import numpy as np
+
+from pandas.core.dtypes.missing import remove_na_arraylike
+
+from pandas import (
+    MultiIndex,
+    concat,
+)
+
+from pandas.plotting._matplotlib.misc import unpack_single_str_list
+
+if TYPE_CHECKING:
+    from collections.abc import Hashable
+
+    from pandas._typing import IndexLabel
+
+    from pandas import (
+        DataFrame,
+        Series,
+    )
+
+
+def create_iter_data_given_by(
+    data: DataFrame, kind: str = "hist"
+) -> dict[Hashable, DataFrame | Series]:
+    """
+    Create data for iteration given `by` is assigned or not, and it is only
+    used in both hist and boxplot.
+
+    If `by` is assigned, return a dictionary of DataFrames in which the key of
+    dictionary is the values in groups.
+    If `by` is not assigned, return input as is, and this preserves current
+    status of iter_data.
+
+    Parameters
+    ----------
+    data : reformatted grouped data from `_compute_plot_data` method.
+    kind : str, plot kind. This function is only used for `hist` and `box` plots.
+
+    Returns
+    -------
+    iter_data : DataFrame or Dictionary of DataFrames
+
+    Examples
+    --------
+    If `by` is assigned:
+
+    >>> import numpy as np
+    >>> tuples = [('h1', 'a'), ('h1', 'b'), ('h2', 'a'), ('h2', 'b')]
+    >>> mi = pd.MultiIndex.from_tuples(tuples)
+    >>> value = [[1, 3, np.nan, np.nan],
+    ...          [3, 4, np.nan, np.nan], [np.nan, np.nan, 5, 6]]
+    >>> data = pd.DataFrame(value, columns=mi)
+    >>> create_iter_data_given_by(data)
+    {'h1':     h1
+         a    b
+    0  1.0  3.0
+    1  3.0  4.0
+    2  NaN  NaN, 'h2':     h2
+         a    b
+    0  NaN  NaN
+    1  NaN  NaN
+    2  5.0  6.0}
+    """
+
+    # For `hist` plot, before transformation, the values in level 0 are values
+    # in groups and subplot titles, and later used for column subselection and
+    # iteration; For `box` plot, values in level 1 are column names to show,
+    # and are used for iteration and as subplots titles.
+    if kind == "hist":
+        level = 0
+    else:
+        level = 1
+
+    # Select sub-columns based on the value of level of MI, and if `by` is
+    # assigned, data must be a MI DataFrame
+    assert isinstance(data.columns, MultiIndex)
+    return {
+        col: data.loc[:, data.columns.get_level_values(level) == col]
+        for col in data.columns.levels[level]
+    }
+
+
+def reconstruct_data_with_by(
+    data: DataFrame, by: IndexLabel, cols: IndexLabel
+) -> DataFrame:
+    """
+    Internal function to group data, and reassign multiindex column names onto the
+    result in order to let grouped data be used in _compute_plot_data method.
+
+    Parameters
+    ----------
+    data : Original DataFrame to plot
+    by : grouped `by` parameter selected by users
+    cols : columns of data set (excluding columns used in `by`)
+
+    Returns
+    -------
+    Output is the reconstructed DataFrame with MultiIndex columns. The first level
+    of MI is unique values of groups, and second level of MI is the columns
+    selected by users.
+
+    Examples
+    --------
+    >>> d = {'h': ['h1', 'h1', 'h2'], 'a': [1, 3, 5], 'b': [3, 4, 6]}
+    >>> df = pd.DataFrame(d)
+    >>> reconstruct_data_with_by(df, by='h', cols=['a', 'b'])
+        h1          h2
+         a    b      a    b
+    0  1.0  3.0    NaN  NaN
+    1  3.0  4.0    NaN  NaN
+    2  NaN  NaN    5.0  6.0
+    """
+    by_modified = unpack_single_str_list(by)
+    grouped = data.groupby(by_modified)
+
+    data_list = []
+    for key, group in grouped:
+        # error: List item 1 has incompatible type "Union[Hashable,
+        # Sequence[Hashable]]"; expected "Iterable[Hashable]"
+        columns = MultiIndex.from_product([[key], cols])  # type: ignore[list-item]
+        sub_group = group[cols]
+        sub_group.columns = columns
+        data_list.append(sub_group)
+
+    data = concat(data_list, axis=1)
+    return data
+
+
+def reformat_hist_y_given_by(y: np.ndarray, by: IndexLabel | None) -> np.ndarray:
+    """Internal function to reformat y given `by` is applied or not for hist plot.
+
+    If by is None, input y is 1-d with NaN removed; and if by is not None, groupby
+    will take place and input y is multi-dimensional array.
+    """
+    if by is not None and len(y.shape) > 1:
+        return np.array([remove_na_arraylike(col) for col in y.T]).T
+    return remove_na_arraylike(y)
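For context, these helpers back hist and box plots grouped with `by`: reconstruct_data_with_by lifts the group keys into level 0 of a column MultiIndex, and create_iter_data_given_by splits that frame back into one sub-DataFrame per group. Below is a minimal sketch under that reading, reusing the docstring's sample data; the functions are private pandas internals, so calling them directly is for illustration only.

# Illustrative sketch only: these are private pandas helpers, not public API,
# and importing them requires matplotlib (the module lives in the mpl backend).
import pandas as pd

from pandas.plotting._matplotlib.groupby import (
    create_iter_data_given_by,
    reconstruct_data_with_by,
)

df = pd.DataFrame({"h": ["h1", "h1", "h2"], "a": [1, 3, 5], "b": [3, 4, 6]})

# Group by "h" and move the group keys into level 0 of a column MultiIndex.
wide = reconstruct_data_with_by(df, by="h", cols=["a", "b"])

# Split the MultiIndex frame back into one sub-DataFrame per group
# (level 0 is used for hist, level 1 for box).
per_group = create_iter_data_given_by(wide, kind="hist")
for key, sub_df in per_group.items():
    print(key, list(sub_df.columns))

# The public entry point that exercises this path is, for example:
# df.plot.hist(by="h")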