ZTWHHH committed
Commit d29fd3d · verified · 1 Parent(s): 484de52

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/autoscaler_pb2.py +327 -0
  2. infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/event_pb2_grpc.py +66 -0
  3. infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/export_driver_job_event_pb2_grpc.py +4 -0
  4. infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/export_runtime_env_pb2.py +57 -0
  5. infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/export_submission_job_event_pb2_grpc.py +4 -0
  6. infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/export_task_event_pb2.py +129 -0
  7. infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/export_task_event_pb2_grpc.py +4 -0
  8. infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/gcs_pb2.py +542 -0
  9. infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/gcs_pb2_grpc.py +4 -0
  10. infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/gcs_service_pb2_grpc.py +0 -0
  11. infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/job_agent_pb2_grpc.py +66 -0
  12. infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/ray_client_pb2.py +561 -0
  13. infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/reporter_pb2_grpc.py +292 -0
  14. infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/resource_pb2_grpc.py +4 -0
  15. infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/runtime_env_agent_pb2.py +87 -0
  16. infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/runtime_env_common_pb2_grpc.py +4 -0
  17. janus/lib/python3.10/_weakrefset.py +206 -0
  18. janus/lib/python3.10/asyncore.py +649 -0
  19. janus/lib/python3.10/bisect.py +110 -0
  20. janus/lib/python3.10/configparser.py +1368 -0
  21. janus/lib/python3.10/contextlib.py +745 -0
  22. janus/lib/python3.10/copy.py +304 -0
  23. janus/lib/python3.10/crypt.py +120 -0
  24. janus/lib/python3.10/csv.py +444 -0
  25. janus/lib/python3.10/dataclasses.py +1453 -0
  26. janus/lib/python3.10/difflib.py +2056 -0
  27. janus/lib/python3.10/distutils/__init__.py +20 -0
  28. janus/lib/python3.10/distutils/_msvccompiler.py +546 -0
  29. janus/lib/python3.10/distutils/cmd.py +403 -0
  30. janus/lib/python3.10/distutils/config.py +130 -0
  31. janus/lib/python3.10/distutils/core.py +234 -0
  32. janus/lib/python3.10/distutils/cygwinccompiler.py +406 -0
  33. janus/lib/python3.10/distutils/dep_util.py +92 -0
  34. janus/lib/python3.10/distutils/dir_util.py +210 -0
  35. janus/lib/python3.10/distutils/dist.py +1256 -0
  36. janus/lib/python3.10/distutils/errors.py +97 -0
  37. janus/lib/python3.10/distutils/tests/__pycache__/test_build_py.cpython-310.pyc +0 -0
  38. janus/lib/python3.10/distutils/tests/includetest.rst +1 -0
  39. janus/lib/python3.10/distutils/tests/test_build_ext.py +553 -0
  40. janus/lib/python3.10/distutils/tests/test_build_scripts.py +112 -0
  41. janus/lib/python3.10/distutils/tests/test_config_cmd.py +100 -0
  42. janus/lib/python3.10/distutils/tests/test_dir_util.py +139 -0
  43. janus/lib/python3.10/distutils/tests/test_register.py +324 -0
  44. janus/lib/python3.10/distutils/tests/test_spawn.py +137 -0
  45. janus/lib/python3.10/distutils/tests/test_versionpredicate.py +13 -0
  46. janus/lib/python3.10/distutils/versionpredicate.py +166 -0
  47. janus/lib/python3.10/glob.py +237 -0
  48. janus/lib/python3.10/graphlib.py +246 -0
  49. janus/lib/python3.10/keyword.py +63 -0
  50. janus/lib/python3.10/nntplib.py +1090 -0
infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/autoscaler_pb2.py ADDED
@@ -0,0 +1,327 @@
+ # -*- coding: utf-8 -*-
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
+ # source: src/ray/protobuf/autoscaler.proto
+ """Generated protocol buffer code."""
+ from google.protobuf.internal import enum_type_wrapper
+ from google.protobuf import descriptor as _descriptor
+ from google.protobuf import descriptor_pool as _descriptor_pool
+ from google.protobuf import message as _message
+ from google.protobuf import reflection as _reflection
+ from google.protobuf import symbol_database as _symbol_database
+ # @@protoc_insertion_point(imports)
+
+ _sym_db = _symbol_database.Default()
+
+
+
+
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n!src/ray/protobuf/autoscaler.proto\x12\x12ray.rpc.autoscaler\"X\n\x16\x41ntiAffinityConstraint\x12\x1d\n\nlabel_name\x18\x01 \x01(\tR\tlabelName\x12\x1f\n\x0blabel_value\x18\x02 \x01(\tR\nlabelValue\"T\n\x12\x41\x66\x66inityConstraint\x12\x1d\n\nlabel_name\x18\x01 \x01(\tR\tlabelName\x12\x1f\n\x0blabel_value\x18\x02 \x01(\tR\nlabelValue\"\xd3\x01\n\x13PlacementConstraint\x12T\n\ranti_affinity\x18\x01 \x01(\x0b\x32*.ray.rpc.autoscaler.AntiAffinityConstraintH\x00R\x0c\x61ntiAffinity\x88\x01\x01\x12G\n\x08\x61\x66\x66inity\x18\x02 \x01(\x0b\x32&.ray.rpc.autoscaler.AffinityConstraintH\x01R\x08\x61\x66\x66inity\x88\x01\x01\x42\x10\n\x0e_anti_affinityB\x0b\n\t_affinity\"\x98\x02\n\x0fResourceRequest\x12\x63\n\x10resources_bundle\x18\x01 \x03(\x0b\x32\x38.ray.rpc.autoscaler.ResourceRequest.ResourcesBundleEntryR\x0fresourcesBundle\x12\\\n\x15placement_constraints\x18\x02 \x03(\x0b\x32\'.ray.rpc.autoscaler.PlacementConstraintR\x14placementConstraints\x1a\x42\n\x14ResourcesBundleEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\x01R\x05value:\x02\x38\x01\"m\n\x16ResourceRequestByCount\x12=\n\x07request\x18\x01 \x01(\x0b\x32#.ray.rpc.autoscaler.ResourceRequestR\x07request\x12\x14\n\x05\x63ount\x18\x02 \x01(\x03R\x05\x63ount\"p\n\x13GangResourceRequest\x12?\n\x08requests\x18\x01 \x03(\x0b\x32#.ray.rpc.autoscaler.ResourceRequestR\x08requests\x12\x18\n\x07\x64\x65tails\x18\x02 \x01(\tR\x07\x64\x65tails\"h\n\x19\x43lusterResourceConstraint\x12K\n\x0bmin_bundles\x18\x01 \x03(\x0b\x32*.ray.rpc.autoscaler.ResourceRequestByCountR\nminBundles\"\xe6\x06\n\tNodeState\x12\x17\n\x07node_id\x18\x01 \x01(\x0cR\x06nodeId\x12\x1f\n\x0binstance_id\x18\x02 \x01(\tR\ninstanceId\x12+\n\x12ray_node_type_name\x18\x03 \x01(\tR\x0frayNodeTypeName\x12\x66\n\x13\x61vailable_resources\x18\x04 \x03(\x0b\x32\x35.ray.rpc.autoscaler.NodeState.AvailableResourcesEntryR\x12\x61vailableResources\x12Z\n\x0ftotal_resources\x18\x05 \x03(\x0b\x32\x31.ray.rpc.autoscaler.NodeState.TotalResourcesEntryR\x0etotalResources\x12W\n\x0e\x64ynamic_labels\x18\x06 \x03(\x0b\x32\x30.ray.rpc.autoscaler.NodeState.DynamicLabelsEntryR\rdynamicLabels\x12,\n\x12node_state_version\x18\x07 \x01(\x03R\x10nodeStateVersion\x12\x36\n\x06status\x18\x08 \x01(\x0e\x32\x1e.ray.rpc.autoscaler.NodeStatusR\x06status\x12(\n\x10idle_duration_ms\x18\t \x01(\x03R\x0eidleDurationMs\x12&\n\x0fnode_ip_address\x18\n \x01(\tR\rnodeIpAddress\x12,\n\x12instance_type_name\x18\x0b \x01(\tR\x10instanceTypeName\x12#\n\rnode_activity\x18\x0c \x03(\tR\x0cnodeActivity\x1a\x45\n\x17\x41vailableResourcesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\x01R\x05value:\x02\x38\x01\x1a\x41\n\x13TotalResourcesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\x01R\x05value:\x02\x38\x01\x1a@\n\x12\x44ynamicLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"w\n\x1eGetClusterResourceStateRequest\x12U\n(last_seen_cluster_resource_state_version\x18\x01 \x01(\x03R#lastSeenClusterResourceStateVersion\"\xe0\x04\n\x14\x43lusterResourceState\x12\x43\n\x1e\x63luster_resource_state_version\x18\x01 \x01(\x03R\x1b\x63lusterResourceStateVersion\x12J\n\"last_seen_autoscaler_state_version\x18\x02 \x01(\x03R\x1elastSeenAutoscalerStateVersion\x12>\n\x0bnode_states\x18\x03 \x03(\x0b\x32\x1d.ray.rpc.autoscaler.NodeStateR\nnodeStates\x12\x66\n\x19pending_resource_requests\x18\x04 \x03(\x0b\x32*.ray.rpc.autoscaler.ResourceRequestByCountR\x17pendingResourceRequests\x12l\n\x1epending_gang_resource_requests\x18\x05 \x03(\x0b\x32\'.ray.rpc.autoscaler.GangResourceRequestR\x1bpendingGangResourceRequests\x12o\n\x1c\x63luster_resource_constraints\x18\x06 \x03(\x0b\x32-.ray.rpc.autoscaler.ClusterResourceConstraintR\x1a\x63lusterResourceConstraints\x12\x30\n\x14\x63luster_session_name\x18\x07 \x01(\tR\x12\x63lusterSessionName\"~\n\x1cGetClusterResourceStateReply\x12^\n\x16\x63luster_resource_state\x18\x01 \x01(\x0b\x32(.ray.rpc.autoscaler.ClusterResourceStateR\x14\x63lusterResourceState\"\xa8\x01\n\x16PendingInstanceRequest\x12,\n\x12instance_type_name\x18\x01 \x01(\tR\x10instanceTypeName\x12+\n\x12ray_node_type_name\x18\x02 \x01(\tR\x0frayNodeTypeName\x12\x14\n\x05\x63ount\x18\x03 \x01(\x05R\x05\x63ount\x12\x1d\n\nrequest_ts\x18\x04 \x01(\x03R\trequestTs\"\xd8\x01\n\x15\x46\x61iledInstanceRequest\x12,\n\x12instance_type_name\x18\x01 \x01(\tR\x10instanceTypeName\x12+\n\x12ray_node_type_name\x18\x02 \x01(\tR\x0frayNodeTypeName\x12\x14\n\x05\x63ount\x18\x03 \x01(\x05R\x05\x63ount\x12\x16\n\x06reason\x18\x04 \x01(\tR\x06reason\x12\x19\n\x08start_ts\x18\x05 \x01(\x03R\x07startTs\x12\x1b\n\tfailed_ts\x18\x06 \x01(\x03R\x08\x66\x61iledTs\"\xc6\x01\n\x0fPendingInstance\x12,\n\x12instance_type_name\x18\x01 \x01(\tR\x10instanceTypeName\x12+\n\x12ray_node_type_name\x18\x02 \x01(\tR\x0frayNodeTypeName\x12\x1f\n\x0binstance_id\x18\x03 \x01(\tR\ninstanceId\x12\x1d\n\nip_address\x18\x04 \x01(\tR\tipAddress\x12\x18\n\x07\x64\x65tails\x18\x05 \x01(\tR\x07\x64\x65tails\"\xa4\x06\n\x10\x41utoscalingState\x12U\n(last_seen_cluster_resource_state_version\x18\x01 \x01(\x03R#lastSeenClusterResourceStateVersion\x12\x38\n\x18\x61utoscaler_state_version\x18\x02 \x01(\x03R\x16\x61utoscalerStateVersion\x12\x66\n\x19pending_instance_requests\x18\x03 \x03(\x0b\x32*.ray.rpc.autoscaler.PendingInstanceRequestR\x17pendingInstanceRequests\x12\x65\n\x1cinfeasible_resource_requests\x18\x04 \x03(\x0b\x32#.ray.rpc.autoscaler.ResourceRequestR\x1ainfeasibleResourceRequests\x12r\n!infeasible_gang_resource_requests\x18\x05 \x03(\x0b\x32\'.ray.rpc.autoscaler.GangResourceRequestR\x1einfeasibleGangResourceRequests\x12\x84\x01\n\'infeasible_cluster_resource_constraints\x18\x06 \x03(\x0b\x32-.ray.rpc.autoscaler.ClusterResourceConstraintR$infeasibleClusterResourceConstraints\x12P\n\x11pending_instances\x18\x07 \x03(\x0b\x32#.ray.rpc.autoscaler.PendingInstanceR\x10pendingInstances\x12\x63\n\x18\x66\x61iled_instance_requests\x18\x08 \x03(\x0b\x32).ray.rpc.autoscaler.FailedInstanceRequestR\x16\x66\x61iledInstanceRequests\"r\n\x1dReportAutoscalingStateRequest\x12Q\n\x11\x61utoscaling_state\x18\x01 \x01(\x0b\x32$.ray.rpc.autoscaler.AutoscalingStateR\x10\x61utoscalingState\"\x1d\n\x1bReportAutoscalingStateReply\"\x98\x01\n\'RequestClusterResourceConstraintRequest\x12m\n\x1b\x63luster_resource_constraint\x18\x01 \x01(\x0b\x32-.ray.rpc.autoscaler.ClusterResourceConstraintR\x19\x63lusterResourceConstraint\"\'\n%RequestClusterResourceConstraintReply\"\x19\n\x17GetClusterStatusRequest\"\xca\x01\n\x15GetClusterStatusReply\x12Q\n\x11\x61utoscaling_state\x18\x01 \x01(\x0b\x32$.ray.rpc.autoscaler.AutoscalingStateR\x10\x61utoscalingState\x12^\n\x16\x63luster_resource_state\x18\x02 \x01(\x0b\x32(.ray.rpc.autoscaler.ClusterResourceStateR\x14\x63lusterResourceState\"\xc3\x01\n\x10\x44rainNodeRequest\x12\x17\n\x07node_id\x18\x01 \x01(\x0cR\x06nodeId\x12;\n\x06reason\x18\x02 \x01(\x0e\x32#.ray.rpc.autoscaler.DrainNodeReasonR\x06reason\x12%\n\x0ereason_message\x18\x03 \x01(\tR\rreasonMessage\x12\x32\n\x15\x64\x65\x61\x64line_timestamp_ms\x18\x04 \x01(\x03R\x13\x64\x65\x61\x64lineTimestampMs\"k\n\x0e\x44rainNodeReply\x12\x1f\n\x0bis_accepted\x18\x01 \x01(\x08R\nisAccepted\x12\x38\n\x18rejection_reason_message\x18\x02 \x01(\tR\x16rejectionReasonMessage*L\n\nNodeStatus\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\x08\n\x04\x44\x45\x41\x44\x10\x02\x12\x08\n\x04IDLE\x10\x03\x12\x0c\n\x08\x44RAINING\x10\x04*~\n\x0f\x44rainNodeReason\x12!\n\x1d\x44RAIN_NODE_REASON_UNSPECIFIED\x10\x00\x12&\n\"DRAIN_NODE_REASON_IDLE_TERMINATION\x10\x01\x12 \n\x1c\x44RAIN_NODE_REASON_PREEMPTION\x10\x02\x32\xf7\x04\n\x16\x41utoscalerStateService\x12\x7f\n\x17GetClusterResourceState\x12\x32.ray.rpc.autoscaler.GetClusterResourceStateRequest\x1a\x30.ray.rpc.autoscaler.GetClusterResourceStateReply\x12|\n\x16ReportAutoscalingState\x12\x31.ray.rpc.autoscaler.ReportAutoscalingStateRequest\x1a/.ray.rpc.autoscaler.ReportAutoscalingStateReply\x12\x9a\x01\n RequestClusterResourceConstraint\x12;.ray.rpc.autoscaler.RequestClusterResourceConstraintRequest\x1a\x39.ray.rpc.autoscaler.RequestClusterResourceConstraintReply\x12j\n\x10GetClusterStatus\x12+.ray.rpc.autoscaler.GetClusterStatusRequest\x1a).ray.rpc.autoscaler.GetClusterStatusReply\x12U\n\tDrainNode\x12$.ray.rpc.autoscaler.DrainNodeRequest\x1a\".ray.rpc.autoscaler.DrainNodeReplyB\x03\xf8\x01\x01\x62\x06proto3')
+
+ _NODESTATUS = DESCRIPTOR.enum_types_by_name['NodeStatus']
+ NodeStatus = enum_type_wrapper.EnumTypeWrapper(_NODESTATUS)
+ _DRAINNODEREASON = DESCRIPTOR.enum_types_by_name['DrainNodeReason']
+ DrainNodeReason = enum_type_wrapper.EnumTypeWrapper(_DRAINNODEREASON)
+ UNSPECIFIED = 0
+ RUNNING = 1
+ DEAD = 2
+ IDLE = 3
+ DRAINING = 4
+ DRAIN_NODE_REASON_UNSPECIFIED = 0
+ DRAIN_NODE_REASON_IDLE_TERMINATION = 1
+ DRAIN_NODE_REASON_PREEMPTION = 2
+
+
+ _ANTIAFFINITYCONSTRAINT = DESCRIPTOR.message_types_by_name['AntiAffinityConstraint']
+ _AFFINITYCONSTRAINT = DESCRIPTOR.message_types_by_name['AffinityConstraint']
+ _PLACEMENTCONSTRAINT = DESCRIPTOR.message_types_by_name['PlacementConstraint']
+ _RESOURCEREQUEST = DESCRIPTOR.message_types_by_name['ResourceRequest']
+ _RESOURCEREQUEST_RESOURCESBUNDLEENTRY = _RESOURCEREQUEST.nested_types_by_name['ResourcesBundleEntry']
+ _RESOURCEREQUESTBYCOUNT = DESCRIPTOR.message_types_by_name['ResourceRequestByCount']
+ _GANGRESOURCEREQUEST = DESCRIPTOR.message_types_by_name['GangResourceRequest']
+ _CLUSTERRESOURCECONSTRAINT = DESCRIPTOR.message_types_by_name['ClusterResourceConstraint']
+ _NODESTATE = DESCRIPTOR.message_types_by_name['NodeState']
+ _NODESTATE_AVAILABLERESOURCESENTRY = _NODESTATE.nested_types_by_name['AvailableResourcesEntry']
+ _NODESTATE_TOTALRESOURCESENTRY = _NODESTATE.nested_types_by_name['TotalResourcesEntry']
+ _NODESTATE_DYNAMICLABELSENTRY = _NODESTATE.nested_types_by_name['DynamicLabelsEntry']
+ _GETCLUSTERRESOURCESTATEREQUEST = DESCRIPTOR.message_types_by_name['GetClusterResourceStateRequest']
+ _CLUSTERRESOURCESTATE = DESCRIPTOR.message_types_by_name['ClusterResourceState']
+ _GETCLUSTERRESOURCESTATEREPLY = DESCRIPTOR.message_types_by_name['GetClusterResourceStateReply']
+ _PENDINGINSTANCEREQUEST = DESCRIPTOR.message_types_by_name['PendingInstanceRequest']
+ _FAILEDINSTANCEREQUEST = DESCRIPTOR.message_types_by_name['FailedInstanceRequest']
+ _PENDINGINSTANCE = DESCRIPTOR.message_types_by_name['PendingInstance']
+ _AUTOSCALINGSTATE = DESCRIPTOR.message_types_by_name['AutoscalingState']
+ _REPORTAUTOSCALINGSTATEREQUEST = DESCRIPTOR.message_types_by_name['ReportAutoscalingStateRequest']
+ _REPORTAUTOSCALINGSTATEREPLY = DESCRIPTOR.message_types_by_name['ReportAutoscalingStateReply']
+ _REQUESTCLUSTERRESOURCECONSTRAINTREQUEST = DESCRIPTOR.message_types_by_name['RequestClusterResourceConstraintRequest']
+ _REQUESTCLUSTERRESOURCECONSTRAINTREPLY = DESCRIPTOR.message_types_by_name['RequestClusterResourceConstraintReply']
+ _GETCLUSTERSTATUSREQUEST = DESCRIPTOR.message_types_by_name['GetClusterStatusRequest']
+ _GETCLUSTERSTATUSREPLY = DESCRIPTOR.message_types_by_name['GetClusterStatusReply']
+ _DRAINNODEREQUEST = DESCRIPTOR.message_types_by_name['DrainNodeRequest']
+ _DRAINNODEREPLY = DESCRIPTOR.message_types_by_name['DrainNodeReply']
+ AntiAffinityConstraint = _reflection.GeneratedProtocolMessageType('AntiAffinityConstraint', (_message.Message,), {
+   'DESCRIPTOR' : _ANTIAFFINITYCONSTRAINT,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.AntiAffinityConstraint)
+   })
+ _sym_db.RegisterMessage(AntiAffinityConstraint)
+
+ AffinityConstraint = _reflection.GeneratedProtocolMessageType('AffinityConstraint', (_message.Message,), {
+   'DESCRIPTOR' : _AFFINITYCONSTRAINT,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.AffinityConstraint)
+   })
+ _sym_db.RegisterMessage(AffinityConstraint)
+
+ PlacementConstraint = _reflection.GeneratedProtocolMessageType('PlacementConstraint', (_message.Message,), {
+   'DESCRIPTOR' : _PLACEMENTCONSTRAINT,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.PlacementConstraint)
+   })
+ _sym_db.RegisterMessage(PlacementConstraint)
+
+ ResourceRequest = _reflection.GeneratedProtocolMessageType('ResourceRequest', (_message.Message,), {
+
+   'ResourcesBundleEntry' : _reflection.GeneratedProtocolMessageType('ResourcesBundleEntry', (_message.Message,), {
+     'DESCRIPTOR' : _RESOURCEREQUEST_RESOURCESBUNDLEENTRY,
+     '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+     # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.ResourceRequest.ResourcesBundleEntry)
+     })
+   ,
+   'DESCRIPTOR' : _RESOURCEREQUEST,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.ResourceRequest)
+   })
+ _sym_db.RegisterMessage(ResourceRequest)
+ _sym_db.RegisterMessage(ResourceRequest.ResourcesBundleEntry)
+
+ ResourceRequestByCount = _reflection.GeneratedProtocolMessageType('ResourceRequestByCount', (_message.Message,), {
+   'DESCRIPTOR' : _RESOURCEREQUESTBYCOUNT,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.ResourceRequestByCount)
+   })
+ _sym_db.RegisterMessage(ResourceRequestByCount)
+
+ GangResourceRequest = _reflection.GeneratedProtocolMessageType('GangResourceRequest', (_message.Message,), {
+   'DESCRIPTOR' : _GANGRESOURCEREQUEST,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.GangResourceRequest)
+   })
+ _sym_db.RegisterMessage(GangResourceRequest)
+
+ ClusterResourceConstraint = _reflection.GeneratedProtocolMessageType('ClusterResourceConstraint', (_message.Message,), {
+   'DESCRIPTOR' : _CLUSTERRESOURCECONSTRAINT,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.ClusterResourceConstraint)
+   })
+ _sym_db.RegisterMessage(ClusterResourceConstraint)
+
+ NodeState = _reflection.GeneratedProtocolMessageType('NodeState', (_message.Message,), {
+
+   'AvailableResourcesEntry' : _reflection.GeneratedProtocolMessageType('AvailableResourcesEntry', (_message.Message,), {
+     'DESCRIPTOR' : _NODESTATE_AVAILABLERESOURCESENTRY,
+     '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+     # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.NodeState.AvailableResourcesEntry)
+     })
+   ,
+
+   'TotalResourcesEntry' : _reflection.GeneratedProtocolMessageType('TotalResourcesEntry', (_message.Message,), {
+     'DESCRIPTOR' : _NODESTATE_TOTALRESOURCESENTRY,
+     '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+     # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.NodeState.TotalResourcesEntry)
+     })
+   ,
+
+   'DynamicLabelsEntry' : _reflection.GeneratedProtocolMessageType('DynamicLabelsEntry', (_message.Message,), {
+     'DESCRIPTOR' : _NODESTATE_DYNAMICLABELSENTRY,
+     '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+     # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.NodeState.DynamicLabelsEntry)
+     })
+   ,
+   'DESCRIPTOR' : _NODESTATE,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.NodeState)
+   })
+ _sym_db.RegisterMessage(NodeState)
+ _sym_db.RegisterMessage(NodeState.AvailableResourcesEntry)
+ _sym_db.RegisterMessage(NodeState.TotalResourcesEntry)
+ _sym_db.RegisterMessage(NodeState.DynamicLabelsEntry)
+
+ GetClusterResourceStateRequest = _reflection.GeneratedProtocolMessageType('GetClusterResourceStateRequest', (_message.Message,), {
+   'DESCRIPTOR' : _GETCLUSTERRESOURCESTATEREQUEST,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.GetClusterResourceStateRequest)
+   })
+ _sym_db.RegisterMessage(GetClusterResourceStateRequest)
+
+ ClusterResourceState = _reflection.GeneratedProtocolMessageType('ClusterResourceState', (_message.Message,), {
+   'DESCRIPTOR' : _CLUSTERRESOURCESTATE,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.ClusterResourceState)
+   })
+ _sym_db.RegisterMessage(ClusterResourceState)
+
+ GetClusterResourceStateReply = _reflection.GeneratedProtocolMessageType('GetClusterResourceStateReply', (_message.Message,), {
+   'DESCRIPTOR' : _GETCLUSTERRESOURCESTATEREPLY,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.GetClusterResourceStateReply)
+   })
+ _sym_db.RegisterMessage(GetClusterResourceStateReply)
+
+ PendingInstanceRequest = _reflection.GeneratedProtocolMessageType('PendingInstanceRequest', (_message.Message,), {
+   'DESCRIPTOR' : _PENDINGINSTANCEREQUEST,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.PendingInstanceRequest)
+   })
+ _sym_db.RegisterMessage(PendingInstanceRequest)
+
+ FailedInstanceRequest = _reflection.GeneratedProtocolMessageType('FailedInstanceRequest', (_message.Message,), {
+   'DESCRIPTOR' : _FAILEDINSTANCEREQUEST,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.FailedInstanceRequest)
+   })
+ _sym_db.RegisterMessage(FailedInstanceRequest)
+
+ PendingInstance = _reflection.GeneratedProtocolMessageType('PendingInstance', (_message.Message,), {
+   'DESCRIPTOR' : _PENDINGINSTANCE,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.PendingInstance)
+   })
+ _sym_db.RegisterMessage(PendingInstance)
+
+ AutoscalingState = _reflection.GeneratedProtocolMessageType('AutoscalingState', (_message.Message,), {
+   'DESCRIPTOR' : _AUTOSCALINGSTATE,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.AutoscalingState)
+   })
+ _sym_db.RegisterMessage(AutoscalingState)
+
+ ReportAutoscalingStateRequest = _reflection.GeneratedProtocolMessageType('ReportAutoscalingStateRequest', (_message.Message,), {
+   'DESCRIPTOR' : _REPORTAUTOSCALINGSTATEREQUEST,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.ReportAutoscalingStateRequest)
+   })
+ _sym_db.RegisterMessage(ReportAutoscalingStateRequest)
+
+ ReportAutoscalingStateReply = _reflection.GeneratedProtocolMessageType('ReportAutoscalingStateReply', (_message.Message,), {
+   'DESCRIPTOR' : _REPORTAUTOSCALINGSTATEREPLY,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.ReportAutoscalingStateReply)
+   })
+ _sym_db.RegisterMessage(ReportAutoscalingStateReply)
+
+ RequestClusterResourceConstraintRequest = _reflection.GeneratedProtocolMessageType('RequestClusterResourceConstraintRequest', (_message.Message,), {
+   'DESCRIPTOR' : _REQUESTCLUSTERRESOURCECONSTRAINTREQUEST,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.RequestClusterResourceConstraintRequest)
+   })
+ _sym_db.RegisterMessage(RequestClusterResourceConstraintRequest)
+
+ RequestClusterResourceConstraintReply = _reflection.GeneratedProtocolMessageType('RequestClusterResourceConstraintReply', (_message.Message,), {
+   'DESCRIPTOR' : _REQUESTCLUSTERRESOURCECONSTRAINTREPLY,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.RequestClusterResourceConstraintReply)
+   })
+ _sym_db.RegisterMessage(RequestClusterResourceConstraintReply)
+
+ GetClusterStatusRequest = _reflection.GeneratedProtocolMessageType('GetClusterStatusRequest', (_message.Message,), {
+   'DESCRIPTOR' : _GETCLUSTERSTATUSREQUEST,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.GetClusterStatusRequest)
+   })
+ _sym_db.RegisterMessage(GetClusterStatusRequest)
+
+ GetClusterStatusReply = _reflection.GeneratedProtocolMessageType('GetClusterStatusReply', (_message.Message,), {
+   'DESCRIPTOR' : _GETCLUSTERSTATUSREPLY,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.GetClusterStatusReply)
+   })
+ _sym_db.RegisterMessage(GetClusterStatusReply)
+
+ DrainNodeRequest = _reflection.GeneratedProtocolMessageType('DrainNodeRequest', (_message.Message,), {
+   'DESCRIPTOR' : _DRAINNODEREQUEST,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.DrainNodeRequest)
+   })
+ _sym_db.RegisterMessage(DrainNodeRequest)
+
+ DrainNodeReply = _reflection.GeneratedProtocolMessageType('DrainNodeReply', (_message.Message,), {
+   'DESCRIPTOR' : _DRAINNODEREPLY,
+   '__module__' : 'src.ray.protobuf.autoscaler_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.autoscaler.DrainNodeReply)
+   })
+ _sym_db.RegisterMessage(DrainNodeReply)
+
+ _AUTOSCALERSTATESERVICE = DESCRIPTOR.services_by_name['AutoscalerStateService']
+ if _descriptor._USE_C_DESCRIPTORS == False:
+
+   DESCRIPTOR._options = None
+   DESCRIPTOR._serialized_options = b'\370\001\001'
+   _RESOURCEREQUEST_RESOURCESBUNDLEENTRY._options = None
+   _RESOURCEREQUEST_RESOURCESBUNDLEENTRY._serialized_options = b'8\001'
+   _NODESTATE_AVAILABLERESOURCESENTRY._options = None
+   _NODESTATE_AVAILABLERESOURCESENTRY._serialized_options = b'8\001'
+   _NODESTATE_TOTALRESOURCESENTRY._options = None
+   _NODESTATE_TOTALRESOURCESENTRY._serialized_options = b'8\001'
+   _NODESTATE_DYNAMICLABELSENTRY._options = None
+   _NODESTATE_DYNAMICLABELSENTRY._serialized_options = b'8\001'
+   _NODESTATUS._serialized_start=5074
+   _NODESTATUS._serialized_end=5150
+   _DRAINNODEREASON._serialized_start=5152
+   _DRAINNODEREASON._serialized_end=5278
+   _ANTIAFFINITYCONSTRAINT._serialized_start=57
+   _ANTIAFFINITYCONSTRAINT._serialized_end=145
+   _AFFINITYCONSTRAINT._serialized_start=147
+   _AFFINITYCONSTRAINT._serialized_end=231
+   _PLACEMENTCONSTRAINT._serialized_start=234
+   _PLACEMENTCONSTRAINT._serialized_end=445
+   _RESOURCEREQUEST._serialized_start=448
+   _RESOURCEREQUEST._serialized_end=728
+   _RESOURCEREQUEST_RESOURCESBUNDLEENTRY._serialized_start=662
+   _RESOURCEREQUEST_RESOURCESBUNDLEENTRY._serialized_end=728
+   _RESOURCEREQUESTBYCOUNT._serialized_start=730
+   _RESOURCEREQUESTBYCOUNT._serialized_end=839
+   _GANGRESOURCEREQUEST._serialized_start=841
+   _GANGRESOURCEREQUEST._serialized_end=953
+   _CLUSTERRESOURCECONSTRAINT._serialized_start=955
+   _CLUSTERRESOURCECONSTRAINT._serialized_end=1059
+   _NODESTATE._serialized_start=1062
+   _NODESTATE._serialized_end=1932
+   _NODESTATE_AVAILABLERESOURCESENTRY._serialized_start=1730
+   _NODESTATE_AVAILABLERESOURCESENTRY._serialized_end=1799
+   _NODESTATE_TOTALRESOURCESENTRY._serialized_start=1801
+   _NODESTATE_TOTALRESOURCESENTRY._serialized_end=1866
+   _NODESTATE_DYNAMICLABELSENTRY._serialized_start=1868
+   _NODESTATE_DYNAMICLABELSENTRY._serialized_end=1932
+   _GETCLUSTERRESOURCESTATEREQUEST._serialized_start=1934
+   _GETCLUSTERRESOURCESTATEREQUEST._serialized_end=2053
+   _CLUSTERRESOURCESTATE._serialized_start=2056
+   _CLUSTERRESOURCESTATE._serialized_end=2664
+   _GETCLUSTERRESOURCESTATEREPLY._serialized_start=2666
+   _GETCLUSTERRESOURCESTATEREPLY._serialized_end=2792
+   _PENDINGINSTANCEREQUEST._serialized_start=2795
+   _PENDINGINSTANCEREQUEST._serialized_end=2963
+   _FAILEDINSTANCEREQUEST._serialized_start=2966
+   _FAILEDINSTANCEREQUEST._serialized_end=3182
+   _PENDINGINSTANCE._serialized_start=3185
+   _PENDINGINSTANCE._serialized_end=3383
+   _AUTOSCALINGSTATE._serialized_start=3386
+   _AUTOSCALINGSTATE._serialized_end=4190
+   _REPORTAUTOSCALINGSTATEREQUEST._serialized_start=4192
+   _REPORTAUTOSCALINGSTATEREQUEST._serialized_end=4306
+   _REPORTAUTOSCALINGSTATEREPLY._serialized_start=4308
+   _REPORTAUTOSCALINGSTATEREPLY._serialized_end=4337
+   _REQUESTCLUSTERRESOURCECONSTRAINTREQUEST._serialized_start=4340
+   _REQUESTCLUSTERRESOURCECONSTRAINTREQUEST._serialized_end=4492
+   _REQUESTCLUSTERRESOURCECONSTRAINTREPLY._serialized_start=4494
+   _REQUESTCLUSTERRESOURCECONSTRAINTREPLY._serialized_end=4533
+   _GETCLUSTERSTATUSREQUEST._serialized_start=4535
+   _GETCLUSTERSTATUSREQUEST._serialized_end=4560
+   _GETCLUSTERSTATUSREPLY._serialized_start=4563
+   _GETCLUSTERSTATUSREPLY._serialized_end=4765
+   _DRAINNODEREQUEST._serialized_start=4768
+   _DRAINNODEREQUEST._serialized_end=4963
+   _DRAINNODEREPLY._serialized_start=4965
+   _DRAINNODEREPLY._serialized_end=5072
+   _AUTOSCALERSTATESERVICE._serialized_start=5281
+   _AUTOSCALERSTATESERVICE._serialized_end=5912
+ # @@protoc_insertion_point(module_scope)
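
For orientation, a minimal usage sketch of a generated `_pb2` module like the one above (assuming the bundled `ray.core.generated.autoscaler_pb2` is importable from this environment; the resource names and values are illustrative, not taken from this commit):

import ray.core.generated.autoscaler_pb2 as autoscaler_pb2

# Build a ResourceRequest and fill its map<string, double> bundle.
req = autoscaler_pb2.ResourceRequest()
req.resources_bundle["CPU"] = 4.0  # illustrative values
req.resources_bundle["GPU"] = 1.0

# Round-trip through the protobuf wire format.
wire = req.SerializeToString()
parsed = autoscaler_pb2.ResourceRequest.FromString(wire)
assert parsed.resources_bundle["CPU"] == 4.0

# The enums are exposed both as wrappers and as module-level constants.
assert autoscaler_pb2.NodeStatus.Value("RUNNING") == autoscaler_pb2.RUNNING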
infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/event_pb2_grpc.py ADDED
@@ -0,0 +1,66 @@
+ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+ """Client and server classes corresponding to protobuf-defined services."""
+ import grpc
+
+ from . import event_pb2 as src_dot_ray_dot_protobuf_dot_event__pb2
+
+
+ class ReportEventServiceStub(object):
+     """Missing associated documentation comment in .proto file."""
+
+     def __init__(self, channel):
+         """Constructor.
+
+         Args:
+             channel: A grpc.Channel.
+         """
+         self.ReportEvents = channel.unary_unary(
+                 '/ray.rpc.ReportEventService/ReportEvents',
+                 request_serializer=src_dot_ray_dot_protobuf_dot_event__pb2.ReportEventsRequest.SerializeToString,
+                 response_deserializer=src_dot_ray_dot_protobuf_dot_event__pb2.ReportEventsReply.FromString,
+                 )
+
+
+ class ReportEventServiceServicer(object):
+     """Missing associated documentation comment in .proto file."""
+
+     def ReportEvents(self, request, context):
+         """Missing associated documentation comment in .proto file."""
+         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+         context.set_details('Method not implemented!')
+         raise NotImplementedError('Method not implemented!')
+
+
+ def add_ReportEventServiceServicer_to_server(servicer, server):
+     rpc_method_handlers = {
+             'ReportEvents': grpc.unary_unary_rpc_method_handler(
+                     servicer.ReportEvents,
+                     request_deserializer=src_dot_ray_dot_protobuf_dot_event__pb2.ReportEventsRequest.FromString,
+                     response_serializer=src_dot_ray_dot_protobuf_dot_event__pb2.ReportEventsReply.SerializeToString,
+             ),
+     }
+     generic_handler = grpc.method_handlers_generic_handler(
+             'ray.rpc.ReportEventService', rpc_method_handlers)
+     server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+ class ReportEventService(object):
+     """Missing associated documentation comment in .proto file."""
+
+     @staticmethod
+     def ReportEvents(request,
+             target,
+             options=(),
+             channel_credentials=None,
+             call_credentials=None,
+             insecure=False,
+             compression=None,
+             wait_for_ready=None,
+             timeout=None,
+             metadata=None):
+         return grpc.experimental.unary_unary(request, target, '/ray.rpc.ReportEventService/ReportEvents',
+             src_dot_ray_dot_protobuf_dot_event__pb2.ReportEventsRequest.SerializeToString,
+             src_dot_ray_dot_protobuf_dot_event__pb2.ReportEventsReply.FromString,
+             options, channel_credentials,
+             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
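
A minimal sketch of how the generated stub and servicer above are typically wired up (assuming `event_pb2` defines `ReportEventsRequest`/`ReportEventsReply`, as the serializers above imply; the servicer body, port, and class name are illustrative):

from concurrent import futures
import grpc
from ray.core.generated import event_pb2, event_pb2_grpc

class EchoReportEventServicer(event_pb2_grpc.ReportEventServiceServicer):
    # Illustrative override; a real servicer would record the reported events.
    def ReportEvents(self, request, context):
        return event_pb2.ReportEventsReply()

server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
event_pb2_grpc.add_ReportEventServiceServicer_to_server(EchoReportEventServicer(), server)
server.add_insecure_port("127.0.0.1:50051")  # illustrative address
server.start()

# Client side: the stub mirrors the service method one-to-one.
channel = grpc.insecure_channel("127.0.0.1:50051")
stub = event_pb2_grpc.ReportEventServiceStub(channel)
reply = stub.ReportEvents(event_pb2.ReportEventsRequest())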
infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/export_driver_job_event_pb2_grpc.py ADDED
@@ -0,0 +1,4 @@
+ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+ """Client and server classes corresponding to protobuf-defined services."""
+ import grpc
+
infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/export_runtime_env_pb2.py ADDED
@@ -0,0 +1,57 @@
+ # -*- coding: utf-8 -*-
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
+ # source: src/ray/protobuf/export_api/export_runtime_env.proto
+ """Generated protocol buffer code."""
+ from google.protobuf import descriptor as _descriptor
+ from google.protobuf import descriptor_pool as _descriptor_pool
+ from google.protobuf import message as _message
+ from google.protobuf import reflection as _reflection
+ from google.protobuf import symbol_database as _symbol_database
+ # @@protoc_insertion_point(imports)
+
+ _sym_db = _symbol_database.Default()
+
+
+
+
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n4src/ray/protobuf/export_api/export_runtime_env.proto\x12\x07ray.rpc\"\xd9\x03\n\x14\x45xportRuntimeEnvInfo\x12\x34\n\x16serialized_runtime_env\x18\x01 \x01(\tR\x14serializedRuntimeEnv\x12@\n\x04uris\x18\x02 \x01(\x0b\x32,.ray.rpc.ExportRuntimeEnvInfo.RuntimeEnvUrisR\x04uris\x12\\\n\x12runtime_env_config\x18\x03 \x01(\x0b\x32..ray.rpc.ExportRuntimeEnvInfo.RuntimeEnvConfigR\x10runtimeEnvConfig\x1a`\n\x0eRuntimeEnvUris\x12&\n\x0fworking_dir_uri\x18\x01 \x01(\tR\rworkingDirUri\x12&\n\x0fpy_modules_uris\x18\x02 \x03(\tR\rpyModulesUris\x1a\x88\x01\n\x10RuntimeEnvConfig\x12\x32\n\x15setup_timeout_seconds\x18\x01 \x01(\x05R\x13setupTimeoutSeconds\x12#\n\reager_install\x18\x02 \x01(\x08R\x0c\x65\x61gerInstall\x12\x1b\n\tlog_files\x18\x03 \x03(\tR\x08logFilesB\x03\xf8\x01\x01\x62\x06proto3')
+
+
+
+ _EXPORTRUNTIMEENVINFO = DESCRIPTOR.message_types_by_name['ExportRuntimeEnvInfo']
+ _EXPORTRUNTIMEENVINFO_RUNTIMEENVURIS = _EXPORTRUNTIMEENVINFO.nested_types_by_name['RuntimeEnvUris']
+ _EXPORTRUNTIMEENVINFO_RUNTIMEENVCONFIG = _EXPORTRUNTIMEENVINFO.nested_types_by_name['RuntimeEnvConfig']
+ ExportRuntimeEnvInfo = _reflection.GeneratedProtocolMessageType('ExportRuntimeEnvInfo', (_message.Message,), {
+
+   'RuntimeEnvUris' : _reflection.GeneratedProtocolMessageType('RuntimeEnvUris', (_message.Message,), {
+     'DESCRIPTOR' : _EXPORTRUNTIMEENVINFO_RUNTIMEENVURIS,
+     '__module__' : 'src.ray.protobuf.export_api.export_runtime_env_pb2'
+     # @@protoc_insertion_point(class_scope:ray.rpc.ExportRuntimeEnvInfo.RuntimeEnvUris)
+     })
+   ,
+
+   'RuntimeEnvConfig' : _reflection.GeneratedProtocolMessageType('RuntimeEnvConfig', (_message.Message,), {
+     'DESCRIPTOR' : _EXPORTRUNTIMEENVINFO_RUNTIMEENVCONFIG,
+     '__module__' : 'src.ray.protobuf.export_api.export_runtime_env_pb2'
+     # @@protoc_insertion_point(class_scope:ray.rpc.ExportRuntimeEnvInfo.RuntimeEnvConfig)
+     })
+   ,
+   'DESCRIPTOR' : _EXPORTRUNTIMEENVINFO,
+   '__module__' : 'src.ray.protobuf.export_api.export_runtime_env_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.ExportRuntimeEnvInfo)
+   })
+ _sym_db.RegisterMessage(ExportRuntimeEnvInfo)
+ _sym_db.RegisterMessage(ExportRuntimeEnvInfo.RuntimeEnvUris)
+ _sym_db.RegisterMessage(ExportRuntimeEnvInfo.RuntimeEnvConfig)
+
+ if _descriptor._USE_C_DESCRIPTORS == False:
+
+   DESCRIPTOR._options = None
+   DESCRIPTOR._serialized_options = b'\370\001\001'
+   _EXPORTRUNTIMEENVINFO._serialized_start=66
+   _EXPORTRUNTIMEENVINFO._serialized_end=539
+   _EXPORTRUNTIMEENVINFO_RUNTIMEENVURIS._serialized_start=304
+   _EXPORTRUNTIMEENVINFO_RUNTIMEENVURIS._serialized_end=400
+   _EXPORTRUNTIMEENVINFO_RUNTIMEENVCONFIG._serialized_start=403
+   _EXPORTRUNTIMEENVINFO_RUNTIMEENVCONFIG._serialized_end=539
+ # @@protoc_insertion_point(module_scope)
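
The nested messages registered above are reachable as attributes of the outer class; a brief sketch (field values are illustrative, not from this commit):

from ray.core.generated import export_runtime_env_pb2

info = export_runtime_env_pb2.ExportRuntimeEnvInfo(
    serialized_runtime_env='{"pip": ["requests"]}',  # illustrative payload
)
info.uris.working_dir_uri = "gcs://example_working_dir"  # nested RuntimeEnvUris
info.runtime_env_config.setup_timeout_seconds = 600      # nested RuntimeEnvConfig
info.runtime_env_config.eager_install = True

# Nested classes are attributes of the outer generated class.
assert isinstance(info.uris, export_runtime_env_pb2.ExportRuntimeEnvInfo.RuntimeEnvUris)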
infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/export_submission_job_event_pb2_grpc.py ADDED
@@ -0,0 +1,4 @@
+ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+ """Client and server classes corresponding to protobuf-defined services."""
+ import grpc
+
infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/export_task_event_pb2.py ADDED
@@ -0,0 +1,129 @@
+ # -*- coding: utf-8 -*-
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
+ # source: src/ray/protobuf/export_api/export_task_event.proto
+ """Generated protocol buffer code."""
+ from google.protobuf import descriptor as _descriptor
+ from google.protobuf import descriptor_pool as _descriptor_pool
+ from google.protobuf import message as _message
+ from google.protobuf import reflection as _reflection
+ from google.protobuf import symbol_database as _symbol_database
+ # @@protoc_insertion_point(imports)
+
+ _sym_db = _symbol_database.Default()
+
+
+ from . import common_pb2 as src_dot_ray_dot_protobuf_dot_common__pb2
+ from . import export_runtime_env_pb2 as src_dot_ray_dot_protobuf_dot_export__api_dot_export__runtime__env__pb2
+
+
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n3src/ray/protobuf/export_api/export_task_event.proto\x12\x07ray.rpc\x1a\x1dsrc/ray/protobuf/common.proto\x1a\x34src/ray/protobuf/export_api/export_runtime_env.proto\"\xdc\x12\n\x13\x45xportTaskEventData\x12\x17\n\x07task_id\x18\x01 \x01(\x0cR\x06taskId\x12%\n\x0e\x61ttempt_number\x18\x02 \x01(\x05R\rattemptNumber\x12L\n\ttask_info\x18\x03 \x01(\x0b\x32*.ray.rpc.ExportTaskEventData.TaskInfoEntryH\x00R\x08taskInfo\x88\x01\x01\x12V\n\rstate_updates\x18\x04 \x01(\x0b\x32,.ray.rpc.ExportTaskEventData.TaskStateUpdateH\x01R\x0cstateUpdates\x88\x01\x01\x12V\n\x0eprofile_events\x18\x05 \x01(\x0b\x32*.ray.rpc.ExportTaskEventData.ProfileEventsH\x02R\rprofileEvents\x88\x01\x01\x12\x15\n\x06job_id\x18\x06 \x01(\x0cR\x05jobId\x1a\x66\n\x0cRayErrorInfo\x12#\n\rerror_message\x18\x01 \x01(\tR\x0c\x65rrorMessage\x12\x31\n\nerror_type\x18\x02 \x01(\x0e\x32\x12.ray.rpc.ErrorTypeR\terrorType\x1a\xd1\x02\n\x0bTaskLogInfo\x12$\n\x0bstdout_file\x18\x01 \x01(\tH\x00R\nstdoutFile\x88\x01\x01\x12$\n\x0bstderr_file\x18\x02 \x01(\tH\x01R\nstderrFile\x88\x01\x01\x12&\n\x0cstdout_start\x18\x03 \x01(\x05H\x02R\x0bstdoutStart\x88\x01\x01\x12\"\n\nstdout_end\x18\x04 \x01(\x05H\x03R\tstdoutEnd\x88\x01\x01\x12&\n\x0cstderr_start\x18\x05 \x01(\x05H\x04R\x0bstderrStart\x88\x01\x01\x12\"\n\nstderr_end\x18\x06 \x01(\x05H\x05R\tstderrEnd\x88\x01\x01\x42\x0e\n\x0c_stdout_fileB\x0e\n\x0c_stderr_fileB\x0f\n\r_stdout_startB\r\n\x0b_stdout_endB\x0f\n\r_stderr_startB\r\n\x0b_stderr_end\x1a\xc6\x04\n\x0fTaskStateUpdate\x12\x1c\n\x07node_id\x18\x01 \x01(\x0cH\x00R\x06nodeId\x88\x01\x01\x12 \n\tworker_id\x18\x02 \x01(\x0cH\x01R\x08workerId\x88\x01\x01\x12M\n\nerror_info\x18\x03 \x01(\x0b\x32).ray.rpc.ExportTaskEventData.RayErrorInfoH\x02R\terrorInfo\x88\x01\x01\x12Q\n\rtask_log_info\x18\x04 \x01(\x0b\x32(.ray.rpc.ExportTaskEventData.TaskLogInfoH\x03R\x0btaskLogInfo\x88\x01\x01\x12\"\n\nworker_pid\x18\x05 \x01(\x05H\x04R\tworkerPid\x88\x01\x01\x12\x31\n\x12is_debugger_paused\x18\x06 \x01(\x08H\x05R\x10isDebuggerPaused\x88\x01\x01\x12[\n\x0bstate_ts_ns\x18\x07 \x03(\x0b\x32;.ray.rpc.ExportTaskEventData.TaskStateUpdate.StateTsNsEntryR\tstateTsNs\x1a<\n\x0eStateTsNsEntry\x12\x10\n\x03key\x18\x01 \x01(\x05R\x03key\x12\x14\n\x05value\x18\x02 \x01(\x03R\x05value:\x02\x38\x01\x42\n\n\x08_node_idB\x0c\n\n_worker_idB\r\n\x0b_error_infoB\x10\n\x0e_task_log_infoB\r\n\x0b_worker_pidB\x15\n\x13_is_debugger_paused\x1a\xc9\x04\n\rTaskInfoEntry\x12%\n\x04type\x18\x01 \x01(\x0e\x32\x11.ray.rpc.TaskTypeR\x04type\x12-\n\x08language\x18\x02 \x01(\x0e\x32\x11.ray.rpc.LanguageR\x08language\x12+\n\x12\x66unc_or_class_name\x18\x03 \x01(\tR\x0f\x66uncOrClassName\x12\x17\n\x07task_id\x18\x04 \x01(\x0cR\x06taskId\x12$\n\x0eparent_task_id\x18\x05 \x01(\x0cR\x0cparentTaskId\x12p\n\x12required_resources\x18\x06 \x03(\x0b\x32\x41.ray.rpc.ExportTaskEventData.TaskInfoEntry.RequiredResourcesEntryR\x11requiredResources\x12G\n\x10runtime_env_info\x18\x07 \x01(\x0b\x32\x1d.ray.rpc.ExportRuntimeEnvInfoR\x0eruntimeEnvInfo\x12\x1e\n\x08\x61\x63tor_id\x18\x08 \x01(\x0cH\x00R\x07\x61\x63torId\x88\x01\x01\x12\x31\n\x12placement_group_id\x18\t \x01(\x0cH\x01R\x10placementGroupId\x88\x01\x01\x1a\x44\n\x16RequiredResourcesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\x01R\x05value:\x02\x38\x01\x42\x0b\n\t_actor_idB\x15\n\x13_placement_group_id\x1a\x9f\x01\n\x11ProfileEventEntry\x12\x1d\n\nstart_time\x18\x01 \x01(\x03R\tstartTime\x12\x19\n\x08\x65nd_time\x18\x02 \x01(\x03R\x07\x65ndTime\x12\"\n\nextra_data\x18\x03 \x01(\tH\x00R\textraData\x88\x01\x01\x12\x1d\n\nevent_name\x18\x04 \x01(\tR\teventNameB\r\n\x0b_extra_data\x1a\xc9\x01\n\rProfileEvents\x12%\n\x0e\x63omponent_type\x18\x01 \x01(\tR\rcomponentType\x12!\n\x0c\x63omponent_id\x18\x02 \x01(\x0cR\x0b\x63omponentId\x12&\n\x0fnode_ip_address\x18\x03 \x01(\tR\rnodeIpAddress\x12\x46\n\x06\x65vents\x18\x04 \x03(\x0b\x32..ray.rpc.ExportTaskEventData.ProfileEventEntryR\x06\x65ventsB\x0c\n\n_task_infoB\x10\n\x0e_state_updatesB\x11\n\x0f_profile_eventsB\x03\xf8\x01\x01\x62\x06proto3')
+
+
+
+ _EXPORTTASKEVENTDATA = DESCRIPTOR.message_types_by_name['ExportTaskEventData']
+ _EXPORTTASKEVENTDATA_RAYERRORINFO = _EXPORTTASKEVENTDATA.nested_types_by_name['RayErrorInfo']
+ _EXPORTTASKEVENTDATA_TASKLOGINFO = _EXPORTTASKEVENTDATA.nested_types_by_name['TaskLogInfo']
+ _EXPORTTASKEVENTDATA_TASKSTATEUPDATE = _EXPORTTASKEVENTDATA.nested_types_by_name['TaskStateUpdate']
+ _EXPORTTASKEVENTDATA_TASKSTATEUPDATE_STATETSNSENTRY = _EXPORTTASKEVENTDATA_TASKSTATEUPDATE.nested_types_by_name['StateTsNsEntry']
+ _EXPORTTASKEVENTDATA_TASKINFOENTRY = _EXPORTTASKEVENTDATA.nested_types_by_name['TaskInfoEntry']
+ _EXPORTTASKEVENTDATA_TASKINFOENTRY_REQUIREDRESOURCESENTRY = _EXPORTTASKEVENTDATA_TASKINFOENTRY.nested_types_by_name['RequiredResourcesEntry']
+ _EXPORTTASKEVENTDATA_PROFILEEVENTENTRY = _EXPORTTASKEVENTDATA.nested_types_by_name['ProfileEventEntry']
+ _EXPORTTASKEVENTDATA_PROFILEEVENTS = _EXPORTTASKEVENTDATA.nested_types_by_name['ProfileEvents']
+ ExportTaskEventData = _reflection.GeneratedProtocolMessageType('ExportTaskEventData', (_message.Message,), {
+
+   'RayErrorInfo' : _reflection.GeneratedProtocolMessageType('RayErrorInfo', (_message.Message,), {
+     'DESCRIPTOR' : _EXPORTTASKEVENTDATA_RAYERRORINFO,
+     '__module__' : 'src.ray.protobuf.export_api.export_task_event_pb2'
+     # @@protoc_insertion_point(class_scope:ray.rpc.ExportTaskEventData.RayErrorInfo)
+     })
+   ,
+
+   'TaskLogInfo' : _reflection.GeneratedProtocolMessageType('TaskLogInfo', (_message.Message,), {
+     'DESCRIPTOR' : _EXPORTTASKEVENTDATA_TASKLOGINFO,
+     '__module__' : 'src.ray.protobuf.export_api.export_task_event_pb2'
+     # @@protoc_insertion_point(class_scope:ray.rpc.ExportTaskEventData.TaskLogInfo)
+     })
+   ,
+
+   'TaskStateUpdate' : _reflection.GeneratedProtocolMessageType('TaskStateUpdate', (_message.Message,), {
+
+     'StateTsNsEntry' : _reflection.GeneratedProtocolMessageType('StateTsNsEntry', (_message.Message,), {
+       'DESCRIPTOR' : _EXPORTTASKEVENTDATA_TASKSTATEUPDATE_STATETSNSENTRY,
+       '__module__' : 'src.ray.protobuf.export_api.export_task_event_pb2'
+       # @@protoc_insertion_point(class_scope:ray.rpc.ExportTaskEventData.TaskStateUpdate.StateTsNsEntry)
+       })
+     ,
+     'DESCRIPTOR' : _EXPORTTASKEVENTDATA_TASKSTATEUPDATE,
+     '__module__' : 'src.ray.protobuf.export_api.export_task_event_pb2'
+     # @@protoc_insertion_point(class_scope:ray.rpc.ExportTaskEventData.TaskStateUpdate)
+     })
+   ,
+
+   'TaskInfoEntry' : _reflection.GeneratedProtocolMessageType('TaskInfoEntry', (_message.Message,), {
+
+     'RequiredResourcesEntry' : _reflection.GeneratedProtocolMessageType('RequiredResourcesEntry', (_message.Message,), {
+       'DESCRIPTOR' : _EXPORTTASKEVENTDATA_TASKINFOENTRY_REQUIREDRESOURCESENTRY,
+       '__module__' : 'src.ray.protobuf.export_api.export_task_event_pb2'
+       # @@protoc_insertion_point(class_scope:ray.rpc.ExportTaskEventData.TaskInfoEntry.RequiredResourcesEntry)
+       })
+     ,
+     'DESCRIPTOR' : _EXPORTTASKEVENTDATA_TASKINFOENTRY,
+     '__module__' : 'src.ray.protobuf.export_api.export_task_event_pb2'
+     # @@protoc_insertion_point(class_scope:ray.rpc.ExportTaskEventData.TaskInfoEntry)
+     })
+   ,
+
+   'ProfileEventEntry' : _reflection.GeneratedProtocolMessageType('ProfileEventEntry', (_message.Message,), {
+     'DESCRIPTOR' : _EXPORTTASKEVENTDATA_PROFILEEVENTENTRY,
+     '__module__' : 'src.ray.protobuf.export_api.export_task_event_pb2'
+     # @@protoc_insertion_point(class_scope:ray.rpc.ExportTaskEventData.ProfileEventEntry)
+     })
+   ,
+
+   'ProfileEvents' : _reflection.GeneratedProtocolMessageType('ProfileEvents', (_message.Message,), {
+     'DESCRIPTOR' : _EXPORTTASKEVENTDATA_PROFILEEVENTS,
+     '__module__' : 'src.ray.protobuf.export_api.export_task_event_pb2'
+     # @@protoc_insertion_point(class_scope:ray.rpc.ExportTaskEventData.ProfileEvents)
+     })
+   ,
+   'DESCRIPTOR' : _EXPORTTASKEVENTDATA,
+   '__module__' : 'src.ray.protobuf.export_api.export_task_event_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.ExportTaskEventData)
+   })
+ _sym_db.RegisterMessage(ExportTaskEventData)
+ _sym_db.RegisterMessage(ExportTaskEventData.RayErrorInfo)
+ _sym_db.RegisterMessage(ExportTaskEventData.TaskLogInfo)
+ _sym_db.RegisterMessage(ExportTaskEventData.TaskStateUpdate)
+ _sym_db.RegisterMessage(ExportTaskEventData.TaskStateUpdate.StateTsNsEntry)
+ _sym_db.RegisterMessage(ExportTaskEventData.TaskInfoEntry)
+ _sym_db.RegisterMessage(ExportTaskEventData.TaskInfoEntry.RequiredResourcesEntry)
+ _sym_db.RegisterMessage(ExportTaskEventData.ProfileEventEntry)
+ _sym_db.RegisterMessage(ExportTaskEventData.ProfileEvents)
+
+ if _descriptor._USE_C_DESCRIPTORS == False:
+
+   DESCRIPTOR._options = None
+   DESCRIPTOR._serialized_options = b'\370\001\001'
+   _EXPORTTASKEVENTDATA_TASKSTATEUPDATE_STATETSNSENTRY._options = None
+   _EXPORTTASKEVENTDATA_TASKSTATEUPDATE_STATETSNSENTRY._serialized_options = b'8\001'
+   _EXPORTTASKEVENTDATA_TASKINFOENTRY_REQUIREDRESOURCESENTRY._options = None
+   _EXPORTTASKEVENTDATA_TASKINFOENTRY_REQUIREDRESOURCESENTRY._serialized_options = b'8\001'
+   _EXPORTTASKEVENTDATA._serialized_start=150
+   _EXPORTTASKEVENTDATA._serialized_end=2546
+   _EXPORTTASKEVENTDATA_RAYERRORINFO._serialized_start=514
+   _EXPORTTASKEVENTDATA_RAYERRORINFO._serialized_end=616
+   _EXPORTTASKEVENTDATA_TASKLOGINFO._serialized_start=619
+   _EXPORTTASKEVENTDATA_TASKLOGINFO._serialized_end=956
+   _EXPORTTASKEVENTDATA_TASKSTATEUPDATE._serialized_start=959
+   _EXPORTTASKEVENTDATA_TASKSTATEUPDATE._serialized_end=1541
+   _EXPORTTASKEVENTDATA_TASKSTATEUPDATE_STATETSNSENTRY._serialized_start=1384
+   _EXPORTTASKEVENTDATA_TASKSTATEUPDATE_STATETSNSENTRY._serialized_end=1444
+   _EXPORTTASKEVENTDATA_TASKINFOENTRY._serialized_start=1544
+   _EXPORTTASKEVENTDATA_TASKINFOENTRY._serialized_end=2129
+   _EXPORTTASKEVENTDATA_TASKINFOENTRY_REQUIREDRESOURCESENTRY._serialized_start=2025
+   _EXPORTTASKEVENTDATA_TASKINFOENTRY_REQUIREDRESOURCESENTRY._serialized_end=2093
+   _EXPORTTASKEVENTDATA_PROFILEEVENTENTRY._serialized_start=2132
+   _EXPORTTASKEVENTDATA_PROFILEEVENTENTRY._serialized_end=2291
+   _EXPORTTASKEVENTDATA_PROFILEEVENTS._serialized_start=2294
+   _EXPORTTASKEVENTDATA_PROFILEEVENTS._serialized_end=2495
+ # @@protoc_insertion_point(module_scope)
infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/export_task_event_pb2_grpc.py ADDED
@@ -0,0 +1,4 @@
+ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+ """Client and server classes corresponding to protobuf-defined services."""
+ import grpc
+
infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/gcs_pb2.py ADDED
@@ -0,0 +1,542 @@
+ # -*- coding: utf-8 -*-
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
+ # source: src/ray/protobuf/gcs.proto
+ """Generated protocol buffer code."""
+ from google.protobuf.internal import enum_type_wrapper
+ from google.protobuf import descriptor as _descriptor
+ from google.protobuf import descriptor_pool as _descriptor_pool
+ from google.protobuf import message as _message
+ from google.protobuf import reflection as _reflection
+ from google.protobuf import symbol_database as _symbol_database
+ # @@protoc_insertion_point(imports)
+
+ _sym_db = _symbol_database.Default()
+
+
+ from . import common_pb2 as src_dot_ray_dot_protobuf_dot_common__pb2
+
+
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1asrc/ray/protobuf/gcs.proto\x12\x07ray.rpc\x1a\x1dsrc/ray/protobuf/common.proto\"m\n\x08GcsEntry\x12\x37\n\x0b\x63hange_mode\x18\x01 \x01(\x0e\x32\x16.ray.rpc.GcsChangeModeR\nchangeMode\x12\x0e\n\x02id\x18\x02 \x01(\x0cR\x02id\x12\x18\n\x07\x65ntries\x18\x03 \x03(\x0cR\x07\x65ntries\"\xbc\n\n\x0e\x41\x63torTableData\x12\x19\n\x08\x61\x63tor_id\x18\x01 \x01(\x0cR\x07\x61\x63torId\x12\x1b\n\tparent_id\x18\x02 \x01(\x0cR\x08parentId\x12\x15\n\x06job_id\x18\x04 \x01(\x0cR\x05jobId\x12\x38\n\x05state\x18\x06 \x01(\x0e\x32\".ray.rpc.ActorTableData.ActorStateR\x05state\x12!\n\x0cmax_restarts\x18\x07 \x01(\x03R\x0bmaxRestarts\x12!\n\x0cnum_restarts\x18\x08 \x01(\x04R\x0bnumRestarts\x12*\n\x07\x61\x64\x64ress\x18\t \x01(\x0b\x32\x10.ray.rpc.AddressR\x07\x61\x64\x64ress\x12\x35\n\rowner_address\x18\n \x01(\x0b\x32\x10.ray.rpc.AddressR\x0cownerAddress\x12\x1f\n\x0bis_detached\x18\x0b \x01(\x08R\nisDetached\x12\x12\n\x04name\x18\x0c \x01(\tR\x04name\x12\x1c\n\ttimestamp\x18\r \x01(\x01R\ttimestamp\x12\x44\n\x10resource_mapping\x18\x0f \x03(\x0b\x32\x19.ray.rpc.ResourceMapEntryR\x0fresourceMapping\x12\x10\n\x03pid\x18\x10 \x01(\rR\x03pid\x12L\n\x13\x66unction_descriptor\x18\x11 \x01(\x0b\x32\x1b.ray.rpc.FunctionDescriptorR\x12\x66unctionDescriptor\x12#\n\rray_namespace\x18\x13 \x01(\tR\x0crayNamespace\x12\x1d\n\nstart_time\x18\x14 \x01(\x04R\tstartTime\x12\x19\n\x08\x65nd_time\x18\x15 \x01(\x04R\x07\x65ndTime\x12\x34\n\x16serialized_runtime_env\x18\x16 \x01(\tR\x14serializedRuntimeEnv\x12\x1d\n\nclass_name\x18\x17 \x01(\tR\tclassName\x12\x39\n\x0b\x64\x65\x61th_cause\x18\x18 \x01(\x0b\x32\x18.ray.rpc.ActorDeathCauseR\ndeathCause\x12]\n\x12required_resources\x18\x1c \x03(\x0b\x32..ray.rpc.ActorTableData.RequiredResourcesEntryR\x11requiredResources\x12\x1c\n\x07node_id\x18\x1d \x01(\x0cH\x00R\x06nodeId\x88\x01\x01\x12\x31\n\x12placement_group_id\x18\x1e \x01(\x0cH\x01R\x10placementGroupId\x88\x01\x01\x12\x1b\n\trepr_name\x18\x1f \x01(\tR\x08reprName\x12\x1c\n\tpreempted\x18 \x01(\x08R\tpreempted\x12Y\n*num_restarts_due_to_lineage_reconstruction\x18! \x01(\x04R%numRestartsDueToLineageReconstruction\x1a\x44\n\x16RequiredResourcesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\x01R\x05value:\x02\x38\x01\"a\n\nActorState\x12\x18\n\x14\x44\x45PENDENCIES_UNREADY\x10\x00\x12\x14\n\x10PENDING_CREATION\x10\x01\x12\t\n\x05\x41LIVE\x10\x02\x12\x0e\n\nRESTARTING\x10\x03\x12\x08\n\x04\x44\x45\x41\x44\x10\x04\x42\n\n\x08_node_idB\x15\n\x13_placement_group_id\"~\n\x0e\x45rrorTableData\x12\x15\n\x06job_id\x18\x01 \x01(\x0cR\x05jobId\x12\x12\n\x04type\x18\x02 \x01(\tR\x04type\x12#\n\rerror_message\x18\x03 \x01(\tR\x0c\x65rrorMessage\x12\x1c\n\ttimestamp\x18\x04 \x01(\x01R\ttimestamp\"\x9f\x01\n\x11ProfileEventEntry\x12\x1d\n\nstart_time\x18\x01 \x01(\x03R\tstartTime\x12\x19\n\x08\x65nd_time\x18\x02 \x01(\x03R\x07\x65ndTime\x12\"\n\nextra_data\x18\x03 \x01(\tH\x00R\textraData\x88\x01\x01\x12\x1d\n\nevent_name\x18\x04 \x01(\tR\teventNameB\r\n\x0b_extra_data\"\xb5\x01\n\rProfileEvents\x12%\n\x0e\x63omponent_type\x18\x01 \x01(\tR\rcomponentType\x12!\n\x0c\x63omponent_id\x18\x02 \x01(\x0cR\x0b\x63omponentId\x12&\n\x0fnode_ip_address\x18\x03 \x01(\tR\rnodeIpAddress\x12\x32\n\x06\x65vents\x18\x04 \x03(\x0b\x32\x1a.ray.rpc.ProfileEventEntryR\x06\x65vents\"\xd1\x02\n\x0bTaskLogInfo\x12$\n\x0bstdout_file\x18\x01 \x01(\tH\x00R\nstdoutFile\x88\x01\x01\x12$\n\x0bstderr_file\x18\x02 \x01(\tH\x01R\nstderrFile\x88\x01\x01\x12&\n\x0cstdout_start\x18\x03 \x01(\x05H\x02R\x0bstdoutStart\x88\x01\x01\x12\"\n\nstdout_end\x18\x04 \x01(\x05H\x03R\tstdoutEnd\x88\x01\x01\x12&\n\x0cstderr_start\x18\x05 \x01(\x05H\x04R\x0bstderrStart\x88\x01\x01\x12\"\n\nstderr_end\x18\x06 \x01(\x05H\x05R\tstderrEnd\x88\x01\x01\x42\x0e\n\x0c_stdout_fileB\x0e\n\x0c_stderr_fileB\x0f\n\r_stdout_startB\r\n\x0b_stdout_endB\x0f\n\r_stderr_startB\r\n\x0b_stderr_end\"\xcb\x04\n\x0fTaskStateUpdate\x12\x1c\n\x07node_id\x18\x01 \x01(\x0cH\x00R\x06nodeId\x88\x01\x01\x12 \n\tworker_id\x18\x08 \x01(\x0cH\x01R\x08workerId\x88\x01\x01\x12\x39\n\nerror_info\x18\t \x01(\x0b\x32\x15.ray.rpc.RayErrorInfoH\x02R\terrorInfo\x88\x01\x01\x12=\n\rtask_log_info\x18\n \x01(\x0b\x32\x14.ray.rpc.TaskLogInfoH\x03R\x0btaskLogInfo\x88\x01\x01\x12+\n\x0f\x61\x63tor_repr_name\x18\x0b \x01(\tH\x04R\ractorReprName\x88\x01\x01\x12\"\n\nworker_pid\x18\x0c \x01(\x05H\x05R\tworkerPid\x88\x01\x01\x12\x31\n\x12is_debugger_paused\x18\r \x01(\x08H\x06R\x10isDebuggerPaused\x88\x01\x01\x12G\n\x0bstate_ts_ns\x18\x0e \x03(\x0b\x32\'.ray.rpc.TaskStateUpdate.StateTsNsEntryR\tstateTsNs\x1a<\n\x0eStateTsNsEntry\x12\x10\n\x03key\x18\x01 \x01(\x05R\x03key\x12\x14\n\x05value\x18\x02 \x01(\x03R\x05value:\x02\x38\x01\x42\n\n\x08_node_idB\x0c\n\n_worker_idB\r\n\x0b_error_infoB\x10\n\x0e_task_log_infoB\x12\n\x10_actor_repr_nameB\r\n\x0b_worker_pidB\x15\n\x13_is_debugger_paused\"\xd8\x02\n\nTaskEvents\x12\x17\n\x07task_id\x18\x01 \x01(\x0cR\x06taskId\x12%\n\x0e\x61ttempt_number\x18\x02 \x01(\x05R\rattemptNumber\x12\x38\n\ttask_info\x18\x03 \x01(\x0b\x32\x16.ray.rpc.TaskInfoEntryH\x00R\x08taskInfo\x88\x01\x01\x12\x42\n\rstate_updates\x18\x04 \x01(\x0b\x32\x18.ray.rpc.TaskStateUpdateH\x01R\x0cstateUpdates\x88\x01\x01\x12\x42\n\x0eprofile_events\x18\x05 \x01(\x0b\x32\x16.ray.rpc.ProfileEventsH\x02R\rprofileEvents\x88\x01\x01\x12\x15\n\x06job_id\x18\x06 \x01(\x0cR\x05jobIdB\x0c\n\n_task_infoB\x10\n\x0e_state_updatesB\x11\n\x0f_profile_events\"M\n\x0bTaskAttempt\x12\x17\n\x07task_id\x18\x01 \x01(\x0cR\x06taskId\x12%\n\x0e\x61ttempt_number\x18\x02 \x01(\x05R\rattemptNumber\"\xe8\x01\n\rTaskEventData\x12\x39\n\x0e\x65vents_by_task\x18\x01 \x03(\x0b\x32\x13.ray.rpc.TaskEventsR\x0c\x65ventsByTask\x12H\n\x15\x64ropped_task_attempts\x18\x02 \x03(\x0b\x32\x14.ray.rpc.TaskAttemptR\x13\x64roppedTaskAttempts\x12;\n\x1anum_profile_events_dropped\x18\x03 \x01(\x05R\x17numProfileEventsDropped\x12\x15\n\x06job_id\x18\x04 \x01(\x0cR\x05jobId\"\xda\x01\n\x12\x41vailableResources\x12\x17\n\x07node_id\x18\x01 \x01(\x0cR\x06nodeId\x12\x64\n\x13resources_available\x18\x02 \x03(\x0b\x32\x33.ray.rpc.AvailableResources.ResourcesAvailableEntryR\x12resourcesAvailable\x1a\x45\n\x17ResourcesAvailableEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\x01R\x05value:\x02\x38\x01\"\xc2\x01\n\x0eTotalResources\x12\x17\n\x07node_id\x18\x01 \x01(\x0cR\x06nodeId\x12T\n\x0fresources_total\x18\x02 \x03(\x0b\x32+.ray.rpc.TotalResources.ResourcesTotalEntryR\x0eresourcesTotal\x1a\x41\n\x13ResourcesTotalEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\x01R\x05value:\x02\x38\x01\"\xcc\x01\n\x0cNodeSnapshot\x12\x31\n\x05state\x18\x01 \x01(\x0e\x32\x1b.ray.rpc.NodeSnapshot.StateR\x05state\x12(\n\x10idle_duration_ms\x18\x02 \x01(\x03R\x0eidleDurationMs\x12#\n\rnode_activity\x18\x03 \x03(\tR\x0cnodeActivity\":\n\x05State\x12\r\n\tUNDEFINED\x10\x00\x12\x08\n\x04IDLE\x10\x01\x12\n\n\x06\x41\x43TIVE\x10\x02\x12\x0c\n\x08\x44RAINING\x10\x03\"\x8a\t\n\x0bGcsNodeInfo\x12\x17\n\x07node_id\x18\x01 \x01(\x0cR\x06nodeId\x12\x30\n\x14node_manager_address\x18\x02 \x01(\tR\x12nodeManagerAddress\x12,\n\x12raylet_socket_name\x18\x03 \x01(\tR\x10rayletSocketName\x12\x37\n\x18object_store_socket_name\x18\x04 \x01(\tR\x15objectStoreSocketName\x12*\n\x11node_manager_port\x18\x05 \x01(\x05R\x0fnodeManagerPort\x12.\n\x13object_manager_port\x18\x06 \x01(\x05R\x11objectManagerPort\x12\x37\n\x05state\x18\x07 \x01(\x0e\x32!.ray.rpc.GcsNodeInfo.GcsNodeStateR\x05state\x12\x32\n\x15node_manager_hostname\x18\x08 \x01(\tR\x13nodeManagerHostname\x12.\n\x13metrics_export_port\x18\t \x01(\x05R\x11metricsExportPort\x12\x33\n\x16runtime_env_agent_port\x18\x1b \x01(\x05R\x13runtimeEnvAgentPort\x12Q\n\x0fresources_total\x18\x0b \x03(\x0b\x32(.ray.rpc.GcsNodeInfo.ResourcesTotalEntryR\x0eresourcesTotal\x12\x1b\n\tnode_name\x18\x0c \x01(\tR\x08nodeName\x12\x1f\n\x0binstance_id\x18\r \x01(\tR\ninstanceId\x12$\n\x0enode_type_name\x18\x0e \x01(\tR\x0cnodeTypeName\x12,\n\x12instance_type_name\x18\x0f \x01(\tR\x10instanceTypeName\x12\"\n\rstart_time_ms\x18\x17 \x01(\x04R\x0bstartTimeMs\x12\x1e\n\x0b\x65nd_time_ms\x18\x18 \x01(\x04R\tendTimeMs\x12 \n\x0cis_head_node\x18\x19 \x01(\x08R\nisHeadNode\x12\x38\n\x06labels\x18\x1a \x03(\x0b\x32 .ray.rpc.GcsNodeInfo.LabelsEntryR\x06labels\x12<\n\x0estate_snapshot\x18\x1c \x01(\x0b\x32\x15.ray.rpc.NodeSnapshotR\rstateSnapshot\x12\x35\n\ndeath_info\x18\x1d \x01(\x0b\x32\x16.ray.rpc.NodeDeathInfoR\tdeathInfo\x1a\x41\n\x13ResourcesTotalEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\x01R\x05value:\x02\x38\x01\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"#\n\x0cGcsNodeState\x12\t\n\x05\x41LIVE\x10\x00\x12\x08\n\x04\x44\x45\x41\x44\x10\x01\"\xb0\x08\n\x0bJobsAPIInfo\x12\x16\n\x06status\x18\x01 \x01(\tR\x06status\x12\x1e\n\nentrypoint\x18\x02 \x01(\tR\nentrypoint\x12\x1d\n\x07message\x18\x03 \x01(\tH\x00R\x07message\x88\x01\x01\x12\"\n\nerror_type\x18\x04 \x01(\tH\x01R\terrorType\x88\x01\x01\x12\"\n\nstart_time\x18\x05 \x01(\x04H\x02R\tstartTime\x88\x01\x01\x12\x1e\n\x08\x65nd_time\x18\x06 \x01(\x04H\x03R\x07\x65ndTime\x88\x01\x01\x12>\n\x08metadata\x18\x07 \x03(\x0b\x32\".ray.rpc.JobsAPIInfo.MetadataEntryR\x08metadata\x12-\n\x10runtime_env_json\x18\x08 \x01(\tH\x04R\x0eruntimeEnvJson\x88\x01\x01\x12\x33\n\x13\x65ntrypoint_num_cpus\x18\t \x01(\x01H\x05R\x11\x65ntrypointNumCpus\x88\x01\x01\x12\x33\n\x13\x65ntrypoint_num_gpus\x18\n \x01(\x01H\x06R\x11\x65ntrypointNumGpus\x88\x01\x01\x12`\n\x14\x65ntrypoint_resources\x18\x0b \x03(\x0b\x32-.ray.rpc.JobsAPIInfo.EntrypointResourcesEntryR\x13\x65ntrypointResources\x12>\n\x19\x64river_agent_http_address\x18\x0c \x01(\tH\x07R\x16\x64riverAgentHttpAddress\x88\x01\x01\x12)\n\x0e\x64river_node_id\x18\r \x01(\tH\x08R\x0c\x64riverNodeId\x88\x01\x01\x12-\n\x10\x64river_exit_code\x18\x0e \x01(\x05H\tR\x0e\x64riverExitCode\x88\x01\x01\x12\x30\n\x11\x65ntrypoint_memory\x18\x0f \x01(\x04H\nR\x10\x65ntrypointMemory\x88\x01\x01\x1a;\n\rMetadataEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a\x46\n\x18\x45ntrypointResourcesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\x01R\x05value:\x02\x38\x01\x42\n\n\x08_messageB\r\n\x0b_error_typeB\r\n\x0b_start_timeB\x0b\n\t_end_timeB\x13\n\x11_runtime_env_jsonB\x16\n\x14_entrypoint_num_cpusB\x16\n\x14_entrypoint_num_gpusB\x1c\n\x1a_driver_agent_http_addressB\x11\n\x0f_driver_node_idB\x13\n\x11_driver_exit_codeB\x14\n\x12_entrypoint_memory\"\xd7\x06\n\x0fWorkerTableData\x12\x19\n\x08is_alive\x18\x01 \x01(\x08R\x07isAlive\x12\x37\n\x0eworker_address\x18\x02 \x01(\x0b\x32\x10.ray.rpc.AddressR\rworkerAddress\x12\x1c\n\ttimestamp\x18\x03 \x01(\x03R\ttimestamp\x12\x34\n\x0bworker_type\x18\x05 \x01(\x0e\x32\x13.ray.rpc.WorkerTypeR\nworkerType\x12I\n\x0bworker_info\x18\x06 \x03(\x0b\x32(.ray.rpc.WorkerTableData.WorkerInfoEntryR\nworkerInfo\x12M\n\x17\x63reation_task_exception\x18\x12 \x01(\x0b\x32\x15.ray.rpc.RayExceptionR\x15\x63reationTaskException\x12\x39\n\texit_type\x18\x13 \x01(\x0e\x32\x17.ray.rpc.WorkerExitTypeH\x00R\x08\x65xitType\x88\x01\x01\x12$\n\x0b\x65xit_detail\x18\x14 \x01(\tH\x01R\nexitDetail\x88\x01\x01\x12\x10\n\x03pid\x18\x15 \x01(\rR\x03pid\x12\"\n\rstart_time_ms\x18\x17 \x01(\x04R\x0bstartTimeMs\x12\x1e\n\x0b\x65nd_time_ms\x18\x18 \x01(\x04R\tendTimeMs\x12\x31\n\x15worker_launch_time_ms\x18\x19 \x01(\x04R\x12workerLaunchTimeMs\x12\x35\n\x17worker_launched_time_ms\x18\x1a \x01(\x04R\x14workerLaunchedTimeMs\x12(\n\rdebugger_port\x18\x1b \x01(\rH\x02R\x0c\x64\x65\x62uggerPort\x88\x01\x01\x12\x31\n\x12num_paused_threads\x18\x1c \x01(\rH\x03R\x10numPausedThreads\x88\x01\x01\x1a=\n\x0fWorkerInfoEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\x0cR\x05value:\x02\x38\x01\x42\x0c\n\n_exit_typeB\x0e\n\x0c_exit_detailB\x10\n\x0e_debugger_portB\x15\n\x13_num_paused_threads\"K\n\x0fWorkerDeltaData\x12\x1b\n\traylet_id\x18\x01 \x01(\x0cR\x08rayletId\x12\x1b\n\tworker_id\x18\x02 \x01(\x0cR\x08workerId\"&\n\x0cStoredConfig\x12\x16\n\x06\x63onfig\x18\x01 \x01(\tR\x06\x63onfig\"3\n\rPubSubMessage\x12\x0e\n\x02id\x18\x01 \x01(\x0cR\x02id\x12\x12\n\x04\x64\x61ta\x18\x02 \x01(\x0cR\x04\x64\x61ta\"\xa7\x02\n\x0eResourceDemand\x12\x38\n\x05shape\x18\x01 \x03(\x0b\x32\".ray.rpc.ResourceDemand.ShapeEntryR\x05shape\x12\x39\n\x19num_ready_requests_queued\x18\x02 \x01(\x04R\x16numReadyRequestsQueued\x12\x43\n\x1enum_infeasible_requests_queued\x18\x03 
\x01(\x04R\x1bnumInfeasibleRequestsQueued\x12!\n\x0c\x62\x61\x63klog_size\x18\x04 \x01(\x03R\x0b\x62\x61\x63klogSize\x1a\x38\n\nShapeEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\x01R\x05value:\x02\x38\x01\"R\n\x0cResourceLoad\x12\x42\n\x10resource_demands\x18\x01 \x03(\x0b\x32\x17.ray.rpc.ResourceDemandR\x0fresourceDemands\"\xc5\t\n\rResourcesData\x12\x17\n\x07node_id\x18\x01 \x01(\x0cR\x06nodeId\x12_\n\x13resources_available\x18\x02 \x03(\x0b\x32..ray.rpc.ResourcesData.ResourcesAvailableEntryR\x12resourcesAvailable\x12S\n\x0fresources_total\x18\x04 \x03(\x0b\x32*.ray.rpc.ResourcesData.ResourcesTotalEntryR\x0eresourcesTotal\x12M\n\rresource_load\x18\x05 \x03(\x0b\x32(.ray.rpc.ResourcesData.ResourceLoadEntryR\x0cresourceLoad\x12J\n\x16resource_load_by_shape\x18\x07 \x01(\x0b\x32\x15.ray.rpc.ResourceLoadR\x13resourceLoadByShape\x12\x30\n\x14node_manager_address\x18\t \x01(\tR\x12nodeManagerAddress\x12.\n\x13object_pulls_queued\x18\n \x01(\x08R\x11objectPullsQueued\x12\x63\n\x15resources_normal_task\x18\x0b \x03(\x0b\x32/.ray.rpc.ResourcesData.ResourcesNormalTaskEntryR\x13resourcesNormalTask\x12\x41\n\x1dresources_normal_task_changed\x18\x0c \x01(\x08R\x1aresourcesNormalTaskChanged\x12\x45\n\x1fresources_normal_task_timestamp\x18\r \x01(\x03R\x1cresourcesNormalTaskTimestamp\x12\x44\n\x1f\x63luster_full_of_actors_detected\x18\x0e \x01(\x08R\x1b\x63lusterFullOfActorsDetected\x12(\n\x10idle_duration_ms\x18\x0f \x01(\x03R\x0eidleDurationMs\x12\x1f\n\x0bis_draining\x18\x10 \x01(\x08R\nisDraining\x12\x43\n\x1e\x64raining_deadline_timestamp_ms\x18\x12 \x01(\x03R\x1b\x64rainingDeadlineTimestampMs\x1a\x45\n\x17ResourcesAvailableEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\x01R\x05value:\x02\x38\x01\x1a\x41\n\x13ResourcesTotalEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\x01R\x05value:\x02\x38\x01\x1a?\n\x11ResourceLoadEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\x01R\x05value:\x02\x38\x01\x1a\x46\n\x18ResourcesNormalTaskEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\x01R\x05value:\x02\x38\x01J\x04\x08\x03\x10\x04J\x04\x08\x08\x10\tJ\x04\x08\x11\x10\x12\"\xe1\x01\n\x16ResourceUsageBatchData\x12,\n\x05\x62\x61tch\x18\x01 \x03(\x0b\x32\x16.ray.rpc.ResourcesDataR\x05\x62\x61tch\x12J\n\x16resource_load_by_shape\x18\x02 \x01(\x0b\x32\x15.ray.rpc.ResourceLoadR\x13resourceLoadByShape\x12M\n\x14placement_group_load\x18\x03 \x01(\x0b\x32\x1b.ray.rpc.PlacementGroupLoadR\x12placementGroupLoad\"h\n\x12PlacementGroupLoad\x12R\n\x14placement_group_data\x18\x01 \x03(\x0b\x32 .ray.rpc.PlacementGroupTableDataR\x12placementGroupData\"\xdc\x04\n\x13PlacementGroupStats\x12?\n\x1c\x63reation_request_received_ns\x18\x02 \x01(\x03R\x19\x63reationRequestReceivedNs\x12;\n\x1ascheduling_started_time_ns\x18\x03 \x01(\x03R\x17schedulingStartedTimeNs\x12\x32\n\x15scheduling_latency_us\x18\x04 \x01(\x03R\x13schedulingLatencyUs\x12\x41\n\x1e\x65nd_to_end_creation_latency_us\x18\x05 \x01(\x03R\x19\x65ndToEndCreationLatencyUs\x12-\n\x12scheduling_attempt\x18\x06 \x01(\rR\x11schedulingAttempt\x12\x33\n\x16highest_retry_delay_ms\x18\x07 \x01(\x01R\x13highestRetryDelayMs\x12W\n\x10scheduling_state\x18\x08 
\x01(\x0e\x32,.ray.rpc.PlacementGroupStats.SchedulingStateR\x0fschedulingState\"\x92\x01\n\x0fSchedulingState\x12\n\n\x06QUEUED\x10\x00\x12\x0b\n\x07REMOVED\x10\x01\x12\x16\n\x12SCHEDULING_STARTED\x10\x02\x12\x10\n\x0cNO_RESOURCES\x10\x03\x12\x0e\n\nINFEASIBLE\x10\x04\x12\x1e\n\x1a\x46\x41ILED_TO_COMMIT_RESOURCES\x10\x05\x12\x0c\n\x08\x46INISHED\x10\x06\"\xb1\x07\n\x17PlacementGroupTableData\x12,\n\x12placement_group_id\x18\x01 \x01(\x0cR\x10placementGroupId\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12)\n\x07\x62undles\x18\x03 \x03(\x0b\x32\x0f.ray.rpc.BundleR\x07\x62undles\x12\x36\n\x08strategy\x18\x04 \x01(\x0e\x32\x1a.ray.rpc.PlacementStrategyR\x08strategy\x12J\n\x05state\x18\x05 \x01(\x0e\x32\x34.ray.rpc.PlacementGroupTableData.PlacementGroupStateR\x05state\x12$\n\x0e\x63reator_job_id\x18\x06 \x01(\x0cR\x0c\x63reatorJobId\x12(\n\x10\x63reator_actor_id\x18\x07 \x01(\x0cR\x0e\x63reatorActorId\x12(\n\x10\x63reator_job_dead\x18\x08 \x01(\x08R\x0e\x63reatorJobDead\x12,\n\x12\x63reator_actor_dead\x18\t \x01(\x08R\x10\x63reatorActorDead\x12\x1f\n\x0bis_detached\x18\n \x01(\x08R\nisDetached\x12#\n\rray_namespace\x18\x0b \x01(\tR\x0crayNamespace\x12\x32\n\x05stats\x18\x0c \x01(\x0b\x32\x1c.ray.rpc.PlacementGroupStatsR\x05stats\x12\x38\n\x19max_cpu_fraction_per_node\x18\r \x01(\x01R\x15maxCpuFractionPerNode\x12-\n\x13soft_target_node_id\x18\x0e \x01(\x0cR\x10softTargetNodeId\x12P\n%placement_group_creation_timestamp_ms\x18\x0f \x01(\x03R!placementGroupCreationTimestampMs\x12j\n3placement_group_final_bundle_placement_timestamp_ms\x18\x10 \x01(\x03R-placementGroupFinalBundlePlacementTimestampMs\"\\\n\x13PlacementGroupState\x12\x0b\n\x07PENDING\x10\x00\x12\x0c\n\x08PREPARED\x10\x01\x12\x0b\n\x07\x43REATED\x10\x02\x12\x0b\n\x07REMOVED\x10\x03\x12\x10\n\x0cRESCHEDULING\x10\x04\"\xed\x03\n\x0cJobTableData\x12\x15\n\x06job_id\x18\x01 \x01(\x0cR\x05jobId\x12\x17\n\x07is_dead\x18\x02 \x01(\x08R\x06isDead\x12\x1c\n\ttimestamp\x18\x03 \x01(\x03R\ttimestamp\x12*\n\x11\x64river_ip_address\x18\x04 \x01(\tR\x0f\x64riverIpAddress\x12\x1d\n\ndriver_pid\x18\x05 \x01(\x03R\tdriverPid\x12*\n\x06\x63onfig\x18\x06 \x01(\x0b\x32\x12.ray.rpc.JobConfigR\x06\x63onfig\x12\x1d\n\nstart_time\x18\x07 \x01(\x04R\tstartTime\x12\x19\n\x08\x65nd_time\x18\x08 \x01(\x04R\x07\x65ndTime\x12\x1e\n\nentrypoint\x18\t \x01(\tR\nentrypoint\x12\x34\n\x08job_info\x18\n \x01(\x0b\x32\x14.ray.rpc.JobsAPIInfoH\x00R\x07jobInfo\x88\x01\x01\x12-\n\x10is_running_tasks\x18\x0b \x01(\x08H\x01R\x0eisRunningTasks\x88\x01\x01\x12\x37\n\x0e\x64river_address\x18\x0c 
\x01(\x0b\x32\x10.ray.rpc.AddressR\rdriverAddressB\x0b\n\t_job_infoB\x13\n\x11_is_running_tasks*\xdc\x02\n\x0bTablePrefix\x12\x14\n\x10TABLE_PREFIX_MIN\x10\x00\x12\n\n\x06UNUSED\x10\x01\x12\x08\n\x04TASK\x10\x02\x12\x0f\n\x0bRAYLET_TASK\x10\x03\x12\x08\n\x04NODE\x10\x04\x12\n\n\x06OBJECT\x10\x05\x12\t\n\x05\x41\x43TOR\x10\x06\x12\x0c\n\x08\x46UNCTION\x10\x07\x12\x17\n\x13TASK_RECONSTRUCTION\x10\x08\x12\x18\n\x14RESOURCE_USAGE_BATCH\x10\t\x12\x07\n\x03JOB\x10\n\x12\x0e\n\nTASK_LEASE\x10\x0c\x12\x11\n\rNODE_RESOURCE\x10\r\x12\x10\n\x0c\x44IRECT_ACTOR\x10\x0e\x12\x0b\n\x07WORKERS\x10\x0f\x12\x13\n\x0fINTERNAL_CONFIG\x10\x10\x12\x1c\n\x18PLACEMENT_GROUP_SCHEDULE\x10\x11\x12\x13\n\x0fPLACEMENT_GROUP\x10\x12\x12\x06\n\x02KV\x10\x13\x12\x13\n\x0f\x41\x43TOR_TASK_SPEC\x10\x14*\xbe\x02\n\x0bTablePubsub\x12\x14\n\x10TABLE_PUBSUB_MIN\x10\x00\x12\x0e\n\nNO_PUBLISH\x10\x01\x12\x0f\n\x0bTASK_PUBSUB\x10\x02\x12\x16\n\x12RAYLET_TASK_PUBSUB\x10\x03\x12\x0f\n\x0bNODE_PUBSUB\x10\x04\x12\x11\n\rOBJECT_PUBSUB\x10\x05\x12\x10\n\x0c\x41\x43TOR_PUBSUB\x10\x06\x12\x1f\n\x1bRESOURCE_USAGE_BATCH_PUBSUB\x10\x07\x12\x15\n\x11TASK_LEASE_PUBSUB\x10\x08\x12\x0e\n\nJOB_PUBSUB\x10\t\x12\x18\n\x14NODE_RESOURCE_PUBSUB\x10\n\x12\x17\n\x13\x44IRECT_ACTOR_PUBSUB\x10\x0b\x12\x19\n\x15WORKER_FAILURE_PUBSUB\x10\x0c\x12\x14\n\x10TABLE_PUBSUB_MAX\x10\r*.\n\rGcsChangeMode\x12\x11\n\rAPPEND_OR_ADD\x10\x00\x12\n\n\x06REMOVE\x10\x01\x42\x1d\n\x18io.ray.runtime.generated\xf8\x01\x01\x62\x06proto3')
20
+
21
+ _TABLEPREFIX = DESCRIPTOR.enum_types_by_name['TablePrefix']
22
+ TablePrefix = enum_type_wrapper.EnumTypeWrapper(_TABLEPREFIX)
23
+ _TABLEPUBSUB = DESCRIPTOR.enum_types_by_name['TablePubsub']
24
+ TablePubsub = enum_type_wrapper.EnumTypeWrapper(_TABLEPUBSUB)
25
+ _GCSCHANGEMODE = DESCRIPTOR.enum_types_by_name['GcsChangeMode']
26
+ GcsChangeMode = enum_type_wrapper.EnumTypeWrapper(_GCSCHANGEMODE)
27
+ TABLE_PREFIX_MIN = 0
28
+ UNUSED = 1
29
+ TASK = 2
30
+ RAYLET_TASK = 3
31
+ NODE = 4
32
+ OBJECT = 5
33
+ ACTOR = 6
34
+ FUNCTION = 7
35
+ TASK_RECONSTRUCTION = 8
36
+ RESOURCE_USAGE_BATCH = 9
37
+ JOB = 10
38
+ TASK_LEASE = 12
39
+ NODE_RESOURCE = 13
40
+ DIRECT_ACTOR = 14
41
+ WORKERS = 15
42
+ INTERNAL_CONFIG = 16
43
+ PLACEMENT_GROUP_SCHEDULE = 17
44
+ PLACEMENT_GROUP = 18
45
+ KV = 19
46
+ ACTOR_TASK_SPEC = 20
47
+ TABLE_PUBSUB_MIN = 0
48
+ NO_PUBLISH = 1
49
+ TASK_PUBSUB = 2
50
+ RAYLET_TASK_PUBSUB = 3
51
+ NODE_PUBSUB = 4
52
+ OBJECT_PUBSUB = 5
53
+ ACTOR_PUBSUB = 6
54
+ RESOURCE_USAGE_BATCH_PUBSUB = 7
55
+ TASK_LEASE_PUBSUB = 8
56
+ JOB_PUBSUB = 9
57
+ NODE_RESOURCE_PUBSUB = 10
58
+ DIRECT_ACTOR_PUBSUB = 11
59
+ WORKER_FAILURE_PUBSUB = 12
60
+ TABLE_PUBSUB_MAX = 13
61
+ APPEND_OR_ADD = 0
62
+ REMOVE = 1
63
+
64
+
65
+ _GCSENTRY = DESCRIPTOR.message_types_by_name['GcsEntry']
66
+ _ACTORTABLEDATA = DESCRIPTOR.message_types_by_name['ActorTableData']
67
+ _ACTORTABLEDATA_REQUIREDRESOURCESENTRY = _ACTORTABLEDATA.nested_types_by_name['RequiredResourcesEntry']
68
+ _ERRORTABLEDATA = DESCRIPTOR.message_types_by_name['ErrorTableData']
69
+ _PROFILEEVENTENTRY = DESCRIPTOR.message_types_by_name['ProfileEventEntry']
70
+ _PROFILEEVENTS = DESCRIPTOR.message_types_by_name['ProfileEvents']
71
+ _TASKLOGINFO = DESCRIPTOR.message_types_by_name['TaskLogInfo']
72
+ _TASKSTATEUPDATE = DESCRIPTOR.message_types_by_name['TaskStateUpdate']
73
+ _TASKSTATEUPDATE_STATETSNSENTRY = _TASKSTATEUPDATE.nested_types_by_name['StateTsNsEntry']
74
+ _TASKEVENTS = DESCRIPTOR.message_types_by_name['TaskEvents']
75
+ _TASKATTEMPT = DESCRIPTOR.message_types_by_name['TaskAttempt']
76
+ _TASKEVENTDATA = DESCRIPTOR.message_types_by_name['TaskEventData']
77
+ _AVAILABLERESOURCES = DESCRIPTOR.message_types_by_name['AvailableResources']
78
+ _AVAILABLERESOURCES_RESOURCESAVAILABLEENTRY = _AVAILABLERESOURCES.nested_types_by_name['ResourcesAvailableEntry']
79
+ _TOTALRESOURCES = DESCRIPTOR.message_types_by_name['TotalResources']
80
+ _TOTALRESOURCES_RESOURCESTOTALENTRY = _TOTALRESOURCES.nested_types_by_name['ResourcesTotalEntry']
81
+ _NODESNAPSHOT = DESCRIPTOR.message_types_by_name['NodeSnapshot']
82
+ _GCSNODEINFO = DESCRIPTOR.message_types_by_name['GcsNodeInfo']
83
+ _GCSNODEINFO_RESOURCESTOTALENTRY = _GCSNODEINFO.nested_types_by_name['ResourcesTotalEntry']
84
+ _GCSNODEINFO_LABELSENTRY = _GCSNODEINFO.nested_types_by_name['LabelsEntry']
85
+ _JOBSAPIINFO = DESCRIPTOR.message_types_by_name['JobsAPIInfo']
86
+ _JOBSAPIINFO_METADATAENTRY = _JOBSAPIINFO.nested_types_by_name['MetadataEntry']
87
+ _JOBSAPIINFO_ENTRYPOINTRESOURCESENTRY = _JOBSAPIINFO.nested_types_by_name['EntrypointResourcesEntry']
88
+ _WORKERTABLEDATA = DESCRIPTOR.message_types_by_name['WorkerTableData']
89
+ _WORKERTABLEDATA_WORKERINFOENTRY = _WORKERTABLEDATA.nested_types_by_name['WorkerInfoEntry']
90
+ _WORKERDELTADATA = DESCRIPTOR.message_types_by_name['WorkerDeltaData']
91
+ _STOREDCONFIG = DESCRIPTOR.message_types_by_name['StoredConfig']
92
+ _PUBSUBMESSAGE = DESCRIPTOR.message_types_by_name['PubSubMessage']
93
+ _RESOURCEDEMAND = DESCRIPTOR.message_types_by_name['ResourceDemand']
94
+ _RESOURCEDEMAND_SHAPEENTRY = _RESOURCEDEMAND.nested_types_by_name['ShapeEntry']
95
+ _RESOURCELOAD = DESCRIPTOR.message_types_by_name['ResourceLoad']
96
+ _RESOURCESDATA = DESCRIPTOR.message_types_by_name['ResourcesData']
97
+ _RESOURCESDATA_RESOURCESAVAILABLEENTRY = _RESOURCESDATA.nested_types_by_name['ResourcesAvailableEntry']
98
+ _RESOURCESDATA_RESOURCESTOTALENTRY = _RESOURCESDATA.nested_types_by_name['ResourcesTotalEntry']
99
+ _RESOURCESDATA_RESOURCELOADENTRY = _RESOURCESDATA.nested_types_by_name['ResourceLoadEntry']
100
+ _RESOURCESDATA_RESOURCESNORMALTASKENTRY = _RESOURCESDATA.nested_types_by_name['ResourcesNormalTaskEntry']
101
+ _RESOURCEUSAGEBATCHDATA = DESCRIPTOR.message_types_by_name['ResourceUsageBatchData']
102
+ _PLACEMENTGROUPLOAD = DESCRIPTOR.message_types_by_name['PlacementGroupLoad']
103
+ _PLACEMENTGROUPSTATS = DESCRIPTOR.message_types_by_name['PlacementGroupStats']
104
+ _PLACEMENTGROUPTABLEDATA = DESCRIPTOR.message_types_by_name['PlacementGroupTableData']
105
+ _JOBTABLEDATA = DESCRIPTOR.message_types_by_name['JobTableData']
106
+ _ACTORTABLEDATA_ACTORSTATE = _ACTORTABLEDATA.enum_types_by_name['ActorState']
107
+ _NODESNAPSHOT_STATE = _NODESNAPSHOT.enum_types_by_name['State']
108
+ _GCSNODEINFO_GCSNODESTATE = _GCSNODEINFO.enum_types_by_name['GcsNodeState']
109
+ _PLACEMENTGROUPSTATS_SCHEDULINGSTATE = _PLACEMENTGROUPSTATS.enum_types_by_name['SchedulingState']
110
+ _PLACEMENTGROUPTABLEDATA_PLACEMENTGROUPSTATE = _PLACEMENTGROUPTABLEDATA.enum_types_by_name['PlacementGroupState']
111
+ GcsEntry = _reflection.GeneratedProtocolMessageType('GcsEntry', (_message.Message,), {
112
+ 'DESCRIPTOR' : _GCSENTRY,
113
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
114
+ # @@protoc_insertion_point(class_scope:ray.rpc.GcsEntry)
115
+ })
116
+ _sym_db.RegisterMessage(GcsEntry)
117
+
118
+ ActorTableData = _reflection.GeneratedProtocolMessageType('ActorTableData', (_message.Message,), {
119
+
120
+ 'RequiredResourcesEntry' : _reflection.GeneratedProtocolMessageType('RequiredResourcesEntry', (_message.Message,), {
121
+ 'DESCRIPTOR' : _ACTORTABLEDATA_REQUIREDRESOURCESENTRY,
122
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
123
+ # @@protoc_insertion_point(class_scope:ray.rpc.ActorTableData.RequiredResourcesEntry)
124
+ })
125
+ ,
126
+ 'DESCRIPTOR' : _ACTORTABLEDATA,
127
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
128
+ # @@protoc_insertion_point(class_scope:ray.rpc.ActorTableData)
129
+ })
130
+ _sym_db.RegisterMessage(ActorTableData)
131
+ _sym_db.RegisterMessage(ActorTableData.RequiredResourcesEntry)
132
+
133
+ ErrorTableData = _reflection.GeneratedProtocolMessageType('ErrorTableData', (_message.Message,), {
134
+ 'DESCRIPTOR' : _ERRORTABLEDATA,
135
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
136
+ # @@protoc_insertion_point(class_scope:ray.rpc.ErrorTableData)
137
+ })
138
+ _sym_db.RegisterMessage(ErrorTableData)
139
+
140
+ ProfileEventEntry = _reflection.GeneratedProtocolMessageType('ProfileEventEntry', (_message.Message,), {
141
+ 'DESCRIPTOR' : _PROFILEEVENTENTRY,
142
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
143
+ # @@protoc_insertion_point(class_scope:ray.rpc.ProfileEventEntry)
144
+ })
145
+ _sym_db.RegisterMessage(ProfileEventEntry)
146
+
147
+ ProfileEvents = _reflection.GeneratedProtocolMessageType('ProfileEvents', (_message.Message,), {
148
+ 'DESCRIPTOR' : _PROFILEEVENTS,
149
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
150
+ # @@protoc_insertion_point(class_scope:ray.rpc.ProfileEvents)
151
+ })
152
+ _sym_db.RegisterMessage(ProfileEvents)
153
+
154
+ TaskLogInfo = _reflection.GeneratedProtocolMessageType('TaskLogInfo', (_message.Message,), {
155
+ 'DESCRIPTOR' : _TASKLOGINFO,
156
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
157
+ # @@protoc_insertion_point(class_scope:ray.rpc.TaskLogInfo)
158
+ })
159
+ _sym_db.RegisterMessage(TaskLogInfo)
160
+
161
+ TaskStateUpdate = _reflection.GeneratedProtocolMessageType('TaskStateUpdate', (_message.Message,), {
162
+
163
+ 'StateTsNsEntry' : _reflection.GeneratedProtocolMessageType('StateTsNsEntry', (_message.Message,), {
164
+ 'DESCRIPTOR' : _TASKSTATEUPDATE_STATETSNSENTRY,
165
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
166
+ # @@protoc_insertion_point(class_scope:ray.rpc.TaskStateUpdate.StateTsNsEntry)
167
+ })
168
+ ,
169
+ 'DESCRIPTOR' : _TASKSTATEUPDATE,
170
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
171
+ # @@protoc_insertion_point(class_scope:ray.rpc.TaskStateUpdate)
172
+ })
173
+ _sym_db.RegisterMessage(TaskStateUpdate)
174
+ _sym_db.RegisterMessage(TaskStateUpdate.StateTsNsEntry)
175
+
176
+ TaskEvents = _reflection.GeneratedProtocolMessageType('TaskEvents', (_message.Message,), {
177
+ 'DESCRIPTOR' : _TASKEVENTS,
178
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
179
+ # @@protoc_insertion_point(class_scope:ray.rpc.TaskEvents)
180
+ })
181
+ _sym_db.RegisterMessage(TaskEvents)
182
+
183
+ TaskAttempt = _reflection.GeneratedProtocolMessageType('TaskAttempt', (_message.Message,), {
184
+ 'DESCRIPTOR' : _TASKATTEMPT,
185
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
186
+ # @@protoc_insertion_point(class_scope:ray.rpc.TaskAttempt)
187
+ })
188
+ _sym_db.RegisterMessage(TaskAttempt)
189
+
190
+ TaskEventData = _reflection.GeneratedProtocolMessageType('TaskEventData', (_message.Message,), {
191
+ 'DESCRIPTOR' : _TASKEVENTDATA,
192
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
193
+ # @@protoc_insertion_point(class_scope:ray.rpc.TaskEventData)
194
+ })
195
+ _sym_db.RegisterMessage(TaskEventData)
196
+
197
+ AvailableResources = _reflection.GeneratedProtocolMessageType('AvailableResources', (_message.Message,), {
198
+
199
+ 'ResourcesAvailableEntry' : _reflection.GeneratedProtocolMessageType('ResourcesAvailableEntry', (_message.Message,), {
200
+ 'DESCRIPTOR' : _AVAILABLERESOURCES_RESOURCESAVAILABLEENTRY,
201
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
202
+ # @@protoc_insertion_point(class_scope:ray.rpc.AvailableResources.ResourcesAvailableEntry)
203
+ })
204
+ ,
205
+ 'DESCRIPTOR' : _AVAILABLERESOURCES,
206
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
207
+ # @@protoc_insertion_point(class_scope:ray.rpc.AvailableResources)
208
+ })
209
+ _sym_db.RegisterMessage(AvailableResources)
210
+ _sym_db.RegisterMessage(AvailableResources.ResourcesAvailableEntry)
211
+
212
+ TotalResources = _reflection.GeneratedProtocolMessageType('TotalResources', (_message.Message,), {
213
+
214
+ 'ResourcesTotalEntry' : _reflection.GeneratedProtocolMessageType('ResourcesTotalEntry', (_message.Message,), {
215
+ 'DESCRIPTOR' : _TOTALRESOURCES_RESOURCESTOTALENTRY,
216
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
217
+ # @@protoc_insertion_point(class_scope:ray.rpc.TotalResources.ResourcesTotalEntry)
218
+ })
219
+ ,
220
+ 'DESCRIPTOR' : _TOTALRESOURCES,
221
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
222
+ # @@protoc_insertion_point(class_scope:ray.rpc.TotalResources)
223
+ })
224
+ _sym_db.RegisterMessage(TotalResources)
225
+ _sym_db.RegisterMessage(TotalResources.ResourcesTotalEntry)
226
+
227
+ NodeSnapshot = _reflection.GeneratedProtocolMessageType('NodeSnapshot', (_message.Message,), {
228
+ 'DESCRIPTOR' : _NODESNAPSHOT,
229
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
230
+ # @@protoc_insertion_point(class_scope:ray.rpc.NodeSnapshot)
231
+ })
232
+ _sym_db.RegisterMessage(NodeSnapshot)
233
+
234
+ GcsNodeInfo = _reflection.GeneratedProtocolMessageType('GcsNodeInfo', (_message.Message,), {
235
+
236
+ 'ResourcesTotalEntry' : _reflection.GeneratedProtocolMessageType('ResourcesTotalEntry', (_message.Message,), {
237
+ 'DESCRIPTOR' : _GCSNODEINFO_RESOURCESTOTALENTRY,
238
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
239
+ # @@protoc_insertion_point(class_scope:ray.rpc.GcsNodeInfo.ResourcesTotalEntry)
240
+ })
241
+ ,
242
+
243
+ 'LabelsEntry' : _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), {
244
+ 'DESCRIPTOR' : _GCSNODEINFO_LABELSENTRY,
245
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
246
+ # @@protoc_insertion_point(class_scope:ray.rpc.GcsNodeInfo.LabelsEntry)
247
+ })
248
+ ,
249
+ 'DESCRIPTOR' : _GCSNODEINFO,
250
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
251
+ # @@protoc_insertion_point(class_scope:ray.rpc.GcsNodeInfo)
252
+ })
253
+ _sym_db.RegisterMessage(GcsNodeInfo)
254
+ _sym_db.RegisterMessage(GcsNodeInfo.ResourcesTotalEntry)
255
+ _sym_db.RegisterMessage(GcsNodeInfo.LabelsEntry)
256
+
257
+ JobsAPIInfo = _reflection.GeneratedProtocolMessageType('JobsAPIInfo', (_message.Message,), {
258
+
259
+ 'MetadataEntry' : _reflection.GeneratedProtocolMessageType('MetadataEntry', (_message.Message,), {
260
+ 'DESCRIPTOR' : _JOBSAPIINFO_METADATAENTRY,
261
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
262
+ # @@protoc_insertion_point(class_scope:ray.rpc.JobsAPIInfo.MetadataEntry)
263
+ })
264
+ ,
265
+
266
+ 'EntrypointResourcesEntry' : _reflection.GeneratedProtocolMessageType('EntrypointResourcesEntry', (_message.Message,), {
267
+ 'DESCRIPTOR' : _JOBSAPIINFO_ENTRYPOINTRESOURCESENTRY,
268
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
269
+ # @@protoc_insertion_point(class_scope:ray.rpc.JobsAPIInfo.EntrypointResourcesEntry)
270
+ })
271
+ ,
272
+ 'DESCRIPTOR' : _JOBSAPIINFO,
273
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
274
+ # @@protoc_insertion_point(class_scope:ray.rpc.JobsAPIInfo)
275
+ })
276
+ _sym_db.RegisterMessage(JobsAPIInfo)
277
+ _sym_db.RegisterMessage(JobsAPIInfo.MetadataEntry)
278
+ _sym_db.RegisterMessage(JobsAPIInfo.EntrypointResourcesEntry)
279
+
280
+ WorkerTableData = _reflection.GeneratedProtocolMessageType('WorkerTableData', (_message.Message,), {
281
+
282
+ 'WorkerInfoEntry' : _reflection.GeneratedProtocolMessageType('WorkerInfoEntry', (_message.Message,), {
283
+ 'DESCRIPTOR' : _WORKERTABLEDATA_WORKERINFOENTRY,
284
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
285
+ # @@protoc_insertion_point(class_scope:ray.rpc.WorkerTableData.WorkerInfoEntry)
286
+ })
287
+ ,
288
+ 'DESCRIPTOR' : _WORKERTABLEDATA,
289
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
290
+ # @@protoc_insertion_point(class_scope:ray.rpc.WorkerTableData)
291
+ })
292
+ _sym_db.RegisterMessage(WorkerTableData)
293
+ _sym_db.RegisterMessage(WorkerTableData.WorkerInfoEntry)
294
+
295
+ WorkerDeltaData = _reflection.GeneratedProtocolMessageType('WorkerDeltaData', (_message.Message,), {
296
+ 'DESCRIPTOR' : _WORKERDELTADATA,
297
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
298
+ # @@protoc_insertion_point(class_scope:ray.rpc.WorkerDeltaData)
299
+ })
300
+ _sym_db.RegisterMessage(WorkerDeltaData)
301
+
302
+ StoredConfig = _reflection.GeneratedProtocolMessageType('StoredConfig', (_message.Message,), {
303
+ 'DESCRIPTOR' : _STOREDCONFIG,
304
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
305
+ # @@protoc_insertion_point(class_scope:ray.rpc.StoredConfig)
306
+ })
307
+ _sym_db.RegisterMessage(StoredConfig)
308
+
309
+ PubSubMessage = _reflection.GeneratedProtocolMessageType('PubSubMessage', (_message.Message,), {
310
+ 'DESCRIPTOR' : _PUBSUBMESSAGE,
311
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
312
+ # @@protoc_insertion_point(class_scope:ray.rpc.PubSubMessage)
313
+ })
314
+ _sym_db.RegisterMessage(PubSubMessage)
315
+
316
+ ResourceDemand = _reflection.GeneratedProtocolMessageType('ResourceDemand', (_message.Message,), {
317
+
318
+ 'ShapeEntry' : _reflection.GeneratedProtocolMessageType('ShapeEntry', (_message.Message,), {
319
+ 'DESCRIPTOR' : _RESOURCEDEMAND_SHAPEENTRY,
320
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
321
+ # @@protoc_insertion_point(class_scope:ray.rpc.ResourceDemand.ShapeEntry)
322
+ })
323
+ ,
324
+ 'DESCRIPTOR' : _RESOURCEDEMAND,
325
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
326
+ # @@protoc_insertion_point(class_scope:ray.rpc.ResourceDemand)
327
+ })
328
+ _sym_db.RegisterMessage(ResourceDemand)
329
+ _sym_db.RegisterMessage(ResourceDemand.ShapeEntry)
330
+
331
+ ResourceLoad = _reflection.GeneratedProtocolMessageType('ResourceLoad', (_message.Message,), {
332
+ 'DESCRIPTOR' : _RESOURCELOAD,
333
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
334
+ # @@protoc_insertion_point(class_scope:ray.rpc.ResourceLoad)
335
+ })
336
+ _sym_db.RegisterMessage(ResourceLoad)
337
+
338
+ ResourcesData = _reflection.GeneratedProtocolMessageType('ResourcesData', (_message.Message,), {
339
+
340
+ 'ResourcesAvailableEntry' : _reflection.GeneratedProtocolMessageType('ResourcesAvailableEntry', (_message.Message,), {
341
+ 'DESCRIPTOR' : _RESOURCESDATA_RESOURCESAVAILABLEENTRY,
342
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
343
+ # @@protoc_insertion_point(class_scope:ray.rpc.ResourcesData.ResourcesAvailableEntry)
344
+ })
345
+ ,
346
+
347
+ 'ResourcesTotalEntry' : _reflection.GeneratedProtocolMessageType('ResourcesTotalEntry', (_message.Message,), {
348
+ 'DESCRIPTOR' : _RESOURCESDATA_RESOURCESTOTALENTRY,
349
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
350
+ # @@protoc_insertion_point(class_scope:ray.rpc.ResourcesData.ResourcesTotalEntry)
351
+ })
352
+ ,
353
+
354
+ 'ResourceLoadEntry' : _reflection.GeneratedProtocolMessageType('ResourceLoadEntry', (_message.Message,), {
355
+ 'DESCRIPTOR' : _RESOURCESDATA_RESOURCELOADENTRY,
356
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
357
+ # @@protoc_insertion_point(class_scope:ray.rpc.ResourcesData.ResourceLoadEntry)
358
+ })
359
+ ,
360
+
361
+ 'ResourcesNormalTaskEntry' : _reflection.GeneratedProtocolMessageType('ResourcesNormalTaskEntry', (_message.Message,), {
362
+ 'DESCRIPTOR' : _RESOURCESDATA_RESOURCESNORMALTASKENTRY,
363
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
364
+ # @@protoc_insertion_point(class_scope:ray.rpc.ResourcesData.ResourcesNormalTaskEntry)
365
+ })
366
+ ,
367
+ 'DESCRIPTOR' : _RESOURCESDATA,
368
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
369
+ # @@protoc_insertion_point(class_scope:ray.rpc.ResourcesData)
370
+ })
371
+ _sym_db.RegisterMessage(ResourcesData)
372
+ _sym_db.RegisterMessage(ResourcesData.ResourcesAvailableEntry)
373
+ _sym_db.RegisterMessage(ResourcesData.ResourcesTotalEntry)
374
+ _sym_db.RegisterMessage(ResourcesData.ResourceLoadEntry)
375
+ _sym_db.RegisterMessage(ResourcesData.ResourcesNormalTaskEntry)
376
+
377
+ ResourceUsageBatchData = _reflection.GeneratedProtocolMessageType('ResourceUsageBatchData', (_message.Message,), {
378
+ 'DESCRIPTOR' : _RESOURCEUSAGEBATCHDATA,
379
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
380
+ # @@protoc_insertion_point(class_scope:ray.rpc.ResourceUsageBatchData)
381
+ })
382
+ _sym_db.RegisterMessage(ResourceUsageBatchData)
383
+
384
+ PlacementGroupLoad = _reflection.GeneratedProtocolMessageType('PlacementGroupLoad', (_message.Message,), {
385
+ 'DESCRIPTOR' : _PLACEMENTGROUPLOAD,
386
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
387
+ # @@protoc_insertion_point(class_scope:ray.rpc.PlacementGroupLoad)
388
+ })
389
+ _sym_db.RegisterMessage(PlacementGroupLoad)
390
+
391
+ PlacementGroupStats = _reflection.GeneratedProtocolMessageType('PlacementGroupStats', (_message.Message,), {
392
+ 'DESCRIPTOR' : _PLACEMENTGROUPSTATS,
393
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
394
+ # @@protoc_insertion_point(class_scope:ray.rpc.PlacementGroupStats)
395
+ })
396
+ _sym_db.RegisterMessage(PlacementGroupStats)
397
+
398
+ PlacementGroupTableData = _reflection.GeneratedProtocolMessageType('PlacementGroupTableData', (_message.Message,), {
399
+ 'DESCRIPTOR' : _PLACEMENTGROUPTABLEDATA,
400
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
401
+ # @@protoc_insertion_point(class_scope:ray.rpc.PlacementGroupTableData)
402
+ })
403
+ _sym_db.RegisterMessage(PlacementGroupTableData)
404
+
405
+ JobTableData = _reflection.GeneratedProtocolMessageType('JobTableData', (_message.Message,), {
406
+ 'DESCRIPTOR' : _JOBTABLEDATA,
407
+ '__module__' : 'src.ray.protobuf.gcs_pb2'
408
+ # @@protoc_insertion_point(class_scope:ray.rpc.JobTableData)
409
+ })
410
+ _sym_db.RegisterMessage(JobTableData)
411
+
412
+ if _descriptor._USE_C_DESCRIPTORS == False:
413
+
414
+ DESCRIPTOR._options = None
415
+ DESCRIPTOR._serialized_options = b'\n\030io.ray.runtime.generated\370\001\001'
416
+ _ACTORTABLEDATA_REQUIREDRESOURCESENTRY._options = None
417
+ _ACTORTABLEDATA_REQUIREDRESOURCESENTRY._serialized_options = b'8\001'
418
+ _TASKSTATEUPDATE_STATETSNSENTRY._options = None
419
+ _TASKSTATEUPDATE_STATETSNSENTRY._serialized_options = b'8\001'
420
+ _AVAILABLERESOURCES_RESOURCESAVAILABLEENTRY._options = None
421
+ _AVAILABLERESOURCES_RESOURCESAVAILABLEENTRY._serialized_options = b'8\001'
422
+ _TOTALRESOURCES_RESOURCESTOTALENTRY._options = None
423
+ _TOTALRESOURCES_RESOURCESTOTALENTRY._serialized_options = b'8\001'
424
+ _GCSNODEINFO_RESOURCESTOTALENTRY._options = None
425
+ _GCSNODEINFO_RESOURCESTOTALENTRY._serialized_options = b'8\001'
426
+ _GCSNODEINFO_LABELSENTRY._options = None
427
+ _GCSNODEINFO_LABELSENTRY._serialized_options = b'8\001'
428
+ _JOBSAPIINFO_METADATAENTRY._options = None
429
+ _JOBSAPIINFO_METADATAENTRY._serialized_options = b'8\001'
430
+ _JOBSAPIINFO_ENTRYPOINTRESOURCESENTRY._options = None
431
+ _JOBSAPIINFO_ENTRYPOINTRESOURCESENTRY._serialized_options = b'8\001'
432
+ _WORKERTABLEDATA_WORKERINFOENTRY._options = None
433
+ _WORKERTABLEDATA_WORKERINFOENTRY._serialized_options = b'8\001'
434
+ _RESOURCEDEMAND_SHAPEENTRY._options = None
435
+ _RESOURCEDEMAND_SHAPEENTRY._serialized_options = b'8\001'
436
+ _RESOURCESDATA_RESOURCESAVAILABLEENTRY._options = None
437
+ _RESOURCESDATA_RESOURCESAVAILABLEENTRY._serialized_options = b'8\001'
438
+ _RESOURCESDATA_RESOURCESTOTALENTRY._options = None
439
+ _RESOURCESDATA_RESOURCESTOTALENTRY._serialized_options = b'8\001'
440
+ _RESOURCESDATA_RESOURCELOADENTRY._options = None
441
+ _RESOURCESDATA_RESOURCELOADENTRY._serialized_options = b'8\001'
442
+ _RESOURCESDATA_RESOURCESNORMALTASKENTRY._options = None
443
+ _RESOURCESDATA_RESOURCESNORMALTASKENTRY._serialized_options = b'8\001'
444
+ _TABLEPREFIX._serialized_start=11474
445
+ _TABLEPREFIX._serialized_end=11822
446
+ _TABLEPUBSUB._serialized_start=11825
447
+ _TABLEPUBSUB._serialized_end=12143
448
+ _GCSCHANGEMODE._serialized_start=12145
449
+ _GCSCHANGEMODE._serialized_end=12191
450
+ _GCSENTRY._serialized_start=70
451
+ _GCSENTRY._serialized_end=179
452
+ _ACTORTABLEDATA._serialized_start=182
453
+ _ACTORTABLEDATA._serialized_end=1522
454
+ _ACTORTABLEDATA_REQUIREDRESOURCESENTRY._serialized_start=1320
455
+ _ACTORTABLEDATA_REQUIREDRESOURCESENTRY._serialized_end=1388
456
+ _ACTORTABLEDATA_ACTORSTATE._serialized_start=1390
457
+ _ACTORTABLEDATA_ACTORSTATE._serialized_end=1487
458
+ _ERRORTABLEDATA._serialized_start=1524
459
+ _ERRORTABLEDATA._serialized_end=1650
460
+ _PROFILEEVENTENTRY._serialized_start=1653
461
+ _PROFILEEVENTENTRY._serialized_end=1812
462
+ _PROFILEEVENTS._serialized_start=1815
463
+ _PROFILEEVENTS._serialized_end=1996
464
+ _TASKLOGINFO._serialized_start=1999
465
+ _TASKLOGINFO._serialized_end=2336
466
+ _TASKSTATEUPDATE._serialized_start=2339
467
+ _TASKSTATEUPDATE._serialized_end=2926
468
+ _TASKSTATEUPDATE_STATETSNSENTRY._serialized_start=2749
469
+ _TASKSTATEUPDATE_STATETSNSENTRY._serialized_end=2809
470
+ _TASKEVENTS._serialized_start=2929
471
+ _TASKEVENTS._serialized_end=3273
472
+ _TASKATTEMPT._serialized_start=3275
473
+ _TASKATTEMPT._serialized_end=3352
474
+ _TASKEVENTDATA._serialized_start=3355
475
+ _TASKEVENTDATA._serialized_end=3587
476
+ _AVAILABLERESOURCES._serialized_start=3590
477
+ _AVAILABLERESOURCES._serialized_end=3808
478
+ _AVAILABLERESOURCES_RESOURCESAVAILABLEENTRY._serialized_start=3739
479
+ _AVAILABLERESOURCES_RESOURCESAVAILABLEENTRY._serialized_end=3808
480
+ _TOTALRESOURCES._serialized_start=3811
481
+ _TOTALRESOURCES._serialized_end=4005
482
+ _TOTALRESOURCES_RESOURCESTOTALENTRY._serialized_start=3940
483
+ _TOTALRESOURCES_RESOURCESTOTALENTRY._serialized_end=4005
484
+ _NODESNAPSHOT._serialized_start=4008
485
+ _NODESNAPSHOT._serialized_end=4212
486
+ _NODESNAPSHOT_STATE._serialized_start=4154
487
+ _NODESNAPSHOT_STATE._serialized_end=4212
488
+ _GCSNODEINFO._serialized_start=4215
489
+ _GCSNODEINFO._serialized_end=5377
490
+ _GCSNODEINFO_RESOURCESTOTALENTRY._serialized_start=3940
491
+ _GCSNODEINFO_RESOURCESTOTALENTRY._serialized_end=4005
492
+ _GCSNODEINFO_LABELSENTRY._serialized_start=5283
493
+ _GCSNODEINFO_LABELSENTRY._serialized_end=5340
494
+ _GCSNODEINFO_GCSNODESTATE._serialized_start=5342
495
+ _GCSNODEINFO_GCSNODESTATE._serialized_end=5377
496
+ _JOBSAPIINFO._serialized_start=5380
497
+ _JOBSAPIINFO._serialized_end=6452
498
+ _JOBSAPIINFO_METADATAENTRY._serialized_start=6105
499
+ _JOBSAPIINFO_METADATAENTRY._serialized_end=6164
500
+ _JOBSAPIINFO_ENTRYPOINTRESOURCESENTRY._serialized_start=6166
501
+ _JOBSAPIINFO_ENTRYPOINTRESOURCESENTRY._serialized_end=6236
502
+ _WORKERTABLEDATA._serialized_start=6455
503
+ _WORKERTABLEDATA._serialized_end=7310
504
+ _WORKERTABLEDATA_WORKERINFOENTRY._serialized_start=7178
505
+ _WORKERTABLEDATA_WORKERINFOENTRY._serialized_end=7239
506
+ _WORKERDELTADATA._serialized_start=7312
507
+ _WORKERDELTADATA._serialized_end=7387
508
+ _STOREDCONFIG._serialized_start=7389
509
+ _STOREDCONFIG._serialized_end=7427
510
+ _PUBSUBMESSAGE._serialized_start=7429
511
+ _PUBSUBMESSAGE._serialized_end=7480
512
+ _RESOURCEDEMAND._serialized_start=7483
513
+ _RESOURCEDEMAND._serialized_end=7778
514
+ _RESOURCEDEMAND_SHAPEENTRY._serialized_start=7722
515
+ _RESOURCEDEMAND_SHAPEENTRY._serialized_end=7778
516
+ _RESOURCELOAD._serialized_start=7780
517
+ _RESOURCELOAD._serialized_end=7862
518
+ _RESOURCESDATA._serialized_start=7865
519
+ _RESOURCESDATA._serialized_end=9086
520
+ _RESOURCESDATA_RESOURCESAVAILABLEENTRY._serialized_start=3739
521
+ _RESOURCESDATA_RESOURCESAVAILABLEENTRY._serialized_end=3808
522
+ _RESOURCESDATA_RESOURCESTOTALENTRY._serialized_start=3940
523
+ _RESOURCESDATA_RESOURCESTOTALENTRY._serialized_end=4005
524
+ _RESOURCESDATA_RESOURCELOADENTRY._serialized_start=8933
525
+ _RESOURCESDATA_RESOURCELOADENTRY._serialized_end=8996
526
+ _RESOURCESDATA_RESOURCESNORMALTASKENTRY._serialized_start=8998
527
+ _RESOURCESDATA_RESOURCESNORMALTASKENTRY._serialized_end=9068
528
+ _RESOURCEUSAGEBATCHDATA._serialized_start=9089
529
+ _RESOURCEUSAGEBATCHDATA._serialized_end=9314
530
+ _PLACEMENTGROUPLOAD._serialized_start=9316
531
+ _PLACEMENTGROUPLOAD._serialized_end=9420
532
+ _PLACEMENTGROUPSTATS._serialized_start=9423
533
+ _PLACEMENTGROUPSTATS._serialized_end=10027
534
+ _PLACEMENTGROUPSTATS_SCHEDULINGSTATE._serialized_start=9881
535
+ _PLACEMENTGROUPSTATS_SCHEDULINGSTATE._serialized_end=10027
536
+ _PLACEMENTGROUPTABLEDATA._serialized_start=10030
537
+ _PLACEMENTGROUPTABLEDATA._serialized_end=10975
538
+ _PLACEMENTGROUPTABLEDATA_PLACEMENTGROUPSTATE._serialized_start=10883
539
+ _PLACEMENTGROUPTABLEDATA_PLACEMENTGROUPSTATE._serialized_end=10975
540
+ _JOBTABLEDATA._serialized_start=10978
541
+ _JOBTABLEDATA._serialized_end=11471
542
+ # @@protoc_insertion_point(module_scope)
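For orientation (not part of the committed diff): the gcs_pb2 module above is stock protoc output, so the messages and enums it registers behave like any generated protobuf types. A minimal sketch, assuming the package imports as ray.core.generated.gcs_pb2; the field and enum names come from the serialized descriptor shown above, while the node id value is a hypothetical placeholder:

    from ray.core.generated import gcs_pb2

    # Build a GcsNodeInfo message; ALIVE comes from the nested GcsNodeState enum.
    node = gcs_pb2.GcsNodeInfo(
        node_id=b"\x01" * 28,                # hypothetical placeholder id
        node_manager_address="10.0.0.1",
        state=gcs_pb2.GcsNodeInfo.ALIVE,
        is_head_node=True,
    )
    node.resources_total["CPU"] = 8.0        # map<string, double> field

    # Round-trip through the protobuf wire format.
    payload = node.SerializeToString()
    clone = gcs_pb2.GcsNodeInfo.FromString(payload)
    assert clone.node_manager_address == "10.0.0.1"

    # Top-level enums are exposed via the EnumTypeWrapper helpers defined above.
    assert gcs_pb2.GcsChangeMode.Name(gcs_pb2.APPEND_OR_ADD) == "APPEND_OR_ADD"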
infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/gcs_pb2_grpc.py ADDED
@@ -0,0 +1,4 @@
1
+ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
2
+ """Client and server classes corresponding to protobuf-defined services."""
3
+ import grpc
4
+
infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/gcs_service_pb2_grpc.py ADDED
The diff for this file is too large to render. See raw diff
infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/job_agent_pb2_grpc.py ADDED
@@ -0,0 +1,66 @@
1
+ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
2
+ """Client and server classes corresponding to protobuf-defined services."""
3
+ import grpc
4
+
5
+ from . import job_agent_pb2 as src_dot_ray_dot_protobuf_dot_job__agent__pb2
6
+
7
+
8
+ class JobAgentServiceStub(object):
9
+ """Missing associated documentation comment in .proto file."""
10
+
11
+ def __init__(self, channel):
12
+ """Constructor.
13
+
14
+ Args:
15
+ channel: A grpc.Channel.
16
+ """
17
+ self.InitializeJobEnv = channel.unary_unary(
18
+ '/ray.rpc.JobAgentService/InitializeJobEnv',
19
+ request_serializer=src_dot_ray_dot_protobuf_dot_job__agent__pb2.InitializeJobEnvRequest.SerializeToString,
20
+ response_deserializer=src_dot_ray_dot_protobuf_dot_job__agent__pb2.InitializeJobEnvReply.FromString,
21
+ )
22
+
23
+
24
+ class JobAgentServiceServicer(object):
25
+ """Missing associated documentation comment in .proto file."""
26
+
27
+ def InitializeJobEnv(self, request, context):
28
+ """Missing associated documentation comment in .proto file."""
29
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
30
+ context.set_details('Method not implemented!')
31
+ raise NotImplementedError('Method not implemented!')
32
+
33
+
34
+ def add_JobAgentServiceServicer_to_server(servicer, server):
35
+ rpc_method_handlers = {
36
+ 'InitializeJobEnv': grpc.unary_unary_rpc_method_handler(
37
+ servicer.InitializeJobEnv,
38
+ request_deserializer=src_dot_ray_dot_protobuf_dot_job__agent__pb2.InitializeJobEnvRequest.FromString,
39
+ response_serializer=src_dot_ray_dot_protobuf_dot_job__agent__pb2.InitializeJobEnvReply.SerializeToString,
40
+ ),
41
+ }
42
+ generic_handler = grpc.method_handlers_generic_handler(
43
+ 'ray.rpc.JobAgentService', rpc_method_handlers)
44
+ server.add_generic_rpc_handlers((generic_handler,))
45
+
46
+
47
+ # This class is part of an EXPERIMENTAL API.
48
+ class JobAgentService(object):
49
+ """Missing associated documentation comment in .proto file."""
50
+
51
+ @staticmethod
52
+ def InitializeJobEnv(request,
53
+ target,
54
+ options=(),
55
+ channel_credentials=None,
56
+ call_credentials=None,
57
+ insecure=False,
58
+ compression=None,
59
+ wait_for_ready=None,
60
+ timeout=None,
61
+ metadata=None):
62
+ return grpc.experimental.unary_unary(request, target, '/ray.rpc.JobAgentService/InitializeJobEnv',
63
+ src_dot_ray_dot_protobuf_dot_job__agent__pb2.InitializeJobEnvRequest.SerializeToString,
64
+ src_dot_ray_dot_protobuf_dot_job__agent__pb2.InitializeJobEnvReply.FromString,
65
+ options, channel_credentials,
66
+ insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
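Again for orientation (not part of the diff): the stub/servicer pair above follows the standard grpcio codegen pattern, so calling the agent is the usual channel-plus-stub sequence. A minimal sketch, assuming an agent is reachable at a placeholder address and that the sibling job_agent_pb2 module defines the request message (as the imports above indicate); the concrete request fields are omitted because they live in the .proto, not in this file:

    import grpc
    from ray.core.generated import job_agent_pb2, job_agent_pb2_grpc

    # Hypothetical agent endpoint; substitute the real host:port.
    channel = grpc.insecure_channel("127.0.0.1:12345")
    stub = job_agent_pb2_grpc.JobAgentServiceStub(channel)

    # InitializeJobEnv is a unary-unary RPC: one request message, one reply.
    request = job_agent_pb2.InitializeJobEnvRequest()
    reply = stub.InitializeJobEnv(request, timeout=30)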
infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/ray_client_pb2.py ADDED
@@ -0,0 +1,561 @@
1
+ # -*- coding: utf-8 -*-
2
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
3
+ # source: src/ray/protobuf/ray_client.proto
4
+ """Generated protocol buffer code."""
5
+ from google.protobuf.internal import enum_type_wrapper
6
+ from google.protobuf import descriptor as _descriptor
7
+ from google.protobuf import descriptor_pool as _descriptor_pool
8
+ from google.protobuf import message as _message
9
+ from google.protobuf import reflection as _reflection
10
+ from google.protobuf import symbol_database as _symbol_database
11
+ # @@protoc_insertion_point(imports)
12
+
13
+ _sym_db = _symbol_database.Default()
14
+
15
+
16
+
17
+
18
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n!src/ray/protobuf/ray_client.proto\x12\x07ray.rpc\"\xb5\x01\n\x03\x41rg\x12+\n\x05local\x18\x01 \x01(\x0e\x32\x15.ray.rpc.Arg.LocalityR\x05local\x12!\n\x0creference_id\x18\x02 \x01(\x0cR\x0breferenceId\x12\x12\n\x04\x64\x61ta\x18\x03 \x01(\x0cR\x04\x64\x61ta\x12!\n\x04type\x18\x04 \x01(\x0e\x32\r.ray.rpc.TypeR\x04type\"\'\n\x08Locality\x12\x0c\n\x08INTERNED\x10\x00\x12\r\n\tREFERENCE\x10\x01\"6\n\x0bTaskOptions\x12\'\n\x0fpickled_options\x18\x01 \x01(\x0cR\x0epickledOptions\"\xf4\x04\n\nClientTask\x12\x36\n\x04type\x18\x01 \x01(\x0e\x32\".ray.rpc.ClientTask.RemoteExecTypeR\x04type\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12\x1d\n\npayload_id\x18\x03 \x01(\x0cR\tpayloadId\x12 \n\x04\x61rgs\x18\x04 \x03(\x0b\x32\x0c.ray.rpc.ArgR\x04\x61rgs\x12\x37\n\x06kwargs\x18\x05 \x03(\x0b\x32\x1f.ray.rpc.ClientTask.KwargsEntryR\x06kwargs\x12\x1b\n\tclient_id\x18\x06 \x01(\tR\x08\x63lientId\x12.\n\x07options\x18\x07 \x01(\x0b\x32\x14.ray.rpc.TaskOptionsR\x07options\x12?\n\x10\x62\x61seline_options\x18\x08 \x01(\x0b\x32\x14.ray.rpc.TaskOptionsR\x0f\x62\x61selineOptions\x12\x1c\n\tnamespace\x18\t \x01(\tR\tnamespace\x12\x12\n\x04\x64\x61ta\x18\n \x01(\x0cR\x04\x64\x61ta\x12\x19\n\x08\x63hunk_id\x18\x0b \x01(\x05R\x07\x63hunkId\x12!\n\x0ctotal_chunks\x18\x0c \x01(\x05R\x0btotalChunks\x1aG\n\x0bKwargsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\"\n\x05value\x18\x02 \x01(\x0b\x32\x0c.ray.rpc.ArgR\x05value:\x02\x38\x01\"Y\n\x0eRemoteExecType\x12\x0c\n\x08\x46UNCTION\x10\x00\x12\t\n\x05\x41\x43TOR\x10\x01\x12\n\n\x06METHOD\x10\x02\x12\x11\n\rSTATIC_METHOD\x10\x03\x12\x0f\n\x0bNAMED_ACTOR\x10\x04\"]\n\x10\x43lientTaskTicket\x12\x14\n\x05valid\x18\x01 \x01(\x08R\x05valid\x12\x1d\n\nreturn_ids\x18\x02 \x03(\x0cR\treturnIds\x12\x14\n\x05\x65rror\x18\x03 \x01(\x0cR\x05\x65rror\"\xbc\x01\n\nPutRequest\x12\x12\n\x04\x64\x61ta\x18\x01 \x01(\x0cR\x04\x64\x61ta\x12\"\n\rclient_ref_id\x18\x02 \x01(\x0cR\x0b\x63lientRefId\x12\x19\n\x08\x63hunk_id\x18\x03 \x01(\x05R\x07\x63hunkId\x12!\n\x0ctotal_chunks\x18\x04 \x01(\x05R\x0btotalChunks\x12\x1d\n\ntotal_size\x18\x05 \x01(\x03R\ttotalSize\x12\x19\n\x08owner_id\x18\x06 \x01(\x0cR\x07ownerId\"I\n\x0bPutResponse\x12\x0e\n\x02id\x18\x01 \x01(\x0cR\x02id\x12\x14\n\x05valid\x18\x02 \x01(\x08R\x05valid\x12\x14\n\x05\x65rror\x18\x03 \x01(\x0cR\x05\x65rror\"\x96\x01\n\nGetRequest\x12\x10\n\x03ids\x18\x04 \x03(\x0cR\x03ids\x12\x18\n\x07timeout\x18\x02 \x01(\x02R\x07timeout\x12\"\n\x0c\x61synchronous\x18\x03 \x01(\x08R\x0c\x61synchronous\x12$\n\x0estart_chunk_id\x18\x05 \x01(\x05R\x0cstartChunkId\x12\x12\n\x02id\x18\x01 \x01(\x0c\x42\x02\x18\x01R\x02id\"\xaa\x01\n\x0bGetResponse\x12\x14\n\x05valid\x18\x01 \x01(\x08R\x05valid\x12\x12\n\x04\x64\x61ta\x18\x02 \x01(\x0cR\x04\x64\x61ta\x12\x14\n\x05\x65rror\x18\x03 \x01(\x0cR\x05\x65rror\x12\x19\n\x08\x63hunk_id\x18\x04 \x01(\x05R\x07\x63hunkId\x12!\n\x0ctotal_chunks\x18\x05 \x01(\x05R\x0btotalChunks\x12\x1d\n\ntotal_size\x18\x06 \x01(\x04R\ttotalSize\"\x84\x01\n\x0bWaitRequest\x12\x1d\n\nobject_ids\x18\x01 \x03(\x0cR\tobjectIds\x12\x1f\n\x0bnum_returns\x18\x02 \x01(\x03R\nnumReturns\x12\x18\n\x07timeout\x18\x03 \x01(\x01R\x07timeout\x12\x1b\n\tclient_id\x18\x04 \x01(\tR\x08\x63lientId\"\x80\x01\n\x0cWaitResponse\x12\x14\n\x05valid\x18\x01 \x01(\x08R\x05valid\x12(\n\x10ready_object_ids\x18\x02 \x03(\x0cR\x0ereadyObjectIds\x12\x30\n\x14remaining_object_ids\x18\x03 \x03(\x0cR\x12remainingObjectIds\"\xad\x01\n\x0f\x43lusterInfoType\"\x99\x01\n\x08TypeEnum\x12\x12\n\x0eIS_INITIALIZED\x10\x00\x12\t\n\x05NODES\x10\x01\x12\x15\n\x11\x43LUSTER_RESOURCES\x10\x02\x12\x17\n\x13\x41VAILABLE_RESOURCES\x10\x03\x12\x13\n\x0fRUNTIME_CONTEXT\x10\x04\x12\x0c\n\x08TIMELINE\x10\x05\x12\x08\n\x04PING\x10\x06\x12\x11\n\rDASHBOARD_URL\x10\x07\"K\n\x12\x43lusterInfoRequest\x12\x35\n\x04type\x18\x01 \x01(\x0e\x32!.ray.rpc.ClusterInfoType.TypeEnumR\x04type\"\x8e\x05\n\x13\x43lusterInfoResponse\x12\x35\n\x04type\x18\x01 \x01(\x0e\x32!.ray.rpc.ClusterInfoType.TypeEnumR\x04type\x12\x14\n\x04json\x18\x02 \x01(\tH\x00R\x04json\x12S\n\x0eresource_table\x18\x03 \x01(\x0b\x32*.ray.rpc.ClusterInfoResponse.ResourceTableH\x00R\rresourceTable\x12V\n\x0fruntime_context\x18\x04 \x01(\x0b\x32+.ray.rpc.ClusterInfoResponse.RuntimeContextH\x00R\x0eruntimeContext\x1a\x96\x01\n\rResourceTable\x12K\n\x05table\x18\x01 \x03(\x0b\x32\x35.ray.rpc.ClusterInfoResponse.ResourceTable.TableEntryR\x05table\x1a\x38\n\nTableEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\x01R\x05value:\x02\x38\x01\x1a\xd2\x01\n\x0eRuntimeContext\x12\x15\n\x06job_id\x18\x01 \x01(\x0cR\x05jobId\x12\x17\n\x07node_id\x18\x02 \x01(\x0cR\x06nodeId\x12\x1c\n\tnamespace\x18\x03 \x01(\tR\tnamespace\x12\x1f\n\x0bruntime_env\x18\x04 \x01(\tR\nruntimeEnv\x12\x30\n\x14\x63\x61pture_client_tasks\x18\x05 \x01(\x08R\x12\x63\x61ptureClientTasks\x12\x1f\n\x0bgcs_address\x18\x06 \x01(\tR\ngcsAddressB\x0f\n\rresponse_type\"\xf1\x02\n\x10TerminateRequest\x12\x1b\n\tclient_id\x18\x01 \x01(\tR\x08\x63lientId\x12@\n\x05\x61\x63tor\x18\x02 \x01(\x0b\x32(.ray.rpc.TerminateRequest.ActorTerminateH\x00R\x05\x61\x63tor\x12P\n\x0btask_object\x18\x03 \x01(\x0b\x32-.ray.rpc.TerminateRequest.TaskObjectTerminateH\x00R\ntaskObject\x1a?\n\x0e\x41\x63torTerminate\x12\x0e\n\x02id\x18\x01 \x01(\x0cR\x02id\x12\x1d\n\nno_restart\x18\x02 \x01(\x08R\tnoRestart\x1aY\n\x13TaskObjectTerminate\x12\x0e\n\x02id\x18\x01 \x01(\x0cR\x02id\x12\x14\n\x05\x66orce\x18\x02 \x01(\x08R\x05\x66orce\x12\x1c\n\trecursive\x18\x03 \x01(\x08R\trecursiveB\x10\n\x0eterminate_type\"#\n\x11TerminateResponse\x12\x0e\n\x02ok\x18\x01 \x01(\x08R\x02ok\"T\n\x0fKVExistsRequest\x12\x10\n\x03key\x18\x01 \x01(\x0cR\x03key\x12!\n\tnamespace\x18\x02 \x01(\x0cH\x00R\tnamespace\x88\x01\x01\x42\x0c\n\n_namespace\"*\n\x10KVExistsResponse\x12\x16\n\x06\x65xists\x18\x01 \x01(\x08R\x06\x65xists\"Q\n\x0cKVGetRequest\x12\x10\n\x03key\x18\x01 \x01(\x0cR\x03key\x12!\n\tnamespace\x18\x02 \x01(\x0cH\x00R\tnamespace\x88\x01\x01\x42\x0c\n\n_namespace\"4\n\rKVGetResponse\x12\x19\n\x05value\x18\x01 \x01(\x0cH\x00R\x05value\x88\x01\x01\x42\x08\n\x06_value\"\x85\x01\n\x0cKVPutRequest\x12\x10\n\x03key\x18\x01 \x01(\x0cR\x03key\x12\x14\n\x05value\x18\x02 \x01(\x0cR\x05value\x12\x1c\n\toverwrite\x18\x03 \x01(\x08R\toverwrite\x12!\n\tnamespace\x18\x04 \x01(\x0cH\x00R\tnamespace\x88\x01\x01\x42\x0c\n\n_namespace\"6\n\rKVPutResponse\x12%\n\x0e\x61lready_exists\x18\x01 \x01(\x08R\ralreadyExists\"u\n\x0cKVDelRequest\x12\x10\n\x03key\x18\x01 \x01(\x0cR\x03key\x12\"\n\rdel_by_prefix\x18\x02 \x01(\x08R\x0b\x64\x65lByPrefix\x12!\n\tnamespace\x18\x03 \x01(\x0cH\x00R\tnamespace\x88\x01\x01\x42\x0c\n\n_namespace\"0\n\rKVDelResponse\x12\x1f\n\x0b\x64\x65leted_num\x18\x01 \x01(\x05R\ndeletedNum\"X\n\rKVListRequest\x12\x16\n\x06prefix\x18\x01 \x01(\x0cR\x06prefix\x12!\n\tnamespace\x18\x02 \x01(\x0cH\x00R\tnamespace\x88\x01\x01\x42\x0c\n\n_namespace\"$\n\x0eKVListResponse\x12\x12\n\x04keys\x18\x01 \x03(\x0cR\x04keys\"T\n\x1d\x43lientPinRuntimeEnvURIRequest\x12\x10\n\x03uri\x18\x01 \x01(\tR\x03uri\x12!\n\x0c\x65xpiration_s\x18\x02 \x01(\x05R\x0b\x65xpirationS\" \n\x1e\x43lientPinRuntimeEnvURIResponse\"\x8a\x01\n\x0bInitRequest\x12\x1d\n\njob_config\x18\x01 \x01(\x0cR\tjobConfig\x12&\n\x0fray_init_kwargs\x18\x02 \x01(\tR\rrayInitKwargs\x12\x34\n\x16reconnect_grace_period\x18\x03 \x01(\x05R\x14reconnectGracePeriod\"0\n\x0cInitResponse\x12\x0e\n\x02ok\x18\x01 \x01(\x08R\x02ok\x12\x10\n\x03msg\x18\x02 \x01(\tR\x03msg\"\x17\n\x15PrepRuntimeEnvRequest\"\x18\n\x16PrepRuntimeEnvResponse\"E\n\x1c\x43lientListNamedActorsRequest\x12%\n\x0e\x61ll_namespaces\x18\x01 \x01(\x08R\rallNamespaces\"@\n\x1d\x43lientListNamedActorsResponse\x12\x1f\n\x0b\x61\x63tors_json\x18\x01 \x01(\tR\nactorsJson\"\"\n\x0eReleaseRequest\x12\x10\n\x03ids\x18\x01 \x03(\x0cR\x03ids\"!\n\x0fReleaseResponse\x12\x0e\n\x02ok\x18\x02 \x03(\x08R\x02ok\"\x17\n\x15\x43onnectionInfoRequest\"\xcb\x01\n\x16\x43onnectionInfoResponse\x12\x1f\n\x0bnum_clients\x18\x01 \x01(\x05R\nnumClients\x12\x1f\n\x0bray_version\x18\x02 \x01(\tR\nrayVersion\x12\x1d\n\nray_commit\x18\x03 \x01(\tR\trayCommit\x12%\n\x0epython_version\x18\x04 \x01(\tR\rpythonVersion\x12)\n\x10protocol_version\x18\x05 \x01(\tR\x0fprotocolVersion\"\x1a\n\x18\x43onnectionCleanupRequest\"\x1b\n\x19\x43onnectionCleanupResponse\"+\n\x12\x41\x63knowledgeRequest\x12\x15\n\x06req_id\x18\x01 \x01(\x05R\x05reqId\"\xc6\x05\n\x0b\x44\x61taRequest\x12\x15\n\x06req_id\x18\x01 \x01(\x05R\x05reqId\x12\'\n\x03get\x18\x02 \x01(\x0b\x32\x13.ray.rpc.GetRequestH\x00R\x03get\x12\'\n\x03put\x18\x03 \x01(\x0b\x32\x13.ray.rpc.PutRequestH\x00R\x03put\x12\x33\n\x07release\x18\x04 \x01(\x0b\x32\x17.ray.rpc.ReleaseRequestH\x00R\x07release\x12I\n\x0f\x63onnection_info\x18\x05 \x01(\x0b\x32\x1e.ray.rpc.ConnectionInfoRequestH\x00R\x0e\x63onnectionInfo\x12*\n\x04init\x18\x06 \x01(\x0b\x32\x14.ray.rpc.InitRequestH\x00R\x04init\x12J\n\x10prep_runtime_env\x18\x07 \x01(\x0b\x32\x1e.ray.rpc.PrepRuntimeEnvRequestH\x00R\x0eprepRuntimeEnv\x12R\n\x12\x63onnection_cleanup\x18\x08 \x01(\x0b\x32!.ray.rpc.ConnectionCleanupRequestH\x00R\x11\x63onnectionCleanup\x12?\n\x0b\x61\x63knowledge\x18\t \x01(\x0b\x32\x1b.ray.rpc.AcknowledgeRequestH\x00R\x0b\x61\x63knowledge\x12)\n\x04task\x18\n \x01(\x0b\x32\x13.ray.rpc.ClientTaskH\x00R\x04task\x12\x39\n\tterminate\x18\x0b \x01(\x0b\x32\x19.ray.rpc.TerminateRequestH\x00R\tterminate\x12S\n\x11list_named_actors\x18\x0c \x01(\x0b\x32%.ray.rpc.ClientListNamedActorsRequestH\x00R\x0flistNamedActorsB\x06\n\x04type\"\xb5\x05\n\x0c\x44\x61taResponse\x12\x15\n\x06req_id\x18\x01 \x01(\x05R\x05reqId\x12(\n\x03get\x18\x02 \x01(\x0b\x32\x14.ray.rpc.GetResponseH\x00R\x03get\x12(\n\x03put\x18\x03 \x01(\x0b\x32\x14.ray.rpc.PutResponseH\x00R\x03put\x12\x34\n\x07release\x18\x04 \x01(\x0b\x32\x18.ray.rpc.ReleaseResponseH\x00R\x07release\x12J\n\x0f\x63onnection_info\x18\x05 \x01(\x0b\x32\x1f.ray.rpc.ConnectionInfoResponseH\x00R\x0e\x63onnectionInfo\x12+\n\x04init\x18\x06 \x01(\x0b\x32\x15.ray.rpc.InitResponseH\x00R\x04init\x12K\n\x10prep_runtime_env\x18\x07 \x01(\x0b\x32\x1f.ray.rpc.PrepRuntimeEnvResponseH\x00R\x0eprepRuntimeEnv\x12S\n\x12\x63onnection_cleanup\x18\x08 \x01(\x0b\x32\".ray.rpc.ConnectionCleanupResponseH\x00R\x11\x63onnectionCleanup\x12<\n\x0btask_ticket\x18\n \x01(\x0b\x32\x19.ray.rpc.ClientTaskTicketH\x00R\ntaskTicket\x12:\n\tterminate\x18\x0b \x01(\x0b\x32\x1a.ray.rpc.TerminateResponseH\x00R\tterminate\x12T\n\x11list_named_actors\x18\x0c \x01(\x0b\x32&.ray.rpc.ClientListNamedActorsResponseH\x00R\x0flistNamedActorsB\x06\n\x04typeJ\x04\x08\t\x10\nR\x0b\x61\x63knowledge\"J\n\x12LogSettingsRequest\x12\x18\n\x07\x65nabled\x18\x01 \x01(\x08R\x07\x65nabled\x12\x1a\n\x08loglevel\x18\x02 \x01(\x05R\x08loglevel\"E\n\x07LogData\x12\x10\n\x03msg\x18\x01 \x01(\tR\x03msg\x12\x14\n\x05level\x18\x02 \x01(\x05R\x05level\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name*\x13\n\x04Type\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x32\x96\x08\n\x0cRayletDriver\x12\x35\n\x04Init\x12\x14.ray.rpc.InitRequest\x1a\x15.ray.rpc.InitResponse\"\x00\x12S\n\x0ePrepRuntimeEnv\x12\x1e.ray.rpc.PrepRuntimeEnvRequest\x1a\x1f.ray.rpc.PrepRuntimeEnvResponse\"\x00\x12:\n\tGetObject\x12\x13.ray.rpc.GetRequest\x1a\x14.ray.rpc.GetResponse\"\x00\x30\x01\x12\x38\n\tPutObject\x12\x13.ray.rpc.PutRequest\x1a\x14.ray.rpc.PutResponse\"\x00\x12;\n\nWaitObject\x12\x14.ray.rpc.WaitRequest\x1a\x15.ray.rpc.WaitResponse\"\x00\x12<\n\x08Schedule\x12\x13.ray.rpc.ClientTask\x1a\x19.ray.rpc.ClientTaskTicket\"\x00\x12\x44\n\tTerminate\x12\x19.ray.rpc.TerminateRequest\x1a\x1a.ray.rpc.TerminateResponse\"\x00\x12J\n\x0b\x43lusterInfo\x12\x1b.ray.rpc.ClusterInfoRequest\x1a\x1c.ray.rpc.ClusterInfoResponse\"\x00\x12\x38\n\x05KVGet\x12\x15.ray.rpc.KVGetRequest\x1a\x16.ray.rpc.KVGetResponse\"\x00\x12\x38\n\x05KVPut\x12\x15.ray.rpc.KVPutRequest\x1a\x16.ray.rpc.KVPutResponse\"\x00\x12\x38\n\x05KVDel\x12\x15.ray.rpc.KVDelRequest\x1a\x16.ray.rpc.KVDelResponse\"\x00\x12;\n\x06KVList\x12\x16.ray.rpc.KVListRequest\x1a\x17.ray.rpc.KVListResponse\"\x00\x12\x41\n\x08KVExists\x12\x18.ray.rpc.KVExistsRequest\x1a\x19.ray.rpc.KVExistsResponse\"\x00\x12\x62\n\x0fListNamedActors\x12%.ray.rpc.ClientListNamedActorsRequest\x1a&.ray.rpc.ClientListNamedActorsResponse\"\x00\x12\x65\n\x10PinRuntimeEnvURI\x12&.ray.rpc.ClientPinRuntimeEnvURIRequest\x1a\'.ray.rpc.ClientPinRuntimeEnvURIResponse\"\x00\x32S\n\x12RayletDataStreamer\x12=\n\x08\x44\x61tapath\x12\x14.ray.rpc.DataRequest\x1a\x15.ray.rpc.DataResponse\"\x00(\x01\x30\x01\x32U\n\x11RayletLogStreamer\x12@\n\tLogstream\x12\x1b.ray.rpc.LogSettingsRequest\x1a\x10.ray.rpc.LogData\"\x00(\x01\x30\x01\x42\x03\xf8\x01\x01\x62\x06proto3')
+
+ _TYPE = DESCRIPTOR.enum_types_by_name['Type']
+ Type = enum_type_wrapper.EnumTypeWrapper(_TYPE)
+ DEFAULT = 0
+
+
+ _ARG = DESCRIPTOR.message_types_by_name['Arg']
+ _TASKOPTIONS = DESCRIPTOR.message_types_by_name['TaskOptions']
+ _CLIENTTASK = DESCRIPTOR.message_types_by_name['ClientTask']
+ _CLIENTTASK_KWARGSENTRY = _CLIENTTASK.nested_types_by_name['KwargsEntry']
+ _CLIENTTASKTICKET = DESCRIPTOR.message_types_by_name['ClientTaskTicket']
+ _PUTREQUEST = DESCRIPTOR.message_types_by_name['PutRequest']
+ _PUTRESPONSE = DESCRIPTOR.message_types_by_name['PutResponse']
+ _GETREQUEST = DESCRIPTOR.message_types_by_name['GetRequest']
+ _GETRESPONSE = DESCRIPTOR.message_types_by_name['GetResponse']
+ _WAITREQUEST = DESCRIPTOR.message_types_by_name['WaitRequest']
+ _WAITRESPONSE = DESCRIPTOR.message_types_by_name['WaitResponse']
+ _CLUSTERINFOTYPE = DESCRIPTOR.message_types_by_name['ClusterInfoType']
+ _CLUSTERINFOREQUEST = DESCRIPTOR.message_types_by_name['ClusterInfoRequest']
+ _CLUSTERINFORESPONSE = DESCRIPTOR.message_types_by_name['ClusterInfoResponse']
+ _CLUSTERINFORESPONSE_RESOURCETABLE = _CLUSTERINFORESPONSE.nested_types_by_name['ResourceTable']
+ _CLUSTERINFORESPONSE_RESOURCETABLE_TABLEENTRY = _CLUSTERINFORESPONSE_RESOURCETABLE.nested_types_by_name['TableEntry']
+ _CLUSTERINFORESPONSE_RUNTIMECONTEXT = _CLUSTERINFORESPONSE.nested_types_by_name['RuntimeContext']
+ _TERMINATEREQUEST = DESCRIPTOR.message_types_by_name['TerminateRequest']
+ _TERMINATEREQUEST_ACTORTERMINATE = _TERMINATEREQUEST.nested_types_by_name['ActorTerminate']
+ _TERMINATEREQUEST_TASKOBJECTTERMINATE = _TERMINATEREQUEST.nested_types_by_name['TaskObjectTerminate']
+ _TERMINATERESPONSE = DESCRIPTOR.message_types_by_name['TerminateResponse']
+ _KVEXISTSREQUEST = DESCRIPTOR.message_types_by_name['KVExistsRequest']
+ _KVEXISTSRESPONSE = DESCRIPTOR.message_types_by_name['KVExistsResponse']
+ _KVGETREQUEST = DESCRIPTOR.message_types_by_name['KVGetRequest']
+ _KVGETRESPONSE = DESCRIPTOR.message_types_by_name['KVGetResponse']
+ _KVPUTREQUEST = DESCRIPTOR.message_types_by_name['KVPutRequest']
+ _KVPUTRESPONSE = DESCRIPTOR.message_types_by_name['KVPutResponse']
+ _KVDELREQUEST = DESCRIPTOR.message_types_by_name['KVDelRequest']
+ _KVDELRESPONSE = DESCRIPTOR.message_types_by_name['KVDelResponse']
+ _KVLISTREQUEST = DESCRIPTOR.message_types_by_name['KVListRequest']
+ _KVLISTRESPONSE = DESCRIPTOR.message_types_by_name['KVListResponse']
+ _CLIENTPINRUNTIMEENVURIREQUEST = DESCRIPTOR.message_types_by_name['ClientPinRuntimeEnvURIRequest']
+ _CLIENTPINRUNTIMEENVURIRESPONSE = DESCRIPTOR.message_types_by_name['ClientPinRuntimeEnvURIResponse']
+ _INITREQUEST = DESCRIPTOR.message_types_by_name['InitRequest']
+ _INITRESPONSE = DESCRIPTOR.message_types_by_name['InitResponse']
+ _PREPRUNTIMEENVREQUEST = DESCRIPTOR.message_types_by_name['PrepRuntimeEnvRequest']
+ _PREPRUNTIMEENVRESPONSE = DESCRIPTOR.message_types_by_name['PrepRuntimeEnvResponse']
+ _CLIENTLISTNAMEDACTORSREQUEST = DESCRIPTOR.message_types_by_name['ClientListNamedActorsRequest']
+ _CLIENTLISTNAMEDACTORSRESPONSE = DESCRIPTOR.message_types_by_name['ClientListNamedActorsResponse']
+ _RELEASEREQUEST = DESCRIPTOR.message_types_by_name['ReleaseRequest']
+ _RELEASERESPONSE = DESCRIPTOR.message_types_by_name['ReleaseResponse']
+ _CONNECTIONINFOREQUEST = DESCRIPTOR.message_types_by_name['ConnectionInfoRequest']
+ _CONNECTIONINFORESPONSE = DESCRIPTOR.message_types_by_name['ConnectionInfoResponse']
+ _CONNECTIONCLEANUPREQUEST = DESCRIPTOR.message_types_by_name['ConnectionCleanupRequest']
+ _CONNECTIONCLEANUPRESPONSE = DESCRIPTOR.message_types_by_name['ConnectionCleanupResponse']
+ _ACKNOWLEDGEREQUEST = DESCRIPTOR.message_types_by_name['AcknowledgeRequest']
+ _DATAREQUEST = DESCRIPTOR.message_types_by_name['DataRequest']
+ _DATARESPONSE = DESCRIPTOR.message_types_by_name['DataResponse']
+ _LOGSETTINGSREQUEST = DESCRIPTOR.message_types_by_name['LogSettingsRequest']
+ _LOGDATA = DESCRIPTOR.message_types_by_name['LogData']
+ _ARG_LOCALITY = _ARG.enum_types_by_name['Locality']
+ _CLIENTTASK_REMOTEEXECTYPE = _CLIENTTASK.enum_types_by_name['RemoteExecType']
+ _CLUSTERINFOTYPE_TYPEENUM = _CLUSTERINFOTYPE.enum_types_by_name['TypeEnum']
+ Arg = _reflection.GeneratedProtocolMessageType('Arg', (_message.Message,), {
+   'DESCRIPTOR' : _ARG,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.Arg)
+   })
+ _sym_db.RegisterMessage(Arg)
+
+ TaskOptions = _reflection.GeneratedProtocolMessageType('TaskOptions', (_message.Message,), {
+   'DESCRIPTOR' : _TASKOPTIONS,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.TaskOptions)
+   })
+ _sym_db.RegisterMessage(TaskOptions)
+
+ ClientTask = _reflection.GeneratedProtocolMessageType('ClientTask', (_message.Message,), {
+
+   'KwargsEntry' : _reflection.GeneratedProtocolMessageType('KwargsEntry', (_message.Message,), {
+     'DESCRIPTOR' : _CLIENTTASK_KWARGSENTRY,
+     '__module__' : 'src.ray.protobuf.ray_client_pb2'
+     # @@protoc_insertion_point(class_scope:ray.rpc.ClientTask.KwargsEntry)
+     })
+   ,
+   'DESCRIPTOR' : _CLIENTTASK,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.ClientTask)
+   })
+ _sym_db.RegisterMessage(ClientTask)
+ _sym_db.RegisterMessage(ClientTask.KwargsEntry)
+
+ ClientTaskTicket = _reflection.GeneratedProtocolMessageType('ClientTaskTicket', (_message.Message,), {
+   'DESCRIPTOR' : _CLIENTTASKTICKET,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.ClientTaskTicket)
+   })
+ _sym_db.RegisterMessage(ClientTaskTicket)
+
+ PutRequest = _reflection.GeneratedProtocolMessageType('PutRequest', (_message.Message,), {
+   'DESCRIPTOR' : _PUTREQUEST,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.PutRequest)
+   })
+ _sym_db.RegisterMessage(PutRequest)
+
+ PutResponse = _reflection.GeneratedProtocolMessageType('PutResponse', (_message.Message,), {
+   'DESCRIPTOR' : _PUTRESPONSE,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.PutResponse)
+   })
+ _sym_db.RegisterMessage(PutResponse)
+
+ GetRequest = _reflection.GeneratedProtocolMessageType('GetRequest', (_message.Message,), {
+   'DESCRIPTOR' : _GETREQUEST,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.GetRequest)
+   })
+ _sym_db.RegisterMessage(GetRequest)
+
+ GetResponse = _reflection.GeneratedProtocolMessageType('GetResponse', (_message.Message,), {
+   'DESCRIPTOR' : _GETRESPONSE,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.GetResponse)
+   })
+ _sym_db.RegisterMessage(GetResponse)
+
+ WaitRequest = _reflection.GeneratedProtocolMessageType('WaitRequest', (_message.Message,), {
+   'DESCRIPTOR' : _WAITREQUEST,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.WaitRequest)
+   })
+ _sym_db.RegisterMessage(WaitRequest)
+
+ WaitResponse = _reflection.GeneratedProtocolMessageType('WaitResponse', (_message.Message,), {
+   'DESCRIPTOR' : _WAITRESPONSE,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.WaitResponse)
+   })
+ _sym_db.RegisterMessage(WaitResponse)
+
+ ClusterInfoType = _reflection.GeneratedProtocolMessageType('ClusterInfoType', (_message.Message,), {
+   'DESCRIPTOR' : _CLUSTERINFOTYPE,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.ClusterInfoType)
+   })
+ _sym_db.RegisterMessage(ClusterInfoType)
+
+ ClusterInfoRequest = _reflection.GeneratedProtocolMessageType('ClusterInfoRequest', (_message.Message,), {
+   'DESCRIPTOR' : _CLUSTERINFOREQUEST,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.ClusterInfoRequest)
+   })
+ _sym_db.RegisterMessage(ClusterInfoRequest)
+
+ ClusterInfoResponse = _reflection.GeneratedProtocolMessageType('ClusterInfoResponse', (_message.Message,), {
+
+   'ResourceTable' : _reflection.GeneratedProtocolMessageType('ResourceTable', (_message.Message,), {
+
+     'TableEntry' : _reflection.GeneratedProtocolMessageType('TableEntry', (_message.Message,), {
+       'DESCRIPTOR' : _CLUSTERINFORESPONSE_RESOURCETABLE_TABLEENTRY,
+       '__module__' : 'src.ray.protobuf.ray_client_pb2'
+       # @@protoc_insertion_point(class_scope:ray.rpc.ClusterInfoResponse.ResourceTable.TableEntry)
+       })
+     ,
+     'DESCRIPTOR' : _CLUSTERINFORESPONSE_RESOURCETABLE,
+     '__module__' : 'src.ray.protobuf.ray_client_pb2'
+     # @@protoc_insertion_point(class_scope:ray.rpc.ClusterInfoResponse.ResourceTable)
+     })
+   ,
+
+   'RuntimeContext' : _reflection.GeneratedProtocolMessageType('RuntimeContext', (_message.Message,), {
+     'DESCRIPTOR' : _CLUSTERINFORESPONSE_RUNTIMECONTEXT,
+     '__module__' : 'src.ray.protobuf.ray_client_pb2'
+     # @@protoc_insertion_point(class_scope:ray.rpc.ClusterInfoResponse.RuntimeContext)
+     })
+   ,
+   'DESCRIPTOR' : _CLUSTERINFORESPONSE,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.ClusterInfoResponse)
+   })
+ _sym_db.RegisterMessage(ClusterInfoResponse)
+ _sym_db.RegisterMessage(ClusterInfoResponse.ResourceTable)
+ _sym_db.RegisterMessage(ClusterInfoResponse.ResourceTable.TableEntry)
+ _sym_db.RegisterMessage(ClusterInfoResponse.RuntimeContext)
+
+ TerminateRequest = _reflection.GeneratedProtocolMessageType('TerminateRequest', (_message.Message,), {
+
+   'ActorTerminate' : _reflection.GeneratedProtocolMessageType('ActorTerminate', (_message.Message,), {
+     'DESCRIPTOR' : _TERMINATEREQUEST_ACTORTERMINATE,
+     '__module__' : 'src.ray.protobuf.ray_client_pb2'
+     # @@protoc_insertion_point(class_scope:ray.rpc.TerminateRequest.ActorTerminate)
+     })
+   ,
+
+   'TaskObjectTerminate' : _reflection.GeneratedProtocolMessageType('TaskObjectTerminate', (_message.Message,), {
+     'DESCRIPTOR' : _TERMINATEREQUEST_TASKOBJECTTERMINATE,
+     '__module__' : 'src.ray.protobuf.ray_client_pb2'
+     # @@protoc_insertion_point(class_scope:ray.rpc.TerminateRequest.TaskObjectTerminate)
+     })
+   ,
+   'DESCRIPTOR' : _TERMINATEREQUEST,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.TerminateRequest)
+   })
+ _sym_db.RegisterMessage(TerminateRequest)
+ _sym_db.RegisterMessage(TerminateRequest.ActorTerminate)
+ _sym_db.RegisterMessage(TerminateRequest.TaskObjectTerminate)
+
+ TerminateResponse = _reflection.GeneratedProtocolMessageType('TerminateResponse', (_message.Message,), {
+   'DESCRIPTOR' : _TERMINATERESPONSE,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.TerminateResponse)
+   })
+ _sym_db.RegisterMessage(TerminateResponse)
+
+ KVExistsRequest = _reflection.GeneratedProtocolMessageType('KVExistsRequest', (_message.Message,), {
+   'DESCRIPTOR' : _KVEXISTSREQUEST,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.KVExistsRequest)
+   })
+ _sym_db.RegisterMessage(KVExistsRequest)
+
+ KVExistsResponse = _reflection.GeneratedProtocolMessageType('KVExistsResponse', (_message.Message,), {
+   'DESCRIPTOR' : _KVEXISTSRESPONSE,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.KVExistsResponse)
+   })
+ _sym_db.RegisterMessage(KVExistsResponse)
+
+ KVGetRequest = _reflection.GeneratedProtocolMessageType('KVGetRequest', (_message.Message,), {
+   'DESCRIPTOR' : _KVGETREQUEST,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.KVGetRequest)
+   })
+ _sym_db.RegisterMessage(KVGetRequest)
+
+ KVGetResponse = _reflection.GeneratedProtocolMessageType('KVGetResponse', (_message.Message,), {
+   'DESCRIPTOR' : _KVGETRESPONSE,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.KVGetResponse)
+   })
+ _sym_db.RegisterMessage(KVGetResponse)
+
+ KVPutRequest = _reflection.GeneratedProtocolMessageType('KVPutRequest', (_message.Message,), {
+   'DESCRIPTOR' : _KVPUTREQUEST,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.KVPutRequest)
+   })
+ _sym_db.RegisterMessage(KVPutRequest)
+
+ KVPutResponse = _reflection.GeneratedProtocolMessageType('KVPutResponse', (_message.Message,), {
+   'DESCRIPTOR' : _KVPUTRESPONSE,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.KVPutResponse)
+   })
+ _sym_db.RegisterMessage(KVPutResponse)
+
+ KVDelRequest = _reflection.GeneratedProtocolMessageType('KVDelRequest', (_message.Message,), {
+   'DESCRIPTOR' : _KVDELREQUEST,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.KVDelRequest)
+   })
+ _sym_db.RegisterMessage(KVDelRequest)
+
+ KVDelResponse = _reflection.GeneratedProtocolMessageType('KVDelResponse', (_message.Message,), {
+   'DESCRIPTOR' : _KVDELRESPONSE,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.KVDelResponse)
+   })
+ _sym_db.RegisterMessage(KVDelResponse)
+
+ KVListRequest = _reflection.GeneratedProtocolMessageType('KVListRequest', (_message.Message,), {
+   'DESCRIPTOR' : _KVLISTREQUEST,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.KVListRequest)
+   })
+ _sym_db.RegisterMessage(KVListRequest)
+
+ KVListResponse = _reflection.GeneratedProtocolMessageType('KVListResponse', (_message.Message,), {
+   'DESCRIPTOR' : _KVLISTRESPONSE,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.KVListResponse)
+   })
+ _sym_db.RegisterMessage(KVListResponse)
+
+ ClientPinRuntimeEnvURIRequest = _reflection.GeneratedProtocolMessageType('ClientPinRuntimeEnvURIRequest', (_message.Message,), {
+   'DESCRIPTOR' : _CLIENTPINRUNTIMEENVURIREQUEST,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.ClientPinRuntimeEnvURIRequest)
+   })
+ _sym_db.RegisterMessage(ClientPinRuntimeEnvURIRequest)
+
+ ClientPinRuntimeEnvURIResponse = _reflection.GeneratedProtocolMessageType('ClientPinRuntimeEnvURIResponse', (_message.Message,), {
+   'DESCRIPTOR' : _CLIENTPINRUNTIMEENVURIRESPONSE,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.ClientPinRuntimeEnvURIResponse)
+   })
+ _sym_db.RegisterMessage(ClientPinRuntimeEnvURIResponse)
+
+ InitRequest = _reflection.GeneratedProtocolMessageType('InitRequest', (_message.Message,), {
+   'DESCRIPTOR' : _INITREQUEST,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.InitRequest)
+   })
+ _sym_db.RegisterMessage(InitRequest)
+
+ InitResponse = _reflection.GeneratedProtocolMessageType('InitResponse', (_message.Message,), {
+   'DESCRIPTOR' : _INITRESPONSE,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.InitResponse)
+   })
+ _sym_db.RegisterMessage(InitResponse)
+
+ PrepRuntimeEnvRequest = _reflection.GeneratedProtocolMessageType('PrepRuntimeEnvRequest', (_message.Message,), {
+   'DESCRIPTOR' : _PREPRUNTIMEENVREQUEST,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.PrepRuntimeEnvRequest)
+   })
+ _sym_db.RegisterMessage(PrepRuntimeEnvRequest)
+
+ PrepRuntimeEnvResponse = _reflection.GeneratedProtocolMessageType('PrepRuntimeEnvResponse', (_message.Message,), {
+   'DESCRIPTOR' : _PREPRUNTIMEENVRESPONSE,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.PrepRuntimeEnvResponse)
+   })
+ _sym_db.RegisterMessage(PrepRuntimeEnvResponse)
+
+ ClientListNamedActorsRequest = _reflection.GeneratedProtocolMessageType('ClientListNamedActorsRequest', (_message.Message,), {
+   'DESCRIPTOR' : _CLIENTLISTNAMEDACTORSREQUEST,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.ClientListNamedActorsRequest)
+   })
+ _sym_db.RegisterMessage(ClientListNamedActorsRequest)
+
+ ClientListNamedActorsResponse = _reflection.GeneratedProtocolMessageType('ClientListNamedActorsResponse', (_message.Message,), {
+   'DESCRIPTOR' : _CLIENTLISTNAMEDACTORSRESPONSE,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.ClientListNamedActorsResponse)
+   })
+ _sym_db.RegisterMessage(ClientListNamedActorsResponse)
+
+ ReleaseRequest = _reflection.GeneratedProtocolMessageType('ReleaseRequest', (_message.Message,), {
+   'DESCRIPTOR' : _RELEASEREQUEST,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.ReleaseRequest)
+   })
+ _sym_db.RegisterMessage(ReleaseRequest)
+
+ ReleaseResponse = _reflection.GeneratedProtocolMessageType('ReleaseResponse', (_message.Message,), {
+   'DESCRIPTOR' : _RELEASERESPONSE,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.ReleaseResponse)
+   })
+ _sym_db.RegisterMessage(ReleaseResponse)
+
+ ConnectionInfoRequest = _reflection.GeneratedProtocolMessageType('ConnectionInfoRequest', (_message.Message,), {
+   'DESCRIPTOR' : _CONNECTIONINFOREQUEST,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.ConnectionInfoRequest)
+   })
+ _sym_db.RegisterMessage(ConnectionInfoRequest)
+
+ ConnectionInfoResponse = _reflection.GeneratedProtocolMessageType('ConnectionInfoResponse', (_message.Message,), {
+   'DESCRIPTOR' : _CONNECTIONINFORESPONSE,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.ConnectionInfoResponse)
+   })
+ _sym_db.RegisterMessage(ConnectionInfoResponse)
+
+ ConnectionCleanupRequest = _reflection.GeneratedProtocolMessageType('ConnectionCleanupRequest', (_message.Message,), {
+   'DESCRIPTOR' : _CONNECTIONCLEANUPREQUEST,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.ConnectionCleanupRequest)
+   })
+ _sym_db.RegisterMessage(ConnectionCleanupRequest)
+
+ ConnectionCleanupResponse = _reflection.GeneratedProtocolMessageType('ConnectionCleanupResponse', (_message.Message,), {
+   'DESCRIPTOR' : _CONNECTIONCLEANUPRESPONSE,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.ConnectionCleanupResponse)
+   })
+ _sym_db.RegisterMessage(ConnectionCleanupResponse)
+
+ AcknowledgeRequest = _reflection.GeneratedProtocolMessageType('AcknowledgeRequest', (_message.Message,), {
+   'DESCRIPTOR' : _ACKNOWLEDGEREQUEST,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.AcknowledgeRequest)
+   })
+ _sym_db.RegisterMessage(AcknowledgeRequest)
+
+ DataRequest = _reflection.GeneratedProtocolMessageType('DataRequest', (_message.Message,), {
+   'DESCRIPTOR' : _DATAREQUEST,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.DataRequest)
+   })
+ _sym_db.RegisterMessage(DataRequest)
+
+ DataResponse = _reflection.GeneratedProtocolMessageType('DataResponse', (_message.Message,), {
+   'DESCRIPTOR' : _DATARESPONSE,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.DataResponse)
+   })
+ _sym_db.RegisterMessage(DataResponse)
+
+ LogSettingsRequest = _reflection.GeneratedProtocolMessageType('LogSettingsRequest', (_message.Message,), {
+   'DESCRIPTOR' : _LOGSETTINGSREQUEST,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.LogSettingsRequest)
+   })
+ _sym_db.RegisterMessage(LogSettingsRequest)
+
+ LogData = _reflection.GeneratedProtocolMessageType('LogData', (_message.Message,), {
+   'DESCRIPTOR' : _LOGDATA,
+   '__module__' : 'src.ray.protobuf.ray_client_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.LogData)
+   })
+ _sym_db.RegisterMessage(LogData)
+
+ _RAYLETDRIVER = DESCRIPTOR.services_by_name['RayletDriver']
+ _RAYLETDATASTREAMER = DESCRIPTOR.services_by_name['RayletDataStreamer']
+ _RAYLETLOGSTREAMER = DESCRIPTOR.services_by_name['RayletLogStreamer']
+ if _descriptor._USE_C_DESCRIPTORS == False:
+
+   DESCRIPTOR._options = None
+   DESCRIPTOR._serialized_options = b'\370\001\001'
+   _CLIENTTASK_KWARGSENTRY._options = None
+   _CLIENTTASK_KWARGSENTRY._serialized_options = b'8\001'
+   _GETREQUEST.fields_by_name['id']._options = None
+   _GETREQUEST.fields_by_name['id']._serialized_options = b'\030\001'
+   _CLUSTERINFORESPONSE_RESOURCETABLE_TABLEENTRY._options = None
+   _CLUSTERINFORESPONSE_RESOURCETABLE_TABLEENTRY._serialized_options = b'8\001'
+   _TYPE._serialized_start=6404
+   _TYPE._serialized_end=6423
+   _ARG._serialized_start=47
+   _ARG._serialized_end=228
+   _ARG_LOCALITY._serialized_start=189
+   _ARG_LOCALITY._serialized_end=228
+   _TASKOPTIONS._serialized_start=230
+   _TASKOPTIONS._serialized_end=284
+   _CLIENTTASK._serialized_start=287
+   _CLIENTTASK._serialized_end=915
+   _CLIENTTASK_KWARGSENTRY._serialized_start=753
+   _CLIENTTASK_KWARGSENTRY._serialized_end=824
+   _CLIENTTASK_REMOTEEXECTYPE._serialized_start=826
+   _CLIENTTASK_REMOTEEXECTYPE._serialized_end=915
+   _CLIENTTASKTICKET._serialized_start=917
+   _CLIENTTASKTICKET._serialized_end=1010
+   _PUTREQUEST._serialized_start=1013
+   _PUTREQUEST._serialized_end=1201
+   _PUTRESPONSE._serialized_start=1203
+   _PUTRESPONSE._serialized_end=1276
+   _GETREQUEST._serialized_start=1279
+   _GETREQUEST._serialized_end=1429
+   _GETRESPONSE._serialized_start=1432
+   _GETRESPONSE._serialized_end=1602
+   _WAITREQUEST._serialized_start=1605
+   _WAITREQUEST._serialized_end=1737
+   _WAITRESPONSE._serialized_start=1740
+   _WAITRESPONSE._serialized_end=1868
+   _CLUSTERINFOTYPE._serialized_start=1871
+   _CLUSTERINFOTYPE._serialized_end=2044
+   _CLUSTERINFOTYPE_TYPEENUM._serialized_start=1891
+   _CLUSTERINFOTYPE_TYPEENUM._serialized_end=2044
+   _CLUSTERINFOREQUEST._serialized_start=2046
+   _CLUSTERINFOREQUEST._serialized_end=2121
+   _CLUSTERINFORESPONSE._serialized_start=2124
+   _CLUSTERINFORESPONSE._serialized_end=2778
+   _CLUSTERINFORESPONSE_RESOURCETABLE._serialized_start=2398
+   _CLUSTERINFORESPONSE_RESOURCETABLE._serialized_end=2548
+   _CLUSTERINFORESPONSE_RESOURCETABLE_TABLEENTRY._serialized_start=2492
+   _CLUSTERINFORESPONSE_RESOURCETABLE_TABLEENTRY._serialized_end=2548
+   _CLUSTERINFORESPONSE_RUNTIMECONTEXT._serialized_start=2551
+   _CLUSTERINFORESPONSE_RUNTIMECONTEXT._serialized_end=2761
+   _TERMINATEREQUEST._serialized_start=2781
+   _TERMINATEREQUEST._serialized_end=3150
+   _TERMINATEREQUEST_ACTORTERMINATE._serialized_start=2978
+   _TERMINATEREQUEST_ACTORTERMINATE._serialized_end=3041
+   _TERMINATEREQUEST_TASKOBJECTTERMINATE._serialized_start=3043
+   _TERMINATEREQUEST_TASKOBJECTTERMINATE._serialized_end=3132
+   _TERMINATERESPONSE._serialized_start=3152
+   _TERMINATERESPONSE._serialized_end=3187
+   _KVEXISTSREQUEST._serialized_start=3189
+   _KVEXISTSREQUEST._serialized_end=3273
+   _KVEXISTSRESPONSE._serialized_start=3275
+   _KVEXISTSRESPONSE._serialized_end=3317
+   _KVGETREQUEST._serialized_start=3319
+   _KVGETREQUEST._serialized_end=3400
+   _KVGETRESPONSE._serialized_start=3402
+   _KVGETRESPONSE._serialized_end=3454
+   _KVPUTREQUEST._serialized_start=3457
+   _KVPUTREQUEST._serialized_end=3590
+   _KVPUTRESPONSE._serialized_start=3592
+   _KVPUTRESPONSE._serialized_end=3646
+   _KVDELREQUEST._serialized_start=3648
+   _KVDELREQUEST._serialized_end=3765
+   _KVDELRESPONSE._serialized_start=3767
+   _KVDELRESPONSE._serialized_end=3815
+   _KVLISTREQUEST._serialized_start=3817
+   _KVLISTREQUEST._serialized_end=3905
+   _KVLISTRESPONSE._serialized_start=3907
+   _KVLISTRESPONSE._serialized_end=3943
+   _CLIENTPINRUNTIMEENVURIREQUEST._serialized_start=3945
+   _CLIENTPINRUNTIMEENVURIREQUEST._serialized_end=4029
+   _CLIENTPINRUNTIMEENVURIRESPONSE._serialized_start=4031
+   _CLIENTPINRUNTIMEENVURIRESPONSE._serialized_end=4063
+   _INITREQUEST._serialized_start=4066
+   _INITREQUEST._serialized_end=4204
+   _INITRESPONSE._serialized_start=4206
+   _INITRESPONSE._serialized_end=4254
+   _PREPRUNTIMEENVREQUEST._serialized_start=4256
+   _PREPRUNTIMEENVREQUEST._serialized_end=4279
+   _PREPRUNTIMEENVRESPONSE._serialized_start=4281
+   _PREPRUNTIMEENVRESPONSE._serialized_end=4305
+   _CLIENTLISTNAMEDACTORSREQUEST._serialized_start=4307
+   _CLIENTLISTNAMEDACTORSREQUEST._serialized_end=4376
+   _CLIENTLISTNAMEDACTORSRESPONSE._serialized_start=4378
+   _CLIENTLISTNAMEDACTORSRESPONSE._serialized_end=4442
+   _RELEASEREQUEST._serialized_start=4444
+   _RELEASEREQUEST._serialized_end=4478
+   _RELEASERESPONSE._serialized_start=4480
+   _RELEASERESPONSE._serialized_end=4513
+   _CONNECTIONINFOREQUEST._serialized_start=4515
+   _CONNECTIONINFOREQUEST._serialized_end=4538
+   _CONNECTIONINFORESPONSE._serialized_start=4541
+   _CONNECTIONINFORESPONSE._serialized_end=4744
+   _CONNECTIONCLEANUPREQUEST._serialized_start=4746
+   _CONNECTIONCLEANUPREQUEST._serialized_end=4772
+   _CONNECTIONCLEANUPRESPONSE._serialized_start=4774
+   _CONNECTIONCLEANUPRESPONSE._serialized_end=4801
+   _ACKNOWLEDGEREQUEST._serialized_start=4803
+   _ACKNOWLEDGEREQUEST._serialized_end=4846
+   _DATAREQUEST._serialized_start=4849
+   _DATAREQUEST._serialized_end=5559
+   _DATARESPONSE._serialized_start=5562
+   _DATARESPONSE._serialized_end=6255
+   _LOGSETTINGSREQUEST._serialized_start=6257
+   _LOGSETTINGSREQUEST._serialized_end=6331
+   _LOGDATA._serialized_start=6333
+   _LOGDATA._serialized_end=6402
+   _RAYLETDRIVER._serialized_start=6426
+   _RAYLETDRIVER._serialized_end=7472
+   _RAYLETDATASTREAMER._serialized_start=7474
+   _RAYLETDATASTREAMER._serialized_end=7557
+   _RAYLETLOGSTREAMER._serialized_start=7559
+   _RAYLETLOGSTREAMER._serialized_end=7644
+ # @@protoc_insertion_point(module_scope)
infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/reporter_pb2_grpc.py ADDED
@@ -0,0 +1,292 @@
+ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+ """Client and server classes corresponding to protobuf-defined services."""
+ import grpc
+
+ from . import reporter_pb2 as src_dot_ray_dot_protobuf_dot_reporter__pb2
+
+
+ class ReporterServiceStub(object):
+     """Missing associated documentation comment in .proto file."""
+
+     def __init__(self, channel):
+         """Constructor.
+
+         Args:
+             channel: A grpc.Channel.
+         """
+         self.GetProfilingStats = channel.unary_unary(
+                 '/ray.rpc.ReporterService/GetProfilingStats',
+                 request_serializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.GetProfilingStatsRequest.SerializeToString,
+                 response_deserializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.GetProfilingStatsReply.FromString,
+                 )
+         self.ReportOCMetrics = channel.unary_unary(
+                 '/ray.rpc.ReporterService/ReportOCMetrics',
+                 request_serializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.ReportOCMetricsRequest.SerializeToString,
+                 response_deserializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.ReportOCMetricsReply.FromString,
+                 )
+         self.GetTraceback = channel.unary_unary(
+                 '/ray.rpc.ReporterService/GetTraceback',
+                 request_serializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.GetTracebackRequest.SerializeToString,
+                 response_deserializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.GetTracebackReply.FromString,
+                 )
+         self.CpuProfiling = channel.unary_unary(
+                 '/ray.rpc.ReporterService/CpuProfiling',
+                 request_serializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.CpuProfilingRequest.SerializeToString,
+                 response_deserializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.CpuProfilingReply.FromString,
+                 )
+         self.MemoryProfiling = channel.unary_unary(
+                 '/ray.rpc.ReporterService/MemoryProfiling',
+                 request_serializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.MemoryProfilingRequest.SerializeToString,
+                 response_deserializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.MemoryProfilingReply.FromString,
+                 )
+
+
+ class ReporterServiceServicer(object):
+     """Missing associated documentation comment in .proto file."""
+
+     def GetProfilingStats(self, request, context):
+         """Missing associated documentation comment in .proto file."""
+         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+         context.set_details('Method not implemented!')
+         raise NotImplementedError('Method not implemented!')
+
+     def ReportOCMetrics(self, request, context):
+         """Missing associated documentation comment in .proto file."""
+         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+         context.set_details('Method not implemented!')
+         raise NotImplementedError('Method not implemented!')
+
+     def GetTraceback(self, request, context):
+         """Missing associated documentation comment in .proto file."""
+         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+         context.set_details('Method not implemented!')
+         raise NotImplementedError('Method not implemented!')
+
+     def CpuProfiling(self, request, context):
+         """Missing associated documentation comment in .proto file."""
+         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+         context.set_details('Method not implemented!')
+         raise NotImplementedError('Method not implemented!')
+
+     def MemoryProfiling(self, request, context):
+         """Missing associated documentation comment in .proto file."""
+         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+         context.set_details('Method not implemented!')
+         raise NotImplementedError('Method not implemented!')
+
+
+ def add_ReporterServiceServicer_to_server(servicer, server):
+     rpc_method_handlers = {
+             'GetProfilingStats': grpc.unary_unary_rpc_method_handler(
+                     servicer.GetProfilingStats,
+                     request_deserializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.GetProfilingStatsRequest.FromString,
+                     response_serializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.GetProfilingStatsReply.SerializeToString,
+             ),
+             'ReportOCMetrics': grpc.unary_unary_rpc_method_handler(
+                     servicer.ReportOCMetrics,
+                     request_deserializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.ReportOCMetricsRequest.FromString,
+                     response_serializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.ReportOCMetricsReply.SerializeToString,
+             ),
+             'GetTraceback': grpc.unary_unary_rpc_method_handler(
+                     servicer.GetTraceback,
+                     request_deserializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.GetTracebackRequest.FromString,
+                     response_serializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.GetTracebackReply.SerializeToString,
+             ),
+             'CpuProfiling': grpc.unary_unary_rpc_method_handler(
+                     servicer.CpuProfiling,
+                     request_deserializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.CpuProfilingRequest.FromString,
+                     response_serializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.CpuProfilingReply.SerializeToString,
+             ),
+             'MemoryProfiling': grpc.unary_unary_rpc_method_handler(
+                     servicer.MemoryProfiling,
+                     request_deserializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.MemoryProfilingRequest.FromString,
+                     response_serializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.MemoryProfilingReply.SerializeToString,
+             ),
+     }
+     generic_handler = grpc.method_handlers_generic_handler(
+             'ray.rpc.ReporterService', rpc_method_handlers)
+     server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+ class ReporterService(object):
+     """Missing associated documentation comment in .proto file."""
+
+     @staticmethod
+     def GetProfilingStats(request,
+             target,
+             options=(),
+             channel_credentials=None,
+             call_credentials=None,
+             insecure=False,
+             compression=None,
+             wait_for_ready=None,
+             timeout=None,
+             metadata=None):
+         return grpc.experimental.unary_unary(request, target, '/ray.rpc.ReporterService/GetProfilingStats',
+             src_dot_ray_dot_protobuf_dot_reporter__pb2.GetProfilingStatsRequest.SerializeToString,
+             src_dot_ray_dot_protobuf_dot_reporter__pb2.GetProfilingStatsReply.FromString,
+             options, channel_credentials,
+             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+     @staticmethod
+     def ReportOCMetrics(request,
+             target,
+             options=(),
+             channel_credentials=None,
+             call_credentials=None,
+             insecure=False,
+             compression=None,
+             wait_for_ready=None,
+             timeout=None,
+             metadata=None):
+         return grpc.experimental.unary_unary(request, target, '/ray.rpc.ReporterService/ReportOCMetrics',
+             src_dot_ray_dot_protobuf_dot_reporter__pb2.ReportOCMetricsRequest.SerializeToString,
+             src_dot_ray_dot_protobuf_dot_reporter__pb2.ReportOCMetricsReply.FromString,
+             options, channel_credentials,
+             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+     @staticmethod
+     def GetTraceback(request,
+             target,
+             options=(),
+             channel_credentials=None,
+             call_credentials=None,
+             insecure=False,
+             compression=None,
+             wait_for_ready=None,
+             timeout=None,
+             metadata=None):
+         return grpc.experimental.unary_unary(request, target, '/ray.rpc.ReporterService/GetTraceback',
+             src_dot_ray_dot_protobuf_dot_reporter__pb2.GetTracebackRequest.SerializeToString,
+             src_dot_ray_dot_protobuf_dot_reporter__pb2.GetTracebackReply.FromString,
+             options, channel_credentials,
+             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+     @staticmethod
+     def CpuProfiling(request,
+             target,
+             options=(),
+             channel_credentials=None,
+             call_credentials=None,
+             insecure=False,
+             compression=None,
+             wait_for_ready=None,
+             timeout=None,
+             metadata=None):
+         return grpc.experimental.unary_unary(request, target, '/ray.rpc.ReporterService/CpuProfiling',
+             src_dot_ray_dot_protobuf_dot_reporter__pb2.CpuProfilingRequest.SerializeToString,
+             src_dot_ray_dot_protobuf_dot_reporter__pb2.CpuProfilingReply.FromString,
+             options, channel_credentials,
+             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+     @staticmethod
+     def MemoryProfiling(request,
+             target,
+             options=(),
+             channel_credentials=None,
+             call_credentials=None,
+             insecure=False,
+             compression=None,
+             wait_for_ready=None,
+             timeout=None,
+             metadata=None):
+         return grpc.experimental.unary_unary(request, target, '/ray.rpc.ReporterService/MemoryProfiling',
+             src_dot_ray_dot_protobuf_dot_reporter__pb2.MemoryProfilingRequest.SerializeToString,
+             src_dot_ray_dot_protobuf_dot_reporter__pb2.MemoryProfilingReply.FromString,
+             options, channel_credentials,
+             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+
+ class LogServiceStub(object):
+     """Missing associated documentation comment in .proto file."""
+
+     def __init__(self, channel):
+         """Constructor.
+
+         Args:
+             channel: A grpc.Channel.
+         """
+         self.ListLogs = channel.unary_unary(
+                 '/ray.rpc.LogService/ListLogs',
+                 request_serializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.ListLogsRequest.SerializeToString,
+                 response_deserializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.ListLogsReply.FromString,
+                 )
+         self.StreamLog = channel.unary_stream(
+                 '/ray.rpc.LogService/StreamLog',
+                 request_serializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.StreamLogRequest.SerializeToString,
+                 response_deserializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.StreamLogReply.FromString,
+                 )
+
+
+ class LogServiceServicer(object):
+     """Missing associated documentation comment in .proto file."""
+
+     def ListLogs(self, request, context):
+         """Missing associated documentation comment in .proto file."""
+         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+         context.set_details('Method not implemented!')
+         raise NotImplementedError('Method not implemented!')
+
+     def StreamLog(self, request, context):
+         """Missing associated documentation comment in .proto file."""
+         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+         context.set_details('Method not implemented!')
+         raise NotImplementedError('Method not implemented!')
+
+
+ def add_LogServiceServicer_to_server(servicer, server):
+     rpc_method_handlers = {
+             'ListLogs': grpc.unary_unary_rpc_method_handler(
+                     servicer.ListLogs,
+                     request_deserializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.ListLogsRequest.FromString,
+                     response_serializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.ListLogsReply.SerializeToString,
+             ),
+             'StreamLog': grpc.unary_stream_rpc_method_handler(
+                     servicer.StreamLog,
+                     request_deserializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.StreamLogRequest.FromString,
+                     response_serializer=src_dot_ray_dot_protobuf_dot_reporter__pb2.StreamLogReply.SerializeToString,
+             ),
+     }
+     generic_handler = grpc.method_handlers_generic_handler(
+             'ray.rpc.LogService', rpc_method_handlers)
+     server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+ class LogService(object):
+     """Missing associated documentation comment in .proto file."""
+
+     @staticmethod
+     def ListLogs(request,
+             target,
+             options=(),
+             channel_credentials=None,
+             call_credentials=None,
+             insecure=False,
+             compression=None,
+             wait_for_ready=None,
+             timeout=None,
+             metadata=None):
+         return grpc.experimental.unary_unary(request, target, '/ray.rpc.LogService/ListLogs',
+             src_dot_ray_dot_protobuf_dot_reporter__pb2.ListLogsRequest.SerializeToString,
+             src_dot_ray_dot_protobuf_dot_reporter__pb2.ListLogsReply.FromString,
+             options, channel_credentials,
+             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+     @staticmethod
+     def StreamLog(request,
+             target,
+             options=(),
+             channel_credentials=None,
+             call_credentials=None,
+             insecure=False,
+             compression=None,
+             wait_for_ready=None,
+             timeout=None,
+             metadata=None):
+         return grpc.experimental.unary_stream(request, target, '/ray.rpc.LogService/StreamLog',
+             src_dot_ray_dot_protobuf_dot_reporter__pb2.StreamLogRequest.SerializeToString,
+             src_dot_ray_dot_protobuf_dot_reporter__pb2.StreamLogReply.FromString,
+             options, channel_credentials,
+             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/resource_pb2_grpc.py ADDED
@@ -0,0 +1,4 @@
+ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+ """Client and server classes corresponding to protobuf-defined services."""
+ import grpc
+
infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/runtime_env_agent_pb2.py ADDED
@@ -0,0 +1,87 @@
+ # -*- coding: utf-8 -*-
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
+ # source: src/ray/protobuf/runtime_env_agent.proto
+ """Generated protocol buffer code."""
+ from google.protobuf import descriptor as _descriptor
+ from google.protobuf import descriptor_pool as _descriptor_pool
+ from google.protobuf import message as _message
+ from google.protobuf import reflection as _reflection
+ from google.protobuf import symbol_database as _symbol_database
+ # @@protoc_insertion_point(imports)
+
+ _sym_db = _symbol_database.Default()
+
+
+ from . import runtime_env_common_pb2 as src_dot_ray_dot_protobuf_dot_runtime__env__common__pb2
+ from . import agent_manager_pb2 as src_dot_ray_dot_protobuf_dot_agent__manager__pb2
+
+
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n(src/ray/protobuf/runtime_env_agent.proto\x12\x07ray.rpc\x1a)src/ray/protobuf/runtime_env_common.proto\x1a$src/ray/protobuf/agent_manager.proto\"\xdb\x01\n\x1cGetOrCreateRuntimeEnvRequest\x12\x34\n\x16serialized_runtime_env\x18\x01 \x01(\tR\x14serializedRuntimeEnv\x12G\n\x12runtime_env_config\x18\x02 \x01(\x0b\x32\x19.ray.rpc.RuntimeEnvConfigR\x10runtimeEnvConfig\x12\x15\n\x06job_id\x18\x03 \x01(\x0cR\x05jobId\x12%\n\x0esource_process\x18\x04 \x01(\tR\rsourceProcess\"\xb7\x01\n\x1aGetOrCreateRuntimeEnvReply\x12/\n\x06status\x18\x01 \x01(\x0e\x32\x17.ray.rpc.AgentRpcStatusR\x06status\x12#\n\rerror_message\x18\x02 \x01(\tR\x0c\x65rrorMessage\x12\x43\n\x1eserialized_runtime_env_context\x18\x03 \x01(\tR\x1bserializedRuntimeEnvContext\"\x80\x01\n!DeleteRuntimeEnvIfPossibleRequest\x12\x34\n\x16serialized_runtime_env\x18\x01 \x01(\tR\x14serializedRuntimeEnv\x12%\n\x0esource_process\x18\x02 \x01(\tR\rsourceProcess\"w\n\x1f\x44\x65leteRuntimeEnvIfPossibleReply\x12/\n\x06status\x18\x01 \x01(\x0e\x32\x17.ray.rpc.AgentRpcStatusR\x06status\x12#\n\rerror_message\x18\x02 \x01(\tR\x0c\x65rrorMessage\"@\n\x19GetRuntimeEnvsInfoRequest\x12\x19\n\x05limit\x18\x01 \x01(\x03H\x00R\x05limit\x88\x01\x01\x42\x08\n\x06_limit\"w\n\x17GetRuntimeEnvsInfoReply\x12\x46\n\x12runtime_env_states\x18\x01 \x03(\x0b\x32\x18.ray.rpc.RuntimeEnvStateR\x10runtimeEnvStates\x12\x14\n\x05total\x18\x02 \x01(\x03R\x05totalB\x03\xf8\x01\x01\x62\x06proto3')
+
+
+
+ _GETORCREATERUNTIMEENVREQUEST = DESCRIPTOR.message_types_by_name['GetOrCreateRuntimeEnvRequest']
+ _GETORCREATERUNTIMEENVREPLY = DESCRIPTOR.message_types_by_name['GetOrCreateRuntimeEnvReply']
+ _DELETERUNTIMEENVIFPOSSIBLEREQUEST = DESCRIPTOR.message_types_by_name['DeleteRuntimeEnvIfPossibleRequest']
+ _DELETERUNTIMEENVIFPOSSIBLEREPLY = DESCRIPTOR.message_types_by_name['DeleteRuntimeEnvIfPossibleReply']
+ _GETRUNTIMEENVSINFOREQUEST = DESCRIPTOR.message_types_by_name['GetRuntimeEnvsInfoRequest']
+ _GETRUNTIMEENVSINFOREPLY = DESCRIPTOR.message_types_by_name['GetRuntimeEnvsInfoReply']
+ GetOrCreateRuntimeEnvRequest = _reflection.GeneratedProtocolMessageType('GetOrCreateRuntimeEnvRequest', (_message.Message,), {
+   'DESCRIPTOR' : _GETORCREATERUNTIMEENVREQUEST,
+   '__module__' : 'src.ray.protobuf.runtime_env_agent_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.GetOrCreateRuntimeEnvRequest)
+   })
+ _sym_db.RegisterMessage(GetOrCreateRuntimeEnvRequest)
+
+ GetOrCreateRuntimeEnvReply = _reflection.GeneratedProtocolMessageType('GetOrCreateRuntimeEnvReply', (_message.Message,), {
+   'DESCRIPTOR' : _GETORCREATERUNTIMEENVREPLY,
+   '__module__' : 'src.ray.protobuf.runtime_env_agent_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.GetOrCreateRuntimeEnvReply)
+   })
+ _sym_db.RegisterMessage(GetOrCreateRuntimeEnvReply)
+
+ DeleteRuntimeEnvIfPossibleRequest = _reflection.GeneratedProtocolMessageType('DeleteRuntimeEnvIfPossibleRequest', (_message.Message,), {
+   'DESCRIPTOR' : _DELETERUNTIMEENVIFPOSSIBLEREQUEST,
+   '__module__' : 'src.ray.protobuf.runtime_env_agent_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.DeleteRuntimeEnvIfPossibleRequest)
+   })
+ _sym_db.RegisterMessage(DeleteRuntimeEnvIfPossibleRequest)
+
+ DeleteRuntimeEnvIfPossibleReply = _reflection.GeneratedProtocolMessageType('DeleteRuntimeEnvIfPossibleReply', (_message.Message,), {
+   'DESCRIPTOR' : _DELETERUNTIMEENVIFPOSSIBLEREPLY,
+   '__module__' : 'src.ray.protobuf.runtime_env_agent_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.DeleteRuntimeEnvIfPossibleReply)
+   })
+ _sym_db.RegisterMessage(DeleteRuntimeEnvIfPossibleReply)
+
+ GetRuntimeEnvsInfoRequest = _reflection.GeneratedProtocolMessageType('GetRuntimeEnvsInfoRequest', (_message.Message,), {
+   'DESCRIPTOR' : _GETRUNTIMEENVSINFOREQUEST,
+   '__module__' : 'src.ray.protobuf.runtime_env_agent_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.GetRuntimeEnvsInfoRequest)
+   })
+ _sym_db.RegisterMessage(GetRuntimeEnvsInfoRequest)
+
+ GetRuntimeEnvsInfoReply = _reflection.GeneratedProtocolMessageType('GetRuntimeEnvsInfoReply', (_message.Message,), {
+   'DESCRIPTOR' : _GETRUNTIMEENVSINFOREPLY,
+   '__module__' : 'src.ray.protobuf.runtime_env_agent_pb2'
+   # @@protoc_insertion_point(class_scope:ray.rpc.GetRuntimeEnvsInfoReply)
+   })
+ _sym_db.RegisterMessage(GetRuntimeEnvsInfoReply)
+
+ if _descriptor._USE_C_DESCRIPTORS == False:
+
+   DESCRIPTOR._options = None
+   DESCRIPTOR._serialized_options = b'\370\001\001'
+   _GETORCREATERUNTIMEENVREQUEST._serialized_start=135
+   _GETORCREATERUNTIMEENVREQUEST._serialized_end=354
+   _GETORCREATERUNTIMEENVREPLY._serialized_start=357
+   _GETORCREATERUNTIMEENVREPLY._serialized_end=540
+   _DELETERUNTIMEENVIFPOSSIBLEREQUEST._serialized_start=543
+   _DELETERUNTIMEENVIFPOSSIBLEREQUEST._serialized_end=671
+   _DELETERUNTIMEENVIFPOSSIBLEREPLY._serialized_start=673
+   _DELETERUNTIMEENVIFPOSSIBLEREPLY._serialized_end=792
+   _GETRUNTIMEENVSINFOREQUEST._serialized_start=794
+   _GETRUNTIMEENVSINFOREQUEST._serialized_end=858
+   _GETRUNTIMEENVSINFOREPLY._serialized_start=860
+   _GETRUNTIMEENVSINFOREPLY._serialized_end=979
+ # @@protoc_insertion_point(module_scope)
infer_4_37_2/lib/python3.10/site-packages/ray/core/generated/runtime_env_common_pb2_grpc.py ADDED
@@ -0,0 +1,4 @@
+ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+ """Client and server classes corresponding to protobuf-defined services."""
+ import grpc
+
janus/lib/python3.10/_weakrefset.py ADDED
@@ -0,0 +1,206 @@
+ # Access WeakSet through the weakref module.
+ # This code is separated-out because it is needed
+ # by abc.py to load everything else at startup.
+
+ from _weakref import ref
+ from types import GenericAlias
+
+ __all__ = ['WeakSet']
+
+
+ class _IterationGuard:
+     # This context manager registers itself in the current iterators of the
+     # weak container, such as to delay all removals until the context manager
+     # exits.
+     # This technique should be relatively thread-safe (since sets are).
+
+     def __init__(self, weakcontainer):
+         # Don't create cycles
+         self.weakcontainer = ref(weakcontainer)
+
+     def __enter__(self):
+         w = self.weakcontainer()
+         if w is not None:
+             w._iterating.add(self)
+         return self
+
+     def __exit__(self, e, t, b):
+         w = self.weakcontainer()
+         if w is not None:
+             s = w._iterating
+             s.remove(self)
+             if not s:
+                 w._commit_removals()
+
+
+ class WeakSet:
+     def __init__(self, data=None):
+         self.data = set()
+         def _remove(item, selfref=ref(self)):
+             self = selfref()
+             if self is not None:
+                 if self._iterating:
+                     self._pending_removals.append(item)
+                 else:
+                     self.data.discard(item)
+         self._remove = _remove
+         # A list of keys to be removed
+         self._pending_removals = []
+         self._iterating = set()
+         if data is not None:
+             self.update(data)
+
+     def _commit_removals(self):
+         pop = self._pending_removals.pop
+         discard = self.data.discard
+         while True:
+             try:
+                 item = pop()
+             except IndexError:
+                 return
+             discard(item)
+
+     def __iter__(self):
+         with _IterationGuard(self):
+             for itemref in self.data:
+                 item = itemref()
+                 if item is not None:
+                     # Caveat: the iterator will keep a strong reference to
+                     # `item` until it is resumed or closed.
+                     yield item
+
+     def __len__(self):
+         return len(self.data) - len(self._pending_removals)
+
+     def __contains__(self, item):
+         try:
+             wr = ref(item)
+         except TypeError:
+             return False
+         return wr in self.data
+
+     def __reduce__(self):
+         return (self.__class__, (list(self),),
+                 getattr(self, '__dict__', None))
+
+     def add(self, item):
+         if self._pending_removals:
+             self._commit_removals()
+         self.data.add(ref(item, self._remove))
+
+     def clear(self):
+         if self._pending_removals:
+             self._commit_removals()
+         self.data.clear()
+
+     def copy(self):
+         return self.__class__(self)
+
+     def pop(self):
+         if self._pending_removals:
+             self._commit_removals()
+         while True:
+             try:
+                 itemref = self.data.pop()
+             except KeyError:
+                 raise KeyError('pop from empty WeakSet') from None
+             item = itemref()
+             if item is not None:
+                 return item
+
+     def remove(self, item):
+         if self._pending_removals:
+             self._commit_removals()
+         self.data.remove(ref(item))
+
+     def discard(self, item):
+         if self._pending_removals:
+             self._commit_removals()
+         self.data.discard(ref(item))
+
+     def update(self, other):
+         if self._pending_removals:
+             self._commit_removals()
+         for element in other:
+             self.add(element)
+
+     def __ior__(self, other):
+         self.update(other)
+         return self
+
+     def difference(self, other):
+         newset = self.copy()
+         newset.difference_update(other)
+         return newset
+     __sub__ = difference
+
+     def difference_update(self, other):
+         self.__isub__(other)
+     def __isub__(self, other):
+         if self._pending_removals:
+             self._commit_removals()
+         if self is other:
+             self.data.clear()
+         else:
+             self.data.difference_update(ref(item) for item in other)
+         return self
+
+     def intersection(self, other):
+         return self.__class__(item for item in other if item in self)
+     __and__ = intersection
+
+     def intersection_update(self, other):
+         self.__iand__(other)
+     def __iand__(self, other):
+         if self._pending_removals:
+             self._commit_removals()
+         self.data.intersection_update(ref(item) for item in other)
+         return self
+
+     def issubset(self, other):
+         return self.data.issubset(ref(item) for item in other)
+     __le__ = issubset
+
+     def __lt__(self, other):
+         return self.data < set(map(ref, other))
+
+     def issuperset(self, other):
+         return self.data.issuperset(ref(item) for item in other)
+     __ge__ = issuperset
+
+     def __gt__(self, other):
+         return self.data > set(map(ref, other))
+
+     def __eq__(self, other):
+         if not isinstance(other, self.__class__):
+             return NotImplemented
+         return self.data == set(map(ref, other))
+
+     def symmetric_difference(self, other):
+         newset = self.copy()
+         newset.symmetric_difference_update(other)
+         return newset
+     __xor__ = symmetric_difference
+
+     def symmetric_difference_update(self, other):
+         self.__ixor__(other)
+     def __ixor__(self, other):
+         if self._pending_removals:
+             self._commit_removals()
+         if self is other:
+             self.data.clear()
+         else:
+             self.data.symmetric_difference_update(ref(item, self._remove) for item in other)
+         return self
+
+     def union(self, other):
+         return self.__class__(e for s in (self, other) for e in s)
+     __or__ = union
+
+     def isdisjoint(self, other):
+         return len(self.intersection(other)) == 0
+
+     def __repr__(self):
+         return repr(self.data)
+
+     __class_getitem__ = classmethod(GenericAlias)
janus/lib/python3.10/asyncore.py ADDED
@@ -0,0 +1,649 @@
+ # -*- Mode: Python -*-
+ #   Id: asyncore.py,v 2.51 2000/09/07 22:29:26 rushing Exp
+ #   Author: Sam Rushing <rushing@nightmare.com>
+
+ # ======================================================================
+ # Copyright 1996 by Sam Rushing
+ #
+ #                         All Rights Reserved
+ #
+ # Permission to use, copy, modify, and distribute this software and
+ # its documentation for any purpose and without fee is hereby
+ # granted, provided that the above copyright notice appear in all
+ # copies and that both that copyright notice and this permission
+ # notice appear in supporting documentation, and that the name of Sam
+ # Rushing not be used in advertising or publicity pertaining to
+ # distribution of the software without specific, written prior
+ # permission.
+ #
+ # SAM RUSHING DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
+ # INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
+ # NO EVENT SHALL SAM RUSHING BE LIABLE FOR ANY SPECIAL, INDIRECT OR
+ # CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+ # OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
+ # NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+ # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ # ======================================================================
+
+ """Basic infrastructure for asynchronous socket service clients and servers.
+
+ There are only two ways to have a program on a single processor do "more
+ than one thing at a time". Multi-threaded programming is the simplest and
+ most popular way to do it, but there is another very different technique,
+ that lets you have nearly all the advantages of multi-threading, without
+ actually using multiple threads. it's really only practical if your program
+ is largely I/O bound. If your program is CPU bound, then pre-emptive
+ scheduled threads are probably what you really need. Network servers are
+ rarely CPU-bound, however.
+
+ If your operating system supports the select() system call in its I/O
+ library (and nearly all do), then you can use it to juggle multiple
+ communication channels at once; doing other work while your I/O is taking
+ place in the "background." Although this strategy can seem strange and
+ complex, especially at first, it is in many ways easier to understand and
+ control than multi-threaded programming. The module documented here solves
+ many of the difficult problems for you, making the task of building
+ sophisticated high-performance network servers and clients a snap.
+ """
+
+ import select
+ import socket
+ import sys
+ import time
+ import warnings
+
+ import os
+ from errno import EALREADY, EINPROGRESS, EWOULDBLOCK, ECONNRESET, EINVAL, \
+      ENOTCONN, ESHUTDOWN, EISCONN, EBADF, ECONNABORTED, EPIPE, EAGAIN, \
+      errorcode
+
+ warnings.warn(
+     'The asyncore module is deprecated and will be removed in Python 3.12. '
+     'The recommended replacement is asyncio',
+     DeprecationWarning,
+     stacklevel=2)
+
+
+ _DISCONNECTED = frozenset({ECONNRESET, ENOTCONN, ESHUTDOWN, ECONNABORTED, EPIPE,
+                            EBADF})
+
+ try:
+     socket_map
+ except NameError:
+     socket_map = {}
+
+ def _strerror(err):
+     try:
+         return os.strerror(err)
+     except (ValueError, OverflowError, NameError):
+         if err in errorcode:
+             return errorcode[err]
+         return "Unknown error %s" %err
+
+ class ExitNow(Exception):
+     pass
+
+ _reraised_exceptions = (ExitNow, KeyboardInterrupt, SystemExit)
+
+ def read(obj):
+     try:
+         obj.handle_read_event()
+     except _reraised_exceptions:
+         raise
+     except:
+         obj.handle_error()
+
+ def write(obj):
+     try:
+         obj.handle_write_event()
+     except _reraised_exceptions:
+         raise
+     except:
+         obj.handle_error()
+
+ def _exception(obj):
+     try:
+         obj.handle_expt_event()
+     except _reraised_exceptions:
+         raise
+     except:
+         obj.handle_error()
+
+ def readwrite(obj, flags):
+     try:
+         if flags & select.POLLIN:
+             obj.handle_read_event()
+         if flags & select.POLLOUT:
+             obj.handle_write_event()
+         if flags & select.POLLPRI:
+             obj.handle_expt_event()
+         if flags & (select.POLLHUP | select.POLLERR | select.POLLNVAL):
+             obj.handle_close()
+     except OSError as e:
+         if e.errno not in _DISCONNECTED:
+             obj.handle_error()
+         else:
+             obj.handle_close()
+     except _reraised_exceptions:
+         raise
+     except:
+         obj.handle_error()
+
+ def poll(timeout=0.0, map=None):
+     if map is None:
+         map = socket_map
+     if map:
+         r = []; w = []; e = []
+         for fd, obj in list(map.items()):
+             is_r = obj.readable()
139
+ is_w = obj.writable()
140
+ if is_r:
141
+ r.append(fd)
142
+ # accepting sockets should not be writable
143
+ if is_w and not obj.accepting:
144
+ w.append(fd)
145
+ if is_r or is_w:
146
+ e.append(fd)
147
+ if [] == r == w == e:
148
+ time.sleep(timeout)
149
+ return
150
+
151
+ r, w, e = select.select(r, w, e, timeout)
152
+
153
+ for fd in r:
154
+ obj = map.get(fd)
155
+ if obj is None:
156
+ continue
157
+ read(obj)
158
+
159
+ for fd in w:
160
+ obj = map.get(fd)
161
+ if obj is None:
162
+ continue
163
+ write(obj)
164
+
165
+ for fd in e:
166
+ obj = map.get(fd)
167
+ if obj is None:
168
+ continue
169
+ _exception(obj)
170
+
171
+ def poll2(timeout=0.0, map=None):
172
+ # Use the poll() support added to the select module in Python 2.0
173
+ if map is None:
174
+ map = socket_map
175
+ if timeout is not None:
176
+ # timeout is in milliseconds
177
+ timeout = int(timeout*1000)
178
+ pollster = select.poll()
179
+ if map:
180
+ for fd, obj in list(map.items()):
181
+ flags = 0
182
+ if obj.readable():
183
+ flags |= select.POLLIN | select.POLLPRI
184
+ # accepting sockets should not be writable
185
+ if obj.writable() and not obj.accepting:
186
+ flags |= select.POLLOUT
187
+ if flags:
188
+ pollster.register(fd, flags)
189
+
190
+ r = pollster.poll(timeout)
191
+ for fd, flags in r:
192
+ obj = map.get(fd)
193
+ if obj is None:
194
+ continue
195
+ readwrite(obj, flags)
196
+
197
+ poll3 = poll2 # Alias for backward compatibility
198
+
199
+ def loop(timeout=30.0, use_poll=False, map=None, count=None):
200
+ if map is None:
201
+ map = socket_map
202
+
203
+ if use_poll and hasattr(select, 'poll'):
204
+ poll_fun = poll2
205
+ else:
206
+ poll_fun = poll
207
+
208
+ if count is None:
209
+ while map:
210
+ poll_fun(timeout, map)
211
+
212
+ else:
213
+ while map and count > 0:
214
+ poll_fun(timeout, map)
215
+ count = count - 1
216
+
217
+ class dispatcher:
218
+
219
+ debug = False
220
+ connected = False
221
+ accepting = False
222
+ connecting = False
223
+ closing = False
224
+ addr = None
225
+ ignore_log_types = frozenset({'warning'})
226
+
227
+ def __init__(self, sock=None, map=None):
228
+ if map is None:
229
+ self._map = socket_map
230
+ else:
231
+ self._map = map
232
+
233
+ self._fileno = None
234
+
235
+ if sock:
236
+ # Set to nonblocking just to make sure for cases where we
237
+ # get a socket from a blocking source.
238
+ sock.setblocking(False)
239
+ self.set_socket(sock, map)
240
+ self.connected = True
241
+ # The constructor no longer requires that the socket
242
+ # passed be connected.
243
+ try:
244
+ self.addr = sock.getpeername()
245
+ except OSError as err:
246
+ if err.errno in (ENOTCONN, EINVAL):
247
+ # To handle the case where we got an unconnected
248
+ # socket.
249
+ self.connected = False
250
+ else:
251
+ # The socket is broken in some unknown way, alert
252
+ # the user and remove it from the map (to prevent
253
+ # polling of broken sockets).
254
+ self.del_channel(map)
255
+ raise
256
+ else:
257
+ self.socket = None
258
+
259
+ def __repr__(self):
260
+ status = [self.__class__.__module__+"."+self.__class__.__qualname__]
261
+ if self.accepting and self.addr:
262
+ status.append('listening')
263
+ elif self.connected:
264
+ status.append('connected')
265
+ if self.addr is not None:
266
+ try:
267
+ status.append('%s:%d' % self.addr)
268
+ except TypeError:
269
+ status.append(repr(self.addr))
270
+ return '<%s at %#x>' % (' '.join(status), id(self))
271
+
272
+ def add_channel(self, map=None):
273
+ #self.log_info('adding channel %s' % self)
274
+ if map is None:
275
+ map = self._map
276
+ map[self._fileno] = self
277
+
278
+ def del_channel(self, map=None):
279
+ fd = self._fileno
280
+ if map is None:
281
+ map = self._map
282
+ if fd in map:
283
+ #self.log_info('closing channel %d:%s' % (fd, self))
284
+ del map[fd]
285
+ self._fileno = None
286
+
287
+ def create_socket(self, family=socket.AF_INET, type=socket.SOCK_STREAM):
288
+ self.family_and_type = family, type
289
+ sock = socket.socket(family, type)
290
+ sock.setblocking(False)
291
+ self.set_socket(sock)
292
+
293
+ def set_socket(self, sock, map=None):
294
+ self.socket = sock
295
+ self._fileno = sock.fileno()
296
+ self.add_channel(map)
297
+
298
+ def set_reuse_addr(self):
299
+ # try to re-use a server port if possible
300
+ try:
301
+ self.socket.setsockopt(
302
+ socket.SOL_SOCKET, socket.SO_REUSEADDR,
303
+ self.socket.getsockopt(socket.SOL_SOCKET,
304
+ socket.SO_REUSEADDR) | 1
305
+ )
306
+ except OSError:
307
+ pass
308
+
309
+ # ==================================================
310
+ # predicates for select()
311
+ # these are used as filters for the lists of sockets
312
+ # to pass to select().
313
+ # ==================================================
314
+
315
+ def readable(self):
316
+ return True
317
+
318
+ def writable(self):
319
+ return True
320
+
321
+ # ==================================================
322
+ # socket object methods.
323
+ # ==================================================
324
+
325
+ def listen(self, num):
326
+ self.accepting = True
327
+ if os.name == 'nt' and num > 5:
328
+ num = 5
329
+ return self.socket.listen(num)
330
+
331
+ def bind(self, addr):
332
+ self.addr = addr
333
+ return self.socket.bind(addr)
334
+
335
+ def connect(self, address):
336
+ self.connected = False
337
+ self.connecting = True
338
+ err = self.socket.connect_ex(address)
339
+ if err in (EINPROGRESS, EALREADY, EWOULDBLOCK) \
340
+ or err == EINVAL and os.name == 'nt':
341
+ self.addr = address
342
+ return
343
+ if err in (0, EISCONN):
344
+ self.addr = address
345
+ self.handle_connect_event()
346
+ else:
347
+ raise OSError(err, errorcode[err])
348
+
349
+ def accept(self):
350
+ # XXX can return either an address pair or None
351
+ try:
352
+ conn, addr = self.socket.accept()
353
+ except TypeError:
354
+ return None
355
+ except OSError as why:
356
+ if why.errno in (EWOULDBLOCK, ECONNABORTED, EAGAIN):
357
+ return None
358
+ else:
359
+ raise
360
+ else:
361
+ return conn, addr
362
+
363
+ def send(self, data):
364
+ try:
365
+ result = self.socket.send(data)
366
+ return result
367
+ except OSError as why:
368
+ if why.errno == EWOULDBLOCK:
369
+ return 0
370
+ elif why.errno in _DISCONNECTED:
371
+ self.handle_close()
372
+ return 0
373
+ else:
374
+ raise
375
+
376
+ def recv(self, buffer_size):
377
+ try:
378
+ data = self.socket.recv(buffer_size)
379
+ if not data:
380
+ # a closed connection is indicated by signaling
381
+ # a read condition, and having recv() return 0.
382
+ self.handle_close()
383
+ return b''
384
+ else:
385
+ return data
386
+ except OSError as why:
387
+ # winsock sometimes raises ENOTCONN
388
+ if why.errno in _DISCONNECTED:
389
+ self.handle_close()
390
+ return b''
391
+ else:
392
+ raise
393
+
394
+ def close(self):
395
+ self.connected = False
396
+ self.accepting = False
397
+ self.connecting = False
398
+ self.del_channel()
399
+ if self.socket is not None:
400
+ try:
401
+ self.socket.close()
402
+ except OSError as why:
403
+ if why.errno not in (ENOTCONN, EBADF):
404
+ raise
405
+
406
+ # log and log_info may be overridden to provide more sophisticated
407
+ # logging and warning methods. In general, log is for 'hit' logging
408
+ # and 'log_info' is for informational, warning and error logging.
409
+
410
+ def log(self, message):
411
+ sys.stderr.write('log: %s\n' % str(message))
412
+
413
+ def log_info(self, message, type='info'):
414
+ if type not in self.ignore_log_types:
415
+ print('%s: %s' % (type, message))
416
+
417
+ def handle_read_event(self):
418
+ if self.accepting:
419
+ # accepting sockets are never connected, they "spawn" new
420
+ # sockets that are connected
421
+ self.handle_accept()
422
+ elif not self.connected:
423
+ if self.connecting:
424
+ self.handle_connect_event()
425
+ self.handle_read()
426
+ else:
427
+ self.handle_read()
428
+
429
+ def handle_connect_event(self):
430
+ err = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
431
+ if err != 0:
432
+ raise OSError(err, _strerror(err))
433
+ self.handle_connect()
434
+ self.connected = True
435
+ self.connecting = False
436
+
437
+ def handle_write_event(self):
438
+ if self.accepting:
439
+ # Accepting sockets shouldn't get a write event.
440
+ # We will pretend it didn't happen.
441
+ return
442
+
443
+ if not self.connected:
444
+ if self.connecting:
445
+ self.handle_connect_event()
446
+ self.handle_write()
447
+
448
+ def handle_expt_event(self):
449
+ # handle_expt_event() is called if there might be an error on the
450
+ # socket, or if there is OOB data
451
+ # check for the error condition first
452
+ err = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
453
+ if err != 0:
454
+ # we can get here when select.select() says that there is an
455
+ # exceptional condition on the socket
456
+ # since there is an error, we'll go ahead and close the socket
457
+ # like we would in a subclassed handle_read() that received no
458
+ # data
459
+ self.handle_close()
460
+ else:
461
+ self.handle_expt()
462
+
463
+ def handle_error(self):
464
+ nil, t, v, tbinfo = compact_traceback()
465
+
466
+ # sometimes a user repr method will crash.
467
+ try:
468
+ self_repr = repr(self)
469
+ except:
470
+ self_repr = '<__repr__(self) failed for object at %0x>' % id(self)
471
+
472
+ self.log_info(
473
+ 'uncaptured python exception, closing channel %s (%s:%s %s)' % (
474
+ self_repr,
475
+ t,
476
+ v,
477
+ tbinfo
478
+ ),
479
+ 'error'
480
+ )
481
+ self.handle_close()
482
+
483
+ def handle_expt(self):
484
+ self.log_info('unhandled incoming priority event', 'warning')
485
+
486
+ def handle_read(self):
487
+ self.log_info('unhandled read event', 'warning')
488
+
489
+ def handle_write(self):
490
+ self.log_info('unhandled write event', 'warning')
491
+
492
+ def handle_connect(self):
493
+ self.log_info('unhandled connect event', 'warning')
494
+
495
+ def handle_accept(self):
496
+ pair = self.accept()
497
+ if pair is not None:
498
+ self.handle_accepted(*pair)
499
+
500
+ def handle_accepted(self, sock, addr):
501
+ sock.close()
502
+ self.log_info('unhandled accepted event', 'warning')
503
+
504
+ def handle_close(self):
505
+ self.log_info('unhandled close event', 'warning')
506
+ self.close()
507
+
508
+ # ---------------------------------------------------------------------------
509
+ # adds simple buffered output capability, useful for simple clients.
510
+ # [for more sophisticated usage use asynchat.async_chat]
511
+ # ---------------------------------------------------------------------------
512
+
513
+ class dispatcher_with_send(dispatcher):
514
+
515
+ def __init__(self, sock=None, map=None):
516
+ dispatcher.__init__(self, sock, map)
517
+ self.out_buffer = b''
518
+
519
+ def initiate_send(self):
520
+ num_sent = 0
521
+ num_sent = dispatcher.send(self, self.out_buffer[:65536])
522
+ self.out_buffer = self.out_buffer[num_sent:]
523
+
524
+ def handle_write(self):
525
+ self.initiate_send()
526
+
527
+ def writable(self):
528
+ return (not self.connected) or len(self.out_buffer)
529
+
530
+ def send(self, data):
531
+ if self.debug:
532
+ self.log_info('sending %s' % repr(data))
533
+ self.out_buffer = self.out_buffer + data
534
+ self.initiate_send()
535
+
536
+ # ---------------------------------------------------------------------------
537
+ # used for debugging.
538
+ # ---------------------------------------------------------------------------
539
+
540
+ def compact_traceback():
541
+ t, v, tb = sys.exc_info()
542
+ tbinfo = []
543
+ if not tb: # Must have a traceback
544
+ raise AssertionError("traceback does not exist")
545
+ while tb:
546
+ tbinfo.append((
547
+ tb.tb_frame.f_code.co_filename,
548
+ tb.tb_frame.f_code.co_name,
549
+ str(tb.tb_lineno)
550
+ ))
551
+ tb = tb.tb_next
552
+
553
+ # just to be safe
554
+ del tb
555
+
556
+ file, function, line = tbinfo[-1]
557
+ info = ' '.join(['[%s|%s|%s]' % x for x in tbinfo])
558
+ return (file, function, line), t, v, info
559
+
560
+ def close_all(map=None, ignore_all=False):
561
+ if map is None:
562
+ map = socket_map
563
+ for x in list(map.values()):
564
+ try:
565
+ x.close()
566
+ except OSError as x:
567
+ if x.errno == EBADF:
568
+ pass
569
+ elif not ignore_all:
570
+ raise
571
+ except _reraised_exceptions:
572
+ raise
573
+ except:
574
+ if not ignore_all:
575
+ raise
576
+ map.clear()
577
+
578
+ # Asynchronous File I/O:
579
+ #
580
+ # After a little research (reading man pages on various unixen, and
581
+ # digging through the linux kernel), I've determined that select()
582
+ # isn't meant for doing asynchronous file i/o.
583
+ # Heartening, though - reading linux/mm/filemap.c shows that linux
584
+ # supports asynchronous read-ahead. So _MOST_ of the time, the data
585
+ # will be sitting in memory for us already when we go to read it.
586
+ #
587
+ # What other OS's (besides NT) support async file i/o? [VMS?]
588
+ #
589
+ # Regardless, this is useful for pipes, and stdin/stdout...
590
+
591
+ if os.name == 'posix':
592
+ class file_wrapper:
593
+ # Here we override just enough to make a file
594
+ # look like a socket for the purposes of asyncore.
595
+ # The passed fd is automatically os.dup()'d
596
+
597
+ def __init__(self, fd):
598
+ self.fd = os.dup(fd)
599
+
600
+ def __del__(self):
601
+ if self.fd >= 0:
602
+ warnings.warn("unclosed file %r" % self, ResourceWarning,
603
+ source=self)
604
+ self.close()
605
+
606
+ def recv(self, *args):
607
+ return os.read(self.fd, *args)
608
+
609
+ def send(self, *args):
610
+ return os.write(self.fd, *args)
611
+
612
+ def getsockopt(self, level, optname, buflen=None):
613
+ if (level == socket.SOL_SOCKET and
614
+ optname == socket.SO_ERROR and
615
+ not buflen):
616
+ return 0
617
+ raise NotImplementedError("Only asyncore specific behaviour "
618
+ "implemented.")
619
+
620
+ read = recv
621
+ write = send
622
+
623
+ def close(self):
624
+ if self.fd < 0:
625
+ return
626
+ fd = self.fd
627
+ self.fd = -1
628
+ os.close(fd)
629
+
630
+ def fileno(self):
631
+ return self.fd
632
+
633
+ class file_dispatcher(dispatcher):
634
+
635
+ def __init__(self, fd, map=None):
636
+ dispatcher.__init__(self, None, map)
637
+ self.connected = True
638
+ try:
639
+ fd = fd.fileno()
640
+ except AttributeError:
641
+ pass
642
+ self.set_file(fd)
643
+ # set it to non-blocking mode
644
+ os.set_blocking(fd, False)
645
+
646
+ def set_file(self, fd):
647
+ self.socket = file_wrapper(fd)
648
+ self._fileno = self.socket.fileno()
649
+ self.add_channel()
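(Editorial note: a minimal echo-server sketch built from the dispatcher API above, not part of the uploaded file; the host and port are arbitrary, and asyncore itself is deprecated in favour of asyncio, as the warning at the top of the module states.)

    import asyncore
    import socket

    class EchoHandler(asyncore.dispatcher_with_send):
        def handle_read(self):
            data = self.recv(8192)      # echo back whatever arrived
            if data:
                self.send(data)

    class EchoServer(asyncore.dispatcher):
        def __init__(self, host, port):
            asyncore.dispatcher.__init__(self)
            self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
            self.set_reuse_addr()
            self.bind((host, port))
            self.listen(5)

        def handle_accepted(self, sock, addr):
            EchoHandler(sock)           # one handler per accepted connection

    EchoServer('localhost', 8007)
    asyncore.loop()                     # runs until the socket map is empty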
janus/lib/python3.10/bisect.py ADDED
@@ -0,0 +1,110 @@
+ """Bisection algorithms."""
+
+
+ def insort_right(a, x, lo=0, hi=None, *, key=None):
+     """Insert item x in list a, and keep it sorted assuming a is sorted.
+
+     If x is already in a, insert it to the right of the rightmost x.
+
+     Optional args lo (default 0) and hi (default len(a)) bound the
+     slice of a to be searched.
+     """
+     if key is None:
+         lo = bisect_right(a, x, lo, hi)
+     else:
+         lo = bisect_right(a, key(x), lo, hi, key=key)
+     a.insert(lo, x)
+
+
+ def bisect_right(a, x, lo=0, hi=None, *, key=None):
+     """Return the index where to insert item x in list a, assuming a is sorted.
+
+     The return value i is such that all e in a[:i] have e <= x, and all e in
+     a[i:] have e > x. So if x already appears in the list, a.insert(i, x) will
+     insert just after the rightmost x already there.
+
+     Optional args lo (default 0) and hi (default len(a)) bound the
+     slice of a to be searched.
+     """
+
+     if lo < 0:
+         raise ValueError('lo must be non-negative')
+     if hi is None:
+         hi = len(a)
+     # Note, the comparison uses "<" to match the
+     # __lt__() logic in list.sort() and in heapq.
+     if key is None:
+         while lo < hi:
+             mid = (lo + hi) // 2
+             if x < a[mid]:
+                 hi = mid
+             else:
+                 lo = mid + 1
+     else:
+         while lo < hi:
+             mid = (lo + hi) // 2
+             if x < key(a[mid]):
+                 hi = mid
+             else:
+                 lo = mid + 1
+     return lo
+
+
+ def insort_left(a, x, lo=0, hi=None, *, key=None):
+     """Insert item x in list a, and keep it sorted assuming a is sorted.
+
+     If x is already in a, insert it to the left of the leftmost x.
+
+     Optional args lo (default 0) and hi (default len(a)) bound the
+     slice of a to be searched.
+     """
+
+     if key is None:
+         lo = bisect_left(a, x, lo, hi)
+     else:
+         lo = bisect_left(a, key(x), lo, hi, key=key)
+     a.insert(lo, x)
+
+ def bisect_left(a, x, lo=0, hi=None, *, key=None):
+     """Return the index where to insert item x in list a, assuming a is sorted.
+
+     The return value i is such that all e in a[:i] have e < x, and all e in
+     a[i:] have e >= x. So if x already appears in the list, a.insert(i, x) will
+     insert just before the leftmost x already there.
+
+     Optional args lo (default 0) and hi (default len(a)) bound the
+     slice of a to be searched.
+     """
+
+     if lo < 0:
+         raise ValueError('lo must be non-negative')
+     if hi is None:
+         hi = len(a)
+     # Note, the comparison uses "<" to match the
+     # __lt__() logic in list.sort() and in heapq.
+     if key is None:
+         while lo < hi:
+             mid = (lo + hi) // 2
+             if a[mid] < x:
+                 lo = mid + 1
+             else:
+                 hi = mid
+     else:
+         while lo < hi:
+             mid = (lo + hi) // 2
+             if key(a[mid]) < x:
+                 lo = mid + 1
+             else:
+                 hi = mid
+     return lo
+
+
+ # Overwrite above definitions with a fast C implementation
+ try:
+     from _bisect import *
+ except ImportError:
+     pass
+
+ # Create aliases
+ bisect = bisect_right
+ insort = insort_right
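(Editorial note: a brief illustration of the left/right insertion points defined above, not part of the uploaded file.)

    from bisect import bisect_left, bisect_right, insort

    a = [1, 2, 2, 4]
    assert bisect_left(a, 2) == 1       # before the leftmost 2
    assert bisect_right(a, 2) == 3      # after the rightmost 2
    insort(a, 3)                        # insert while keeping a sorted
    assert a == [1, 2, 2, 3, 4]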
janus/lib/python3.10/configparser.py ADDED
@@ -0,0 +1,1368 @@
1
+ """Configuration file parser.
2
+
3
+ A configuration file consists of sections, lead by a "[section]" header,
4
+ and followed by "name: value" entries, with continuations and such in
5
+ the style of RFC 822.
6
+
7
+ Intrinsic defaults can be specified by passing them into the
8
+ ConfigParser constructor as a dictionary.
9
+
10
+ class:
11
+
12
+ ConfigParser -- responsible for parsing a list of
13
+ configuration files, and managing the parsed database.
14
+
15
+ methods:
16
+
17
+ __init__(defaults=None, dict_type=_default_dict, allow_no_value=False,
18
+ delimiters=('=', ':'), comment_prefixes=('#', ';'),
19
+ inline_comment_prefixes=None, strict=True,
20
+ empty_lines_in_values=True, default_section='DEFAULT',
21
+ interpolation=<unset>, converters=<unset>):
22
+
23
+ Create the parser. When `defaults` is given, it is initialized into the
24
+ dictionary or intrinsic defaults. The keys must be strings, the values
25
+ must be appropriate for %()s string interpolation.
26
+
27
+ When `dict_type` is given, it will be used to create the dictionary
28
+ objects for the list of sections, for the options within a section, and
29
+ for the default values.
30
+
31
+ When `delimiters` is given, it will be used as the set of substrings
32
+ that divide keys from values.
33
+
34
+ When `comment_prefixes` is given, it will be used as the set of
35
+ substrings that prefix comments in empty lines. Comments can be
36
+ indented.
37
+
38
+ When `inline_comment_prefixes` is given, it will be used as the set of
39
+ substrings that prefix comments in non-empty lines.
40
+
41
+ When `strict` is True, the parser won't allow for any section or option
42
+ duplicates while reading from a single source (file, string or
43
+ dictionary). Default is True.
44
+
45
+ When `empty_lines_in_values` is False (default: True), each empty line
46
+ marks the end of an option. Otherwise, internal empty lines of
47
+ a multiline option are kept as part of the value.
48
+
49
+ When `allow_no_value` is True (default: False), options without
50
+ values are accepted; the value presented for these is None.
51
+
52
+ When `default_section` is given, the name of the special section is
53
+ named accordingly. By default it is called ``"DEFAULT"`` but this can
54
+ be customized to point to any other valid section name. Its current
55
+ value can be retrieved using the ``parser_instance.default_section``
56
+ attribute and may be modified at runtime.
57
+
58
+ When `interpolation` is given, it should be an Interpolation subclass
59
+ instance. It will be used as the handler for option value
60
+ pre-processing when using getters. RawConfigParser objects don't do
61
+ any sort of interpolation, whereas ConfigParser uses an instance of
62
+ BasicInterpolation. The library also provides a ``zc.buildbot``
63
+ inspired ExtendedInterpolation implementation.
64
+
65
+ When `converters` is given, it should be a dictionary where each key
66
+ represents the name of a type converter and each value is a callable
67
+ implementing the conversion from string to the desired datatype. Every
68
+ converter gets its corresponding get*() method on the parser object and
69
+ section proxies.
70
+
71
+ sections()
72
+ Return all the configuration section names, sans DEFAULT.
73
+
74
+ has_section(section)
75
+ Return whether the given section exists.
76
+
77
+ has_option(section, option)
78
+ Return whether the given option exists in the given section.
79
+
80
+ options(section)
81
+ Return list of configuration options for the named section.
82
+
83
+ read(filenames, encoding=None)
84
+ Read and parse the iterable of named configuration files, given by
85
+ name. A single filename is also allowed. Non-existing files
86
+ are ignored. Return list of successfully read files.
87
+
88
+ read_file(f, filename=None)
89
+ Read and parse one configuration file, given as a file object.
90
+ The filename defaults to f.name; it is only used in error
91
+ messages (if f has no `name` attribute, the string `<???>` is used).
92
+
93
+ read_string(string)
94
+ Read configuration from a given string.
95
+
96
+ read_dict(dictionary)
97
+ Read configuration from a dictionary. Keys are section names,
98
+ values are dictionaries with keys and values that should be present
99
+ in the section. If the used dictionary type preserves order, sections
100
+ and their keys will be added in order. Values are automatically
101
+ converted to strings.
102
+
103
+ get(section, option, raw=False, vars=None, fallback=_UNSET)
104
+ Return a string value for the named option. All % interpolations are
105
+ expanded in the return values, based on the defaults passed into the
106
+ constructor and the DEFAULT section. Additional substitutions may be
107
+ provided using the `vars` argument, which must be a dictionary whose
108
+ contents override any pre-existing defaults. If `option` is a key in
109
+ `vars`, the value from `vars` is used.
110
+
111
+ getint(section, options, raw=False, vars=None, fallback=_UNSET)
112
+ Like get(), but convert value to an integer.
113
+
114
+ getfloat(section, options, raw=False, vars=None, fallback=_UNSET)
115
+ Like get(), but convert value to a float.
116
+
117
+ getboolean(section, options, raw=False, vars=None, fallback=_UNSET)
118
+ Like get(), but convert value to a boolean (currently case
119
+ insensitively defined as 0, false, no, off for False, and 1, true,
120
+ yes, on for True). Returns False or True.
121
+
122
+ items(section=_UNSET, raw=False, vars=None)
123
+ If section is given, return a list of tuples with (name, value) for
124
+ each option in the section. Otherwise, return a list of tuples with
125
+ (section_name, section_proxy) for each section, including DEFAULTSECT.
126
+
127
+ remove_section(section)
128
+ Remove the given file section and all its options.
129
+
130
+ remove_option(section, option)
131
+ Remove the given option from the given section.
132
+
133
+ set(section, option, value)
134
+ Set the given option.
135
+
136
+ write(fp, space_around_delimiters=True)
137
+ Write the configuration state in .ini format. If
138
+ `space_around_delimiters` is True (the default), delimiters
139
+ between keys and values are surrounded by spaces.
140
+ """
141
+
142
+ from collections.abc import MutableMapping
143
+ from collections import ChainMap as _ChainMap
144
+ import functools
145
+ import io
146
+ import itertools
147
+ import os
148
+ import re
149
+ import sys
150
+ import warnings
151
+
152
+ __all__ = ["NoSectionError", "DuplicateOptionError", "DuplicateSectionError",
153
+ "NoOptionError", "InterpolationError", "InterpolationDepthError",
154
+ "InterpolationMissingOptionError", "InterpolationSyntaxError",
155
+ "ParsingError", "MissingSectionHeaderError",
156
+ "ConfigParser", "SafeConfigParser", "RawConfigParser",
157
+ "Interpolation", "BasicInterpolation", "ExtendedInterpolation",
158
+ "LegacyInterpolation", "SectionProxy", "ConverterMapping",
159
+ "DEFAULTSECT", "MAX_INTERPOLATION_DEPTH"]
160
+
161
+ _default_dict = dict
162
+ DEFAULTSECT = "DEFAULT"
163
+
164
+ MAX_INTERPOLATION_DEPTH = 10
165
+
166
+
167
+
168
+ # exception classes
169
+ class Error(Exception):
170
+ """Base class for ConfigParser exceptions."""
171
+
172
+ def __init__(self, msg=''):
173
+ self.message = msg
174
+ Exception.__init__(self, msg)
175
+
176
+ def __repr__(self):
177
+ return self.message
178
+
179
+ __str__ = __repr__
180
+
181
+
182
+ class NoSectionError(Error):
183
+ """Raised when no section matches a requested option."""
184
+
185
+ def __init__(self, section):
186
+ Error.__init__(self, 'No section: %r' % (section,))
187
+ self.section = section
188
+ self.args = (section, )
189
+
190
+
191
+ class DuplicateSectionError(Error):
192
+ """Raised when a section is repeated in an input source.
193
+
194
+ Possible repetitions that raise this exception are: multiple creation
195
+ using the API or in strict parsers when a section is found more than once
196
+ in a single input file, string or dictionary.
197
+ """
198
+
199
+ def __init__(self, section, source=None, lineno=None):
200
+ msg = [repr(section), " already exists"]
201
+ if source is not None:
202
+ message = ["While reading from ", repr(source)]
203
+ if lineno is not None:
204
+ message.append(" [line {0:2d}]".format(lineno))
205
+ message.append(": section ")
206
+ message.extend(msg)
207
+ msg = message
208
+ else:
209
+ msg.insert(0, "Section ")
210
+ Error.__init__(self, "".join(msg))
211
+ self.section = section
212
+ self.source = source
213
+ self.lineno = lineno
214
+ self.args = (section, source, lineno)
215
+
216
+
217
+ class DuplicateOptionError(Error):
218
+ """Raised by strict parsers when an option is repeated in an input source.
219
+
220
+ Current implementation raises this exception only when an option is found
221
+ more than once in a single file, string or dictionary.
222
+ """
223
+
224
+ def __init__(self, section, option, source=None, lineno=None):
225
+ msg = [repr(option), " in section ", repr(section),
226
+ " already exists"]
227
+ if source is not None:
228
+ message = ["While reading from ", repr(source)]
229
+ if lineno is not None:
230
+ message.append(" [line {0:2d}]".format(lineno))
231
+ message.append(": option ")
232
+ message.extend(msg)
233
+ msg = message
234
+ else:
235
+ msg.insert(0, "Option ")
236
+ Error.__init__(self, "".join(msg))
237
+ self.section = section
238
+ self.option = option
239
+ self.source = source
240
+ self.lineno = lineno
241
+ self.args = (section, option, source, lineno)
242
+
243
+
244
+ class NoOptionError(Error):
245
+ """A requested option was not found."""
246
+
247
+ def __init__(self, option, section):
248
+ Error.__init__(self, "No option %r in section: %r" %
249
+ (option, section))
250
+ self.option = option
251
+ self.section = section
252
+ self.args = (option, section)
253
+
254
+
255
+ class InterpolationError(Error):
256
+ """Base class for interpolation-related exceptions."""
257
+
258
+ def __init__(self, option, section, msg):
259
+ Error.__init__(self, msg)
260
+ self.option = option
261
+ self.section = section
262
+ self.args = (option, section, msg)
263
+
264
+
265
+ class InterpolationMissingOptionError(InterpolationError):
266
+ """A string substitution required a setting which was not available."""
267
+
268
+ def __init__(self, option, section, rawval, reference):
269
+ msg = ("Bad value substitution: option {!r} in section {!r} contains "
270
+ "an interpolation key {!r} which is not a valid option name. "
271
+ "Raw value: {!r}".format(option, section, reference, rawval))
272
+ InterpolationError.__init__(self, option, section, msg)
273
+ self.reference = reference
274
+ self.args = (option, section, rawval, reference)
275
+
276
+
277
+ class InterpolationSyntaxError(InterpolationError):
278
+ """Raised when the source text contains invalid syntax.
279
+
280
+ Current implementation raises this exception when the source text into
281
+ which substitutions are made does not conform to the required syntax.
282
+ """
283
+
284
+
285
+ class InterpolationDepthError(InterpolationError):
286
+ """Raised when substitutions are nested too deeply."""
287
+
288
+ def __init__(self, option, section, rawval):
289
+ msg = ("Recursion limit exceeded in value substitution: option {!r} "
290
+ "in section {!r} contains an interpolation key which "
291
+ "cannot be substituted in {} steps. Raw value: {!r}"
292
+ "".format(option, section, MAX_INTERPOLATION_DEPTH,
293
+ rawval))
294
+ InterpolationError.__init__(self, option, section, msg)
295
+ self.args = (option, section, rawval)
296
+
297
+
298
+ class ParsingError(Error):
299
+ """Raised when a configuration file does not follow legal syntax."""
300
+
301
+ def __init__(self, source=None, filename=None):
302
+ # Exactly one of `source'/`filename' arguments has to be given.
303
+ # `filename' kept for compatibility.
304
+ if filename and source:
305
+ raise ValueError("Cannot specify both `filename' and `source'. "
306
+ "Use `source'.")
307
+ elif not filename and not source:
308
+ raise ValueError("Required argument `source' not given.")
309
+ elif filename:
310
+ source = filename
311
+ Error.__init__(self, 'Source contains parsing errors: %r' % source)
312
+ self.source = source
313
+ self.errors = []
314
+ self.args = (source, )
315
+
316
+ @property
317
+ def filename(self):
318
+ """Deprecated, use `source'."""
319
+ warnings.warn(
320
+ "The 'filename' attribute will be removed in Python 3.12. "
321
+ "Use 'source' instead.",
322
+ DeprecationWarning, stacklevel=2
323
+ )
324
+ return self.source
325
+
326
+ @filename.setter
327
+ def filename(self, value):
328
+ """Deprecated, user `source'."""
329
+ warnings.warn(
330
+ "The 'filename' attribute will be removed in Python 3.12. "
331
+ "Use 'source' instead.",
332
+ DeprecationWarning, stacklevel=2
333
+ )
334
+ self.source = value
335
+
336
+ def append(self, lineno, line):
337
+ self.errors.append((lineno, line))
338
+ self.message += '\n\t[line %2d]: %s' % (lineno, line)
339
+
340
+
341
+ class MissingSectionHeaderError(ParsingError):
342
+ """Raised when a key-value pair is found before any section header."""
343
+
344
+ def __init__(self, filename, lineno, line):
345
+ Error.__init__(
346
+ self,
347
+ 'File contains no section headers.\nfile: %r, line: %d\n%r' %
348
+ (filename, lineno, line))
349
+ self.source = filename
350
+ self.lineno = lineno
351
+ self.line = line
352
+ self.args = (filename, lineno, line)
353
+
354
+
355
+ # Used in parser getters to indicate the default behaviour when a specific
356
+ # option is not found it to raise an exception. Created to enable `None` as
357
+ # a valid fallback value.
358
+ _UNSET = object()
359
+
360
+
361
+ class Interpolation:
362
+ """Dummy interpolation that passes the value through with no changes."""
363
+
364
+ def before_get(self, parser, section, option, value, defaults):
365
+ return value
366
+
367
+ def before_set(self, parser, section, option, value):
368
+ return value
369
+
370
+ def before_read(self, parser, section, option, value):
371
+ return value
372
+
373
+ def before_write(self, parser, section, option, value):
374
+ return value
375
+
376
+
377
+ class BasicInterpolation(Interpolation):
378
+ """Interpolation as implemented in the classic ConfigParser.
379
+
380
+ The option values can contain format strings which refer to other values in
381
+ the same section, or values in the special default section.
382
+
383
+ For example:
384
+
385
+ something: %(dir)s/whatever
386
+
387
+ would resolve the "%(dir)s" to the value of dir. All reference
388
+ expansions are done late, on demand. If a user needs to use a bare % in
389
+ a configuration file, she can escape it by writing %%. Other % usage
390
+ is considered a user error and raises `InterpolationSyntaxError`."""
391
+
392
+ _KEYCRE = re.compile(r"%\(([^)]+)\)s")
393
+
394
+ def before_get(self, parser, section, option, value, defaults):
395
+ L = []
396
+ self._interpolate_some(parser, option, L, value, section, defaults, 1)
397
+ return ''.join(L)
398
+
399
+ def before_set(self, parser, section, option, value):
400
+ tmp_value = value.replace('%%', '') # escaped percent signs
401
+ tmp_value = self._KEYCRE.sub('', tmp_value) # valid syntax
402
+ if '%' in tmp_value:
403
+ raise ValueError("invalid interpolation syntax in %r at "
404
+ "position %d" % (value, tmp_value.find('%')))
405
+ return value
406
+
407
+ def _interpolate_some(self, parser, option, accum, rest, section, map,
408
+ depth):
409
+ rawval = parser.get(section, option, raw=True, fallback=rest)
410
+ if depth > MAX_INTERPOLATION_DEPTH:
411
+ raise InterpolationDepthError(option, section, rawval)
412
+ while rest:
413
+ p = rest.find("%")
414
+ if p < 0:
415
+ accum.append(rest)
416
+ return
417
+ if p > 0:
418
+ accum.append(rest[:p])
419
+ rest = rest[p:]
420
+ # p is no longer used
421
+ c = rest[1:2]
422
+ if c == "%":
423
+ accum.append("%")
424
+ rest = rest[2:]
425
+ elif c == "(":
426
+ m = self._KEYCRE.match(rest)
427
+ if m is None:
428
+ raise InterpolationSyntaxError(option, section,
429
+ "bad interpolation variable reference %r" % rest)
430
+ var = parser.optionxform(m.group(1))
431
+ rest = rest[m.end():]
432
+ try:
433
+ v = map[var]
434
+ except KeyError:
435
+ raise InterpolationMissingOptionError(
436
+ option, section, rawval, var) from None
437
+ if "%" in v:
438
+ self._interpolate_some(parser, option, accum, v,
439
+ section, map, depth + 1)
440
+ else:
441
+ accum.append(v)
442
+ else:
443
+ raise InterpolationSyntaxError(
444
+ option, section,
445
+ "'%%' must be followed by '%%' or '(', "
446
+ "found: %r" % (rest,))
447
+
448
+
449
+ class ExtendedInterpolation(Interpolation):
450
+ """Advanced variant of interpolation, supports the syntax used by
451
+ `zc.buildout`. Enables interpolation between sections."""
452
+
453
+ _KEYCRE = re.compile(r"\$\{([^}]+)\}")
454
+
455
+ def before_get(self, parser, section, option, value, defaults):
456
+ L = []
457
+ self._interpolate_some(parser, option, L, value, section, defaults, 1)
458
+ return ''.join(L)
459
+
460
+ def before_set(self, parser, section, option, value):
461
+ tmp_value = value.replace('$$', '') # escaped dollar signs
462
+ tmp_value = self._KEYCRE.sub('', tmp_value) # valid syntax
463
+ if '$' in tmp_value:
464
+ raise ValueError("invalid interpolation syntax in %r at "
465
+ "position %d" % (value, tmp_value.find('$')))
466
+ return value
467
+
468
+ def _interpolate_some(self, parser, option, accum, rest, section, map,
469
+ depth):
470
+ rawval = parser.get(section, option, raw=True, fallback=rest)
471
+ if depth > MAX_INTERPOLATION_DEPTH:
472
+ raise InterpolationDepthError(option, section, rawval)
473
+ while rest:
474
+ p = rest.find("$")
475
+ if p < 0:
476
+ accum.append(rest)
477
+ return
478
+ if p > 0:
479
+ accum.append(rest[:p])
480
+ rest = rest[p:]
481
+ # p is no longer used
482
+ c = rest[1:2]
483
+ if c == "$":
484
+ accum.append("$")
485
+ rest = rest[2:]
486
+ elif c == "{":
487
+ m = self._KEYCRE.match(rest)
488
+ if m is None:
489
+ raise InterpolationSyntaxError(option, section,
490
+ "bad interpolation variable reference %r" % rest)
491
+ path = m.group(1).split(':')
492
+ rest = rest[m.end():]
493
+ sect = section
494
+ opt = option
495
+ try:
496
+ if len(path) == 1:
497
+ opt = parser.optionxform(path[0])
498
+ v = map[opt]
499
+ elif len(path) == 2:
500
+ sect = path[0]
501
+ opt = parser.optionxform(path[1])
502
+ v = parser.get(sect, opt, raw=True)
503
+ else:
504
+ raise InterpolationSyntaxError(
505
+ option, section,
506
+ "More than one ':' found: %r" % (rest,))
507
+ except (KeyError, NoSectionError, NoOptionError):
508
+ raise InterpolationMissingOptionError(
509
+ option, section, rawval, ":".join(path)) from None
510
+ if "$" in v:
511
+ self._interpolate_some(parser, opt, accum, v, sect,
512
+ dict(parser.items(sect, raw=True)),
513
+ depth + 1)
514
+ else:
515
+ accum.append(v)
516
+ else:
517
+ raise InterpolationSyntaxError(
518
+ option, section,
519
+ "'$' must be followed by '$' or '{', "
520
+ "found: %r" % (rest,))
521
+
522
+
523
+ class LegacyInterpolation(Interpolation):
524
+ """Deprecated interpolation used in old versions of ConfigParser.
525
+ Use BasicInterpolation or ExtendedInterpolation instead."""
526
+
527
+ _KEYCRE = re.compile(r"%\(([^)]*)\)s|.")
528
+
529
+ def before_get(self, parser, section, option, value, vars):
530
+ rawval = value
531
+ depth = MAX_INTERPOLATION_DEPTH
532
+ while depth: # Loop through this until it's done
533
+ depth -= 1
534
+ if value and "%(" in value:
535
+ replace = functools.partial(self._interpolation_replace,
536
+ parser=parser)
537
+ value = self._KEYCRE.sub(replace, value)
538
+ try:
539
+ value = value % vars
540
+ except KeyError as e:
541
+ raise InterpolationMissingOptionError(
542
+ option, section, rawval, e.args[0]) from None
543
+ else:
544
+ break
545
+ if value and "%(" in value:
546
+ raise InterpolationDepthError(option, section, rawval)
547
+ return value
548
+
549
+ def before_set(self, parser, section, option, value):
550
+ return value
551
+
552
+ @staticmethod
553
+ def _interpolation_replace(match, parser):
554
+ s = match.group(1)
555
+ if s is None:
556
+ return match.group()
557
+ else:
558
+ return "%%(%s)s" % parser.optionxform(s)
559
+
560
+
561
+ class RawConfigParser(MutableMapping):
562
+ """ConfigParser that does not do interpolation."""
563
+
564
+ # Regular expressions for parsing section headers and options
565
+ _SECT_TMPL = r"""
566
+ \[ # [
567
+ (?P<header>.+) # very permissive!
568
+ \] # ]
569
+ """
570
+ _OPT_TMPL = r"""
571
+ (?P<option>.*?) # very permissive!
572
+ \s*(?P<vi>{delim})\s* # any number of space/tab,
573
+ # followed by any of the
574
+ # allowed delimiters,
575
+ # followed by any space/tab
576
+ (?P<value>.*)$ # everything up to eol
577
+ """
578
+ _OPT_NV_TMPL = r"""
579
+ (?P<option>.*?) # very permissive!
580
+ \s*(?: # any number of space/tab,
581
+ (?P<vi>{delim})\s* # optionally followed by
582
+ # any of the allowed
583
+ # delimiters, followed by any
584
+ # space/tab
585
+ (?P<value>.*))?$ # everything up to eol
586
+ """
587
+ # Interpolation algorithm to be used if the user does not specify another
588
+ _DEFAULT_INTERPOLATION = Interpolation()
589
+ # Compiled regular expression for matching sections
590
+ SECTCRE = re.compile(_SECT_TMPL, re.VERBOSE)
591
+ # Compiled regular expression for matching options with typical separators
592
+ OPTCRE = re.compile(_OPT_TMPL.format(delim="=|:"), re.VERBOSE)
593
+ # Compiled regular expression for matching options with optional values
594
+ # delimited using typical separators
595
+ OPTCRE_NV = re.compile(_OPT_NV_TMPL.format(delim="=|:"), re.VERBOSE)
596
+ # Compiled regular expression for matching leading whitespace in a line
597
+ NONSPACECRE = re.compile(r"\S")
598
+ # Possible boolean values in the configuration.
599
+ BOOLEAN_STATES = {'1': True, 'yes': True, 'true': True, 'on': True,
600
+ '0': False, 'no': False, 'false': False, 'off': False}
601
+
602
+ def __init__(self, defaults=None, dict_type=_default_dict,
603
+ allow_no_value=False, *, delimiters=('=', ':'),
604
+ comment_prefixes=('#', ';'), inline_comment_prefixes=None,
605
+ strict=True, empty_lines_in_values=True,
606
+ default_section=DEFAULTSECT,
607
+ interpolation=_UNSET, converters=_UNSET):
608
+
609
+ self._dict = dict_type
610
+ self._sections = self._dict()
611
+ self._defaults = self._dict()
612
+ self._converters = ConverterMapping(self)
613
+ self._proxies = self._dict()
614
+ self._proxies[default_section] = SectionProxy(self, default_section)
615
+ self._delimiters = tuple(delimiters)
616
+ if delimiters == ('=', ':'):
617
+ self._optcre = self.OPTCRE_NV if allow_no_value else self.OPTCRE
618
+ else:
619
+ d = "|".join(re.escape(d) for d in delimiters)
620
+ if allow_no_value:
621
+ self._optcre = re.compile(self._OPT_NV_TMPL.format(delim=d),
622
+ re.VERBOSE)
623
+ else:
624
+ self._optcre = re.compile(self._OPT_TMPL.format(delim=d),
625
+ re.VERBOSE)
626
+ self._comment_prefixes = tuple(comment_prefixes or ())
627
+ self._inline_comment_prefixes = tuple(inline_comment_prefixes or ())
628
+ self._strict = strict
629
+ self._allow_no_value = allow_no_value
630
+ self._empty_lines_in_values = empty_lines_in_values
631
+ self.default_section=default_section
632
+ self._interpolation = interpolation
633
+ if self._interpolation is _UNSET:
634
+ self._interpolation = self._DEFAULT_INTERPOLATION
635
+ if self._interpolation is None:
636
+ self._interpolation = Interpolation()
637
+ if converters is not _UNSET:
638
+ self._converters.update(converters)
639
+ if defaults:
640
+ self._read_defaults(defaults)
641
+
642
+ def defaults(self):
643
+ return self._defaults
644
+
645
+ def sections(self):
646
+ """Return a list of section names, excluding [DEFAULT]"""
647
+ # self._sections will never have [DEFAULT] in it
648
+ return list(self._sections.keys())
649
+
650
+ def add_section(self, section):
651
+ """Create a new section in the configuration.
652
+
653
+ Raise DuplicateSectionError if a section by the specified name
654
+ already exists. Raise ValueError if name is DEFAULT.
655
+ """
656
+ if section == self.default_section:
657
+ raise ValueError('Invalid section name: %r' % section)
658
+
659
+ if section in self._sections:
660
+ raise DuplicateSectionError(section)
661
+ self._sections[section] = self._dict()
662
+ self._proxies[section] = SectionProxy(self, section)
663
+
664
+ def has_section(self, section):
665
+ """Indicate whether the named section is present in the configuration.
666
+
667
+ The DEFAULT section is not acknowledged.
668
+ """
669
+ return section in self._sections
670
+
671
+ def options(self, section):
672
+ """Return a list of option names for the given section name."""
673
+ try:
674
+ opts = self._sections[section].copy()
675
+ except KeyError:
676
+ raise NoSectionError(section) from None
677
+ opts.update(self._defaults)
678
+ return list(opts.keys())
679
+
680
+ def read(self, filenames, encoding=None):
681
+ """Read and parse a filename or an iterable of filenames.
682
+
683
+ Files that cannot be opened are silently ignored; this is
684
+ designed so that you can specify an iterable of potential
685
+ configuration file locations (e.g. current directory, user's
686
+ home directory, systemwide directory), and all existing
687
+ configuration files in the iterable will be read. A single
688
+ filename may also be given.
689
+
690
+ Return list of successfully read files.
691
+ """
692
+ if isinstance(filenames, (str, bytes, os.PathLike)):
693
+ filenames = [filenames]
694
+ encoding = io.text_encoding(encoding)
695
+ read_ok = []
696
+ for filename in filenames:
697
+ try:
698
+ with open(filename, encoding=encoding) as fp:
699
+ self._read(fp, filename)
700
+ except OSError:
701
+ continue
702
+ if isinstance(filename, os.PathLike):
703
+ filename = os.fspath(filename)
704
+ read_ok.append(filename)
705
+ return read_ok
706
+
707
+ def read_file(self, f, source=None):
708
+ """Like read() but the argument must be a file-like object.
709
+
710
+ The `f` argument must be iterable, returning one line at a time.
711
+ Optional second argument is the `source` specifying the name of the
712
+ file being read. If not given, it is taken from f.name. If `f` has no
713
+ `name` attribute, `<???>` is used.
714
+ """
715
+ if source is None:
716
+ try:
717
+ source = f.name
718
+ except AttributeError:
719
+ source = '<???>'
720
+ self._read(f, source)
721
+
722
+ def read_string(self, string, source='<string>'):
723
+ """Read configuration from a given string."""
724
+ sfile = io.StringIO(string)
725
+ self.read_file(sfile, source)
726
+
727
+ def read_dict(self, dictionary, source='<dict>'):
728
+ """Read configuration from a dictionary.
729
+
730
+ Keys are section names, values are dictionaries with keys and values
731
+ that should be present in the section. If the used dictionary type
732
+ preserves order, sections and their keys will be added in order.
733
+
734
+ All types held in the dictionary are converted to strings during
735
+ reading, including section names, option names and keys.
736
+
737
+ Optional second argument is the `source` specifying the name of the
738
+ dictionary being read.
739
+ """
740
+ elements_added = set()
741
+ for section, keys in dictionary.items():
742
+ section = str(section)
743
+ try:
744
+ self.add_section(section)
745
+ except (DuplicateSectionError, ValueError):
746
+ if self._strict and section in elements_added:
747
+ raise
748
+ elements_added.add(section)
749
+ for key, value in keys.items():
750
+ key = self.optionxform(str(key))
751
+ if value is not None:
752
+ value = str(value)
753
+ if self._strict and (section, key) in elements_added:
754
+ raise DuplicateOptionError(section, key, source)
755
+ elements_added.add((section, key))
756
+ self.set(section, key, value)
757
+
758
+ def readfp(self, fp, filename=None):
759
+ """Deprecated, use read_file instead."""
760
+ warnings.warn(
761
+ "This method will be removed in Python 3.12. "
762
+ "Use 'parser.read_file()' instead.",
763
+ DeprecationWarning, stacklevel=2
764
+ )
765
+ self.read_file(fp, source=filename)
766
+
767
+ def get(self, section, option, *, raw=False, vars=None, fallback=_UNSET):
768
+ """Get an option value for a given section.
769
+
770
+ If `vars` is provided, it must be a dictionary. The option is looked up
771
+ in `vars` (if provided), `section`, and in `DEFAULTSECT` in that order.
772
+ If the key is not found and `fallback` is provided, it is used as
773
+ a fallback value. `None` can be provided as a `fallback` value.
774
+
775
+ If interpolation is enabled and the optional argument `raw` is False,
776
+ all interpolations are expanded in the return values.
777
+
778
+ Arguments `raw`, `vars`, and `fallback` are keyword only.
779
+
780
+ The section DEFAULT is special.
781
+ """
782
+ try:
783
+ d = self._unify_values(section, vars)
784
+ except NoSectionError:
785
+ if fallback is _UNSET:
786
+ raise
787
+ else:
788
+ return fallback
789
+ option = self.optionxform(option)
790
+ try:
791
+ value = d[option]
792
+ except KeyError:
793
+ if fallback is _UNSET:
794
+ raise NoOptionError(option, section)
795
+ else:
796
+ return fallback
797
+
798
+ if raw or value is None:
799
+ return value
800
+ else:
801
+ return self._interpolation.before_get(self, section, option, value,
802
+ d)
803
+
804
+ def _get(self, section, conv, option, **kwargs):
805
+ return conv(self.get(section, option, **kwargs))
806
+
807
+ def _get_conv(self, section, option, conv, *, raw=False, vars=None,
808
+ fallback=_UNSET, **kwargs):
809
+ try:
810
+ return self._get(section, conv, option, raw=raw, vars=vars,
811
+ **kwargs)
812
+ except (NoSectionError, NoOptionError):
813
+ if fallback is _UNSET:
814
+ raise
815
+ return fallback
816
+
817
+ # getint, getfloat and getboolean provided directly for backwards compat
818
+ def getint(self, section, option, *, raw=False, vars=None,
819
+ fallback=_UNSET, **kwargs):
820
+ return self._get_conv(section, option, int, raw=raw, vars=vars,
821
+ fallback=fallback, **kwargs)
822
+
823
+ def getfloat(self, section, option, *, raw=False, vars=None,
824
+ fallback=_UNSET, **kwargs):
825
+ return self._get_conv(section, option, float, raw=raw, vars=vars,
826
+ fallback=fallback, **kwargs)
827
+
828
+ def getboolean(self, section, option, *, raw=False, vars=None,
829
+ fallback=_UNSET, **kwargs):
830
+ return self._get_conv(section, option, self._convert_to_boolean,
831
+ raw=raw, vars=vars, fallback=fallback, **kwargs)
832
+
833
+ def items(self, section=_UNSET, raw=False, vars=None):
834
+ """Return a list of (name, value) tuples for each option in a section.
835
+
836
+ All % interpolations are expanded in the return values, based on the
837
+ defaults passed into the constructor, unless the optional argument
838
+ `raw` is true. Additional substitutions may be provided using the
839
+ `vars` argument, which must be a dictionary whose contents overrides
840
+ any pre-existing defaults.
841
+
842
+ The section DEFAULT is special.
843
+ """
844
+ if section is _UNSET:
845
+ return super().items()
846
+ d = self._defaults.copy()
847
+ try:
848
+ d.update(self._sections[section])
849
+ except KeyError:
850
+ if section != self.default_section:
851
+ raise NoSectionError(section)
852
+ orig_keys = list(d.keys())
853
+ # Update with the entry specific variables
854
+ if vars:
855
+ for key, value in vars.items():
856
+ d[self.optionxform(key)] = value
857
+ value_getter = lambda option: self._interpolation.before_get(self,
858
+ section, option, d[option], d)
859
+ if raw:
860
+ value_getter = lambda option: d[option]
861
+ return [(option, value_getter(option)) for option in orig_keys]
862
+
863
+ def popitem(self):
864
+ """Remove a section from the parser and return it as
865
+ a (section_name, section_proxy) tuple. If no section is present, raise
866
+ KeyError.
867
+
868
+ The section DEFAULT is never returned because it cannot be removed.
869
+ """
870
+ for key in self.sections():
871
+ value = self[key]
872
+ del self[key]
873
+ return key, value
874
+ raise KeyError
875
+
876
+ def optionxform(self, optionstr):
877
+ return optionstr.lower()
878
+
879
+ def has_option(self, section, option):
880
+ """Check for the existence of a given option in a given section.
881
+ If the specified `section` is None or an empty string, DEFAULT is
882
+ assumed. If the specified `section` does not exist, returns False."""
883
+ if not section or section == self.default_section:
884
+ option = self.optionxform(option)
885
+ return option in self._defaults
886
+ elif section not in self._sections:
887
+ return False
888
+ else:
889
+ option = self.optionxform(option)
890
+ return (option in self._sections[section]
891
+ or option in self._defaults)
892
+
893
+ def set(self, section, option, value=None):
894
+ """Set an option."""
895
+ if value:
896
+ value = self._interpolation.before_set(self, section, option,
897
+ value)
898
+ if not section or section == self.default_section:
899
+ sectdict = self._defaults
900
+ else:
901
+ try:
902
+ sectdict = self._sections[section]
903
+ except KeyError:
904
+ raise NoSectionError(section) from None
905
+ sectdict[self.optionxform(option)] = value
906
+
907
+ def write(self, fp, space_around_delimiters=True):
908
+ """Write an .ini-format representation of the configuration state.
909
+
910
+ If `space_around_delimiters` is True (the default), delimiters
911
+ between keys and values are surrounded by spaces.
912
+
913
+ Please note that comments in the original configuration file are not
914
+ preserved when writing the configuration back.
915
+ """
916
+ if space_around_delimiters:
917
+ d = " {} ".format(self._delimiters[0])
918
+ else:
919
+ d = self._delimiters[0]
920
+ if self._defaults:
921
+ self._write_section(fp, self.default_section,
922
+ self._defaults.items(), d)
923
+ for section in self._sections:
924
+ self._write_section(fp, section,
925
+ self._sections[section].items(), d)
926
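# Round-trip sketch for write() above, using StringIO instead of a file
# (section and values invented):
import configparser, io
cp = configparser.ConfigParser()
cp["db"] = {"host": "localhost", "port": "5432"}
buf = io.StringIO()
cp.write(buf)                   # space_around_delimiters=True gives "key = value"
out = buf.getvalue()
assert "[db]" in out and "host = localhost" in out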
+
927
+ def _write_section(self, fp, section_name, section_items, delimiter):
928
+ """Write a single section to the specified `fp`."""
929
+ fp.write("[{}]\n".format(section_name))
930
+ for key, value in section_items:
931
+ value = self._interpolation.before_write(self, section_name, key,
932
+ value)
933
+ if value is not None or not self._allow_no_value:
934
+ value = delimiter + str(value).replace('\n', '\n\t')
935
+ else:
936
+ value = ""
937
+ fp.write("{}{}\n".format(key, value))
938
+ fp.write("\n")
939
+
940
+ def remove_option(self, section, option):
941
+ """Remove an option."""
942
+ if not section or section == self.default_section:
943
+ sectdict = self._defaults
944
+ else:
945
+ try:
946
+ sectdict = self._sections[section]
947
+ except KeyError:
948
+ raise NoSectionError(section) from None
949
+ option = self.optionxform(option)
950
+ existed = option in sectdict
951
+ if existed:
952
+ del sectdict[option]
953
+ return existed
954
+
955
+ def remove_section(self, section):
956
+ """Remove a file section."""
957
+ existed = section in self._sections
958
+ if existed:
959
+ del self._sections[section]
960
+ del self._proxies[section]
961
+ return existed
962
+
963
+ def __getitem__(self, key):
964
+ if key != self.default_section and not self.has_section(key):
965
+ raise KeyError(key)
966
+ return self._proxies[key]
967
+
968
+ def __setitem__(self, key, value):
969
+ # To conform with the mapping protocol, overwrites existing values in
970
+ # the section.
971
+ if key in self and self[key] is value:
972
+ return
973
+ # XXX this is not atomic if read_dict fails at any point. Then again,
974
+ # no update method in configparser is atomic in this implementation.
975
+ if key == self.default_section:
976
+ self._defaults.clear()
977
+ elif key in self._sections:
978
+ self._sections[key].clear()
979
+ self.read_dict({key: value})
980
+
981
+ def __delitem__(self, key):
982
+ if key == self.default_section:
983
+ raise ValueError("Cannot remove the default section.")
984
+ if not self.has_section(key):
985
+ raise KeyError(key)
986
+ self.remove_section(key)
987
+
988
+ def __contains__(self, key):
989
+ return key == self.default_section or self.has_section(key)
990
+
991
+ def __len__(self):
992
+ return len(self._sections) + 1 # the default section
993
+
994
+ def __iter__(self):
995
+ # XXX does it break when underlying container state changed?
996
+ return itertools.chain((self.default_section,), self._sections.keys())
997
+
998
+ def _read(self, fp, fpname):
999
+ """Parse a sectioned configuration file.
1000
+
1001
+ Each section in a configuration file contains a header, indicated by
1002
+ a name in square brackets (`[]`), plus key/value options, indicated by
1003
+ `name` and `value` delimited with a specific substring (`=` or `:` by
1004
+ default).
1005
+
1006
+ Values can span multiple lines, as long as they are indented deeper
1007
+ than the first line of the value. Depending on the parser's mode, blank
1008
+ lines may be treated as parts of multiline values or ignored.
1009
+
1010
+ Configuration files may include comments, prefixed by specific
1011
+ characters (`#` and `;` by default). Comments may appear on their own
1012
+ in an otherwise empty line or may be entered in lines holding values or
1013
+ section names. Please note that comments get stripped off when reading
+ configuration files.
1014
+ """
1015
+ elements_added = set()
1016
+ cursect = None # None, or a dictionary
1017
+ sectname = None
1018
+ optname = None
1019
+ lineno = 0
1020
+ indent_level = 0
1021
+ e = None # None, or an exception
1022
+ for lineno, line in enumerate(fp, start=1):
1023
+ comment_start = sys.maxsize
1024
+ # strip inline comments
1025
+ inline_prefixes = {p: -1 for p in self._inline_comment_prefixes}
1026
+ while comment_start == sys.maxsize and inline_prefixes:
1027
+ next_prefixes = {}
1028
+ for prefix, index in inline_prefixes.items():
1029
+ index = line.find(prefix, index+1)
1030
+ if index == -1:
1031
+ continue
1032
+ next_prefixes[prefix] = index
1033
+ if index == 0 or (index > 0 and line[index-1].isspace()):
1034
+ comment_start = min(comment_start, index)
1035
+ inline_prefixes = next_prefixes
1036
+ # strip full line comments
1037
+ for prefix in self._comment_prefixes:
1038
+ if line.strip().startswith(prefix):
1039
+ comment_start = 0
1040
+ break
1041
+ if comment_start == sys.maxsize:
1042
+ comment_start = None
1043
+ value = line[:comment_start].strip()
1044
+ if not value:
1045
+ if self._empty_lines_in_values:
1046
+ # add empty line to the value, but only if there was no
1047
+ # comment on the line
1048
+ if (comment_start is None and
1049
+ cursect is not None and
1050
+ optname and
1051
+ cursect[optname] is not None):
1052
+ cursect[optname].append('') # newlines added at join
1053
+ else:
1054
+ # empty line marks end of value
1055
+ indent_level = sys.maxsize
1056
+ continue
1057
+ # continuation line?
1058
+ first_nonspace = self.NONSPACECRE.search(line)
1059
+ cur_indent_level = first_nonspace.start() if first_nonspace else 0
1060
+ if (cursect is not None and optname and
1061
+ cur_indent_level > indent_level):
1062
+ cursect[optname].append(value)
1063
+ # a section header or option header?
1064
+ else:
1065
+ indent_level = cur_indent_level
1066
+ # is it a section header?
1067
+ mo = self.SECTCRE.match(value)
1068
+ if mo:
1069
+ sectname = mo.group('header')
1070
+ if sectname in self._sections:
1071
+ if self._strict and sectname in elements_added:
1072
+ raise DuplicateSectionError(sectname, fpname,
1073
+ lineno)
1074
+ cursect = self._sections[sectname]
1075
+ elements_added.add(sectname)
1076
+ elif sectname == self.default_section:
1077
+ cursect = self._defaults
1078
+ else:
1079
+ cursect = self._dict()
1080
+ self._sections[sectname] = cursect
1081
+ self._proxies[sectname] = SectionProxy(self, sectname)
1082
+ elements_added.add(sectname)
1083
+ # So sections can't start with a continuation line
1084
+ optname = None
1085
+ # no section header in the file?
1086
+ elif cursect is None:
1087
+ raise MissingSectionHeaderError(fpname, lineno, line)
1088
+ # an option line?
1089
+ else:
1090
+ mo = self._optcre.match(value)
1091
+ if mo:
1092
+ optname, vi, optval = mo.group('option', 'vi', 'value')
1093
+ if not optname:
1094
+ e = self._handle_error(e, fpname, lineno, line)
1095
+ optname = self.optionxform(optname.rstrip())
1096
+ if (self._strict and
1097
+ (sectname, optname) in elements_added):
1098
+ raise DuplicateOptionError(sectname, optname,
1099
+ fpname, lineno)
1100
+ elements_added.add((sectname, optname))
1101
+ # This check is fine because the OPTCRE cannot
1102
+ # match if it would set optval to None
1103
+ if optval is not None:
1104
+ optval = optval.strip()
1105
+ cursect[optname] = [optval]
1106
+ else:
1107
+ # valueless option handling
1108
+ cursect[optname] = None
1109
+ else:
1110
+ # a non-fatal parsing error occurred. set up the
1111
+ # exception but keep going. the exception will be
1112
+ # raised at the end of the file and will contain a
1113
+ # list of all bogus lines
1114
+ e = self._handle_error(e, fpname, lineno, line)
1115
+ self._join_multiline_values()
1116
+ # if any parsing errors occurred, raise an exception
1117
+ if e:
1118
+ raise e
1119
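# Parsing sketch for _read() above: indented continuation lines are collected
# into one multiline value and joined in _join_multiline_values (data invented).
import configparser
cp = configparser.ConfigParser()
cp.read_string("[msg]\nbody = line one\n  line two\n")
assert cp.get("msg", "body") == "line one\nline two"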
+
1120
+ def _join_multiline_values(self):
1121
+ defaults = self.default_section, self._defaults
1122
+ all_sections = itertools.chain((defaults,),
1123
+ self._sections.items())
1124
+ for section, options in all_sections:
1125
+ for name, val in options.items():
1126
+ if isinstance(val, list):
1127
+ val = '\n'.join(val).rstrip()
1128
+ options[name] = self._interpolation.before_read(self,
1129
+ section,
1130
+ name, val)
1131
+
1132
+ def _read_defaults(self, defaults):
1133
+ """Read the defaults passed in the initializer.
1134
+ Note: values can be non-string."""
1135
+ for key, value in defaults.items():
1136
+ self._defaults[self.optionxform(key)] = value
1137
+
1138
+ def _handle_error(self, exc, fpname, lineno, line):
1139
+ if not exc:
1140
+ exc = ParsingError(fpname)
1141
+ exc.append(lineno, repr(line))
1142
+ return exc
1143
+
1144
+ def _unify_values(self, section, vars):
1145
+ """Create a sequence of lookups with 'vars' taking priority over
1146
+ the 'section' which takes priority over the DEFAULTSECT.
1147
+
1148
+ """
1149
+ sectiondict = {}
1150
+ try:
1151
+ sectiondict = self._sections[section]
1152
+ except KeyError:
1153
+ if section != self.default_section:
1154
+ raise NoSectionError(section) from None
1155
+ # Update with the entry specific variables
1156
+ vardict = {}
1157
+ if vars:
1158
+ for key, value in vars.items():
1159
+ if value is not None:
1160
+ value = str(value)
1161
+ vardict[self.optionxform(key)] = value
1162
+ return _ChainMap(vardict, sectiondict, self._defaults)
1163
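# Priority sketch: _unify_values chains `vars` over the section over DEFAULT,
# so get() resolves lookups in that order (key names invented).
import configparser
cp = configparser.ConfigParser(defaults={"k": "default"})
cp.read_string("[s]\nk = section\n")
assert cp.get("s", "k") == "section"
assert cp.get("s", "k", vars={"k": "vars"}) == "vars"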
+
1164
+ def _convert_to_boolean(self, value):
1165
+ """Return a boolean value translating from other types if necessary.
1166
+ """
1167
+ if value.lower() not in self.BOOLEAN_STATES:
1168
+ raise ValueError('Not a boolean: %s' % value)
1169
+ return self.BOOLEAN_STATES[value.lower()]
1170
+
1171
+ def _validate_value_types(self, *, section="", option="", value=""):
1172
+ """Raises a TypeError for non-string values.
1173
+
1174
+ The only legal non-string value if we allow valueless
1175
+ options is None, so we need to check if the value is a
1176
+ string if:
1177
+ - we do not allow valueless options, or
1178
+ - we allow valueless options but the value is not None
1179
+
1180
+ For compatibility reasons this method is not used in classic set()
1181
+ for RawConfigParsers. It is invoked in every case for mapping protocol
1182
+ access and in ConfigParser.set().
1183
+ """
1184
+ if not isinstance(section, str):
1185
+ raise TypeError("section names must be strings")
1186
+ if not isinstance(option, str):
1187
+ raise TypeError("option keys must be strings")
1188
+ if not self._allow_no_value or value:
1189
+ if not isinstance(value, str):
1190
+ raise TypeError("option values must be strings")
1191
+
1192
+ @property
1193
+ def converters(self):
1194
+ return self._converters
1195
+
1196
+
1197
+ class ConfigParser(RawConfigParser):
1198
+ """ConfigParser implementing interpolation."""
1199
+
1200
+ _DEFAULT_INTERPOLATION = BasicInterpolation()
1201
+
1202
+ def set(self, section, option, value=None):
1203
+ """Set an option. Extends RawConfigParser.set by validating type and
1204
+ interpolation syntax on the value."""
1205
+ self._validate_value_types(option=option, value=value)
1206
+ super().set(section, option, value)
1207
+
1208
+ def add_section(self, section):
1209
+ """Create a new section in the configuration. Extends
1210
+ RawConfigParser.add_section by validating if the section name is
1211
+ a string."""
1212
+ self._validate_value_types(section=section)
1213
+ super().add_section(section)
1214
+
1215
+ def _read_defaults(self, defaults):
1216
+ """Reads the defaults passed in the initializer, implicitly converting
1217
+ values to strings like the rest of the API.
1218
+
1219
+ Does not perform interpolation for backwards compatibility.
1220
+ """
1221
+ try:
1222
+ hold_interpolation = self._interpolation
1223
+ self._interpolation = Interpolation()
1224
+ self.read_dict({self.default_section: defaults})
1225
+ finally:
1226
+ self._interpolation = hold_interpolation
1227
+
1228
+
1229
+ class SafeConfigParser(ConfigParser):
1230
+ """ConfigParser alias for backwards compatibility purposes."""
1231
+
1232
+ def __init__(self, *args, **kwargs):
1233
+ super().__init__(*args, **kwargs)
1234
+ warnings.warn(
1235
+ "The SafeConfigParser class has been renamed to ConfigParser "
1236
+ "in Python 3.2. This alias will be removed in Python 3.12."
1237
+ " Use ConfigParser directly instead.",
1238
+ DeprecationWarning, stacklevel=2
1239
+ )
1240
+
1241
+
1242
+ class SectionProxy(MutableMapping):
1243
+ """A proxy for a single section from a parser."""
1244
+
1245
+ def __init__(self, parser, name):
1246
+ """Creates a view on a section of the specified `name` in `parser`."""
1247
+ self._parser = parser
1248
+ self._name = name
1249
+ for conv in parser.converters:
1250
+ key = 'get' + conv
1251
+ getter = functools.partial(self.get, _impl=getattr(parser, key))
1252
+ setattr(self, key, getter)
1253
+
1254
+ def __repr__(self):
1255
+ return '<Section: {}>'.format(self._name)
1256
+
1257
+ def __getitem__(self, key):
1258
+ if not self._parser.has_option(self._name, key):
1259
+ raise KeyError(key)
1260
+ return self._parser.get(self._name, key)
1261
+
1262
+ def __setitem__(self, key, value):
1263
+ self._parser._validate_value_types(option=key, value=value)
1264
+ return self._parser.set(self._name, key, value)
1265
+
1266
+ def __delitem__(self, key):
1267
+ if not (self._parser.has_option(self._name, key) and
1268
+ self._parser.remove_option(self._name, key)):
1269
+ raise KeyError(key)
1270
+
1271
+ def __contains__(self, key):
1272
+ return self._parser.has_option(self._name, key)
1273
+
1274
+ def __len__(self):
1275
+ return len(self._options())
1276
+
1277
+ def __iter__(self):
1278
+ return self._options().__iter__()
1279
+
1280
+ def _options(self):
1281
+ if self._name != self._parser.default_section:
1282
+ return self._parser.options(self._name)
1283
+ else:
1284
+ return self._parser.defaults()
1285
+
1286
+ @property
1287
+ def parser(self):
1288
+ # The parser object of the proxy is read-only.
1289
+ return self._parser
1290
+
1291
+ @property
1292
+ def name(self):
1293
+ # The name of the section on a proxy is read-only.
1294
+ return self._name
1295
+
1296
+ def get(self, option, fallback=None, *, raw=False, vars=None,
1297
+ _impl=None, **kwargs):
1298
+ """Get an option value.
1299
+
1300
+ Unless `fallback` is provided, `None` will be returned if the option
1301
+ is not found.
1302
+
1303
+ """
1304
+ # If `_impl` is provided, it should be a getter method on the parser
1305
+ # object that provides the desired type conversion.
1306
+ if not _impl:
1307
+ _impl = self._parser.get
1308
+ return _impl(self._name, option, raw=raw, vars=vars,
1309
+ fallback=fallback, **kwargs)
1310
+
1311
+
1312
+ class ConverterMapping(MutableMapping):
1313
+ """Enables reuse of get*() methods between the parser and section proxies.
1314
+
1315
+ If a parser class implements a getter directly, the value for the given
1316
+ key will be ``None``. The presence of the converter name here enables
1317
+ section proxies to find and use the implementation on the parser class.
1318
+ """
1319
+
1320
+ GETTERCRE = re.compile(r"^get(?P<name>.+)$")
1321
+
1322
+ def __init__(self, parser):
1323
+ self._parser = parser
1324
+ self._data = {}
1325
+ for getter in dir(self._parser):
1326
+ m = self.GETTERCRE.match(getter)
1327
+ if not m or not callable(getattr(self._parser, getter)):
1328
+ continue
1329
+ self._data[m.group('name')] = None # See class docstring.
1330
+
1331
+ def __getitem__(self, key):
1332
+ return self._data[key]
1333
+
1334
+ def __setitem__(self, key, value):
1335
+ try:
1336
+ k = 'get' + key
1337
+ except TypeError:
1338
+ raise ValueError('Incompatible key: {} (type: {})'
1339
+ ''.format(key, type(key)))
1340
+ if k == 'get':
1341
+ raise ValueError('Incompatible key: cannot use "" as a name')
1342
+ self._data[key] = value
1343
+ func = functools.partial(self._parser._get_conv, conv=value)
1344
+ func.converter = value
1345
+ setattr(self._parser, k, func)
1346
+ for proxy in self._parser.values():
1347
+ getter = functools.partial(proxy.get, _impl=func)
1348
+ setattr(proxy, k, getter)
1349
+
1350
+ def __delitem__(self, key):
1351
+ try:
1352
+ k = 'get' + (key or None)
1353
+ except TypeError:
1354
+ raise KeyError(key)
1355
+ del self._data[key]
1356
+ for inst in itertools.chain((self._parser,), self._parser.values()):
1357
+ try:
1358
+ delattr(inst, k)
1359
+ except AttributeError:
1360
+ # don't raise since the entry was present in _data, silently
1361
+ # clean up
1362
+ continue
1363
+
1364
+ def __iter__(self):
1365
+ return iter(self._data)
1366
+
1367
+ def __len__(self):
1368
+ return len(self._data)
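# Converter sketch: assigning into parser.converters (the ConverterMapping
# above) synthesizes a get<name>() on the parser and its section proxies.
# The "list" converter here is invented for illustration.
import configparser
cp = configparser.ConfigParser(converters={"list": lambda v: v.split(",")})
cp.read_string("[s]\nxs = a,b,c\n")
assert cp.getlist("s", "xs") == ["a", "b", "c"]
assert cp["s"].getlist("xs") == ["a", "b", "c"]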
janus/lib/python3.10/contextlib.py ADDED
@@ -0,0 +1,745 @@
1
+ """Utilities for with-statement contexts. See PEP 343."""
2
+ import abc
3
+ import sys
4
+ import _collections_abc
5
+ from collections import deque
6
+ from functools import wraps
7
+ from types import MethodType, GenericAlias
8
+
9
+ __all__ = ["asynccontextmanager", "contextmanager", "closing", "nullcontext",
10
+ "AbstractContextManager", "AbstractAsyncContextManager",
11
+ "AsyncExitStack", "ContextDecorator", "ExitStack",
12
+ "redirect_stdout", "redirect_stderr", "suppress", "aclosing"]
13
+
14
+
15
+ class AbstractContextManager(abc.ABC):
16
+
17
+ """An abstract base class for context managers."""
18
+
19
+ __class_getitem__ = classmethod(GenericAlias)
20
+
21
+ def __enter__(self):
22
+ """Return `self` upon entering the runtime context."""
23
+ return self
24
+
25
+ @abc.abstractmethod
26
+ def __exit__(self, exc_type, exc_value, traceback):
27
+ """Raise any exception triggered within the runtime context."""
28
+ return None
29
+
30
+ @classmethod
31
+ def __subclasshook__(cls, C):
32
+ if cls is AbstractContextManager:
33
+ return _collections_abc._check_methods(C, "__enter__", "__exit__")
34
+ return NotImplemented
35
+
36
+
37
+ class AbstractAsyncContextManager(abc.ABC):
38
+
39
+ """An abstract base class for asynchronous context managers."""
40
+
41
+ __class_getitem__ = classmethod(GenericAlias)
42
+
43
+ async def __aenter__(self):
44
+ """Return `self` upon entering the runtime context."""
45
+ return self
46
+
47
+ @abc.abstractmethod
48
+ async def __aexit__(self, exc_type, exc_value, traceback):
49
+ """Raise any exception triggered within the runtime context."""
50
+ return None
51
+
52
+ @classmethod
53
+ def __subclasshook__(cls, C):
54
+ if cls is AbstractAsyncContextManager:
55
+ return _collections_abc._check_methods(C, "__aenter__",
56
+ "__aexit__")
57
+ return NotImplemented
58
+
59
+
60
+ class ContextDecorator(object):
61
+ "A base class or mixin that enables context managers to work as decorators."
62
+
63
+ def _recreate_cm(self):
64
+ """Return a recreated instance of self.
65
+
66
+ Allows an otherwise one-shot context manager like
67
+ _GeneratorContextManager to support use as
68
+ a decorator via implicit recreation.
69
+
70
+ This is a private interface just for _GeneratorContextManager.
71
+ See issue #11647 for details.
72
+ """
73
+ return self
74
+
75
+ def __call__(self, func):
76
+ @wraps(func)
77
+ def inner(*args, **kwds):
78
+ with self._recreate_cm():
79
+ return func(*args, **kwds)
80
+ return inner
81
+
82
+
83
+ class AsyncContextDecorator(object):
84
+ "A base class or mixin that enables async context managers to work as decorators."
85
+
86
+ def _recreate_cm(self):
87
+ """Return a recreated instance of self.
88
+ """
89
+ return self
90
+
91
+ def __call__(self, func):
92
+ @wraps(func)
93
+ async def inner(*args, **kwds):
94
+ async with self._recreate_cm():
95
+ return await func(*args, **kwds)
96
+ return inner
97
+
98
+
99
+ class _GeneratorContextManagerBase:
100
+ """Shared functionality for @contextmanager and @asynccontextmanager."""
101
+
102
+ def __init__(self, func, args, kwds):
103
+ self.gen = func(*args, **kwds)
104
+ self.func, self.args, self.kwds = func, args, kwds
105
+ # Issue 19330: ensure context manager instances have good docstrings
106
+ doc = getattr(func, "__doc__", None)
107
+ if doc is None:
108
+ doc = type(self).__doc__
109
+ self.__doc__ = doc
110
+ # Unfortunately, this still doesn't provide good help output when
111
+ # inspecting the created context manager instances, since pydoc
112
+ # currently bypasses the instance docstring and shows the docstring
113
+ # for the class instead.
114
+ # See http://bugs.python.org/issue19404 for more details.
115
+
116
+ def _recreate_cm(self):
117
+ # _GCMB instances are one-shot context managers, so the
118
+ # CM must be recreated each time a decorated function is
119
+ # called
120
+ return self.__class__(self.func, self.args, self.kwds)
121
+
122
+
123
+ class _GeneratorContextManager(
124
+ _GeneratorContextManagerBase,
125
+ AbstractContextManager,
126
+ ContextDecorator,
127
+ ):
128
+ """Helper for @contextmanager decorator."""
129
+
130
+ def __enter__(self):
131
+ # do not keep args and kwds alive unnecessarily
132
+ # they are only needed for recreation, which is not possible anymore
133
+ del self.args, self.kwds, self.func
134
+ try:
135
+ return next(self.gen)
136
+ except StopIteration:
137
+ raise RuntimeError("generator didn't yield") from None
138
+
139
+ def __exit__(self, typ, value, traceback):
140
+ if typ is None:
141
+ try:
142
+ next(self.gen)
143
+ except StopIteration:
144
+ return False
145
+ else:
146
+ raise RuntimeError("generator didn't stop")
147
+ else:
148
+ if value is None:
149
+ # Need to force instantiation so we can reliably
150
+ # tell if we get the same exception back
151
+ value = typ()
152
+ try:
153
+ self.gen.throw(typ, value, traceback)
154
+ except StopIteration as exc:
155
+ # Suppress StopIteration *unless* it's the same exception that
156
+ # was passed to throw(). This prevents a StopIteration
157
+ # raised inside the "with" statement from being suppressed.
158
+ return exc is not value
159
+ except RuntimeError as exc:
160
+ # Don't re-raise the passed in exception. (issue27122)
161
+ if exc is value:
162
+ return False
163
+ # Avoid suppressing if a StopIteration exception
164
+ # was passed to throw() and later wrapped into a RuntimeError
165
+ # (see PEP 479 for sync generators; async generators also
166
+ # have this behavior). But do this only if the exception wrapped
167
+ # by the RuntimeError is actually Stop(Async)Iteration (see
168
+ # issue29692).
169
+ if (
170
+ isinstance(value, StopIteration)
171
+ and exc.__cause__ is value
172
+ ):
173
+ return False
174
+ raise
175
+ except BaseException as exc:
176
+ # only re-raise if it's *not* the exception that was
177
+ # passed to throw(), because __exit__() must not raise
178
+ # an exception unless __exit__() itself failed. But throw()
179
+ # has to raise the exception to signal propagation, so this
180
+ # fixes the impedance mismatch between the throw() protocol
181
+ # and the __exit__() protocol.
182
+ if exc is not value:
183
+ raise
184
+ return False
185
+ raise RuntimeError("generator didn't stop after throw()")
186
+
187
+ class _AsyncGeneratorContextManager(
188
+ _GeneratorContextManagerBase,
189
+ AbstractAsyncContextManager,
190
+ AsyncContextDecorator,
191
+ ):
192
+ """Helper for @asynccontextmanager decorator."""
193
+
194
+ async def __aenter__(self):
195
+ # do not keep args and kwds alive unnecessarily
196
+ # they are only needed for recreation, which is not possible anymore
197
+ del self.args, self.kwds, self.func
198
+ try:
199
+ return await anext(self.gen)
200
+ except StopAsyncIteration:
201
+ raise RuntimeError("generator didn't yield") from None
202
+
203
+ async def __aexit__(self, typ, value, traceback):
204
+ if typ is None:
205
+ try:
206
+ await anext(self.gen)
207
+ except StopAsyncIteration:
208
+ return False
209
+ else:
210
+ raise RuntimeError("generator didn't stop")
211
+ else:
212
+ if value is None:
213
+ # Need to force instantiation so we can reliably
214
+ # tell if we get the same exception back
215
+ value = typ()
216
+ try:
217
+ await self.gen.athrow(typ, value, traceback)
218
+ except StopAsyncIteration as exc:
219
+ # Suppress StopIteration *unless* it's the same exception that
220
+ # was passed to throw(). This prevents a StopIteration
221
+ # raised inside the "with" statement from being suppressed.
222
+ return exc is not value
223
+ except RuntimeError as exc:
224
+ # Don't re-raise the passed in exception. (issue27122)
225
+ if exc is value:
226
+ return False
227
+ # Avoid suppressing if a Stop(Async)Iteration exception
228
+ # was passed to athrow() and later wrapped into a RuntimeError
229
+ # (see PEP 479 for sync generators; async generators also
230
+ # have this behavior). But do this only if the exception wrapped
231
+ # by the RuntimeError is actually Stop(Async)Iteration (see
232
+ # issue29692).
233
+ if (
234
+ isinstance(value, (StopIteration, StopAsyncIteration))
235
+ and exc.__cause__ is value
236
+ ):
237
+ return False
238
+ raise
239
+ except BaseException as exc:
240
+ # only re-raise if it's *not* the exception that was
241
+ # passed to throw(), because __exit__() must not raise
242
+ # an exception unless __exit__() itself failed. But throw()
243
+ # has to raise the exception to signal propagation, so this
244
+ # fixes the impedance mismatch between the throw() protocol
245
+ # and the __exit__() protocol.
246
+ if exc is not value:
247
+ raise
248
+ return False
249
+ raise RuntimeError("generator didn't stop after athrow()")
250
+
251
+
252
+ def contextmanager(func):
253
+ """@contextmanager decorator.
254
+
255
+ Typical usage:
256
+
257
+ @contextmanager
258
+ def some_generator(<arguments>):
259
+ <setup>
260
+ try:
261
+ yield <value>
262
+ finally:
263
+ <cleanup>
264
+
265
+ This makes this:
266
+
267
+ with some_generator(<arguments>) as <variable>:
268
+ <body>
269
+
270
+ equivalent to this:
271
+
272
+ <setup>
273
+ try:
274
+ <variable> = <value>
275
+ <body>
276
+ finally:
277
+ <cleanup>
278
+ """
279
+ @wraps(func)
280
+ def helper(*args, **kwds):
281
+ return _GeneratorContextManager(func, args, kwds)
282
+ return helper
283
+
284
+
285
+ def asynccontextmanager(func):
286
+ """@asynccontextmanager decorator.
287
+
288
+ Typical usage:
289
+
290
+ @asynccontextmanager
291
+ async def some_async_generator(<arguments>):
292
+ <setup>
293
+ try:
294
+ yield <value>
295
+ finally:
296
+ <cleanup>
297
+
298
+ This makes this:
299
+
300
+ async with some_async_generator(<arguments>) as <variable>:
301
+ <body>
302
+
303
+ equivalent to this:
304
+
305
+ <setup>
306
+ try:
307
+ <variable> = <value>
308
+ <body>
309
+ finally:
310
+ <cleanup>
311
+ """
312
+ @wraps(func)
313
+ def helper(*args, **kwds):
314
+ return _AsyncGeneratorContextManager(func, args, kwds)
315
+ return helper
316
+
317
+
318
+ class closing(AbstractContextManager):
319
+ """Context to automatically close something at the end of a block.
320
+
321
+ Code like this:
322
+
323
+ with closing(<module>.open(<arguments>)) as f:
324
+ <block>
325
+
326
+ is equivalent to this:
327
+
328
+ f = <module>.open(<arguments>)
329
+ try:
330
+ <block>
331
+ finally:
332
+ f.close()
333
+
334
+ """
335
+ def __init__(self, thing):
336
+ self.thing = thing
337
+ def __enter__(self):
338
+ return self.thing
339
+ def __exit__(self, *exc_info):
340
+ self.thing.close()
341
+
342
+
343
+ class aclosing(AbstractAsyncContextManager):
344
+ """Async context manager for safely finalizing an asynchronously cleaned-up
345
+ resource such as an async generator, calling its ``aclose()`` method.
346
+
347
+ Code like this:
348
+
349
+ async with aclosing(<module>.fetch(<arguments>)) as agen:
350
+ <block>
351
+
352
+ is equivalent to this:
353
+
354
+ agen = <module>.fetch(<arguments>)
355
+ try:
356
+ <block>
357
+ finally:
358
+ await agen.aclose()
359
+
360
+ """
361
+ def __init__(self, thing):
362
+ self.thing = thing
363
+ async def __aenter__(self):
364
+ return self.thing
365
+ async def __aexit__(self, *exc_info):
366
+ await self.thing.aclose()
367
+
368
+
369
+ class _RedirectStream(AbstractContextManager):
370
+
371
+ _stream = None
372
+
373
+ def __init__(self, new_target):
374
+ self._new_target = new_target
375
+ # We use a list of old targets to make this CM re-entrant
376
+ self._old_targets = []
377
+
378
+ def __enter__(self):
379
+ self._old_targets.append(getattr(sys, self._stream))
380
+ setattr(sys, self._stream, self._new_target)
381
+ return self._new_target
382
+
383
+ def __exit__(self, exctype, excinst, exctb):
384
+ setattr(sys, self._stream, self._old_targets.pop())
385
+
386
+
387
+ class redirect_stdout(_RedirectStream):
388
+ """Context manager for temporarily redirecting stdout to another file.
389
+
390
+ # How to send help() to stderr
391
+ with redirect_stdout(sys.stderr):
392
+ help(dir)
393
+
394
+ # How to write help() to a file
395
+ with open('help.txt', 'w') as f:
396
+ with redirect_stdout(f):
397
+ help(pow)
398
+ """
399
+
400
+ _stream = "stdout"
401
+
402
+
403
+ class redirect_stderr(_RedirectStream):
404
+ """Context manager for temporarily redirecting stderr to another file."""
405
+
406
+ _stream = "stderr"
407
+
408
+
409
+ class suppress(AbstractContextManager):
410
+ """Context manager to suppress specified exceptions
411
+
412
+ After the exception is suppressed, execution proceeds with the next
413
+ statement following the with statement.
414
+
415
+ with suppress(FileNotFoundError):
416
+ os.remove(somefile)
417
+ # Execution still resumes here if the file was already removed
418
+ """
419
+
420
+ def __init__(self, *exceptions):
421
+ self._exceptions = exceptions
422
+
423
+ def __enter__(self):
424
+ pass
425
+
426
+ def __exit__(self, exctype, excinst, exctb):
427
+ # Unlike isinstance and issubclass, CPython exception handling
428
+ # currently only looks at the concrete type hierarchy (ignoring
429
+ # the instance and subclass checking hooks). While Guido considers
430
+ # that a bug rather than a feature, it's a fairly hard one to fix
431
+ # due to various internal implementation details. suppress provides
432
+ # the simpler issubclass based semantics, rather than trying to
433
+ # exactly reproduce the limitations of the CPython interpreter.
434
+ #
435
+ # See http://bugs.python.org/issue12029 for more details
436
+ return exctype is not None and issubclass(exctype, self._exceptions)
437
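# Sketch for suppress(): the issubclass() test in __exit__ above is what
# swallows the exception (the file name is invented).
import contextlib, os
with contextlib.suppress(FileNotFoundError):
    os.remove("no-such-file.tmp")   # a missing file no longer raises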
+
438
+
439
+ class _BaseExitStack:
440
+ """A base class for ExitStack and AsyncExitStack."""
441
+
442
+ @staticmethod
443
+ def _create_exit_wrapper(cm, cm_exit):
444
+ return MethodType(cm_exit, cm)
445
+
446
+ @staticmethod
447
+ def _create_cb_wrapper(callback, /, *args, **kwds):
448
+ def _exit_wrapper(exc_type, exc, tb):
449
+ callback(*args, **kwds)
450
+ return _exit_wrapper
451
+
452
+ def __init__(self):
453
+ self._exit_callbacks = deque()
454
+
455
+ def pop_all(self):
456
+ """Preserve the context stack by transferring it to a new instance."""
457
+ new_stack = type(self)()
458
+ new_stack._exit_callbacks = self._exit_callbacks
459
+ self._exit_callbacks = deque()
460
+ return new_stack
461
+
462
+ def push(self, exit):
463
+ """Registers a callback with the standard __exit__ method signature.
464
+
465
+ Can suppress exceptions the same way __exit__ method can.
466
+ Also accepts any object with an __exit__ method (registering a call
467
+ to the method instead of the object itself).
468
+ """
469
+ # We use an unbound method rather than a bound method to follow
470
+ # the standard lookup behaviour for special methods.
471
+ _cb_type = type(exit)
472
+
473
+ try:
474
+ exit_method = _cb_type.__exit__
475
+ except AttributeError:
476
+ # Not a context manager, so assume it's a callable.
477
+ self._push_exit_callback(exit)
478
+ else:
479
+ self._push_cm_exit(exit, exit_method)
480
+ return exit # Allow use as a decorator.
481
+
482
+ def enter_context(self, cm):
483
+ """Enters the supplied context manager.
484
+
485
+ If successful, also pushes its __exit__ method as a callback and
486
+ returns the result of the __enter__ method.
487
+ """
488
+ # We look up the special methods on the type to match the with
489
+ # statement.
490
+ _cm_type = type(cm)
491
+ _exit = _cm_type.__exit__
492
+ result = _cm_type.__enter__(cm)
493
+ self._push_cm_exit(cm, _exit)
494
+ return result
495
+
496
+ def callback(self, callback, /, *args, **kwds):
497
+ """Registers an arbitrary callback and arguments.
498
+
499
+ Cannot suppress exceptions.
500
+ """
501
+ _exit_wrapper = self._create_cb_wrapper(callback, *args, **kwds)
502
+
503
+ # We changed the signature, so using @wraps is not appropriate, but
504
+ # setting __wrapped__ may still help with introspection.
505
+ _exit_wrapper.__wrapped__ = callback
506
+ self._push_exit_callback(_exit_wrapper)
507
+ return callback # Allow use as a decorator
508
+
509
+ def _push_cm_exit(self, cm, cm_exit):
510
+ """Helper to correctly register callbacks to __exit__ methods."""
511
+ _exit_wrapper = self._create_exit_wrapper(cm, cm_exit)
512
+ self._push_exit_callback(_exit_wrapper, True)
513
+
514
+ def _push_exit_callback(self, callback, is_sync=True):
515
+ self._exit_callbacks.append((is_sync, callback))
516
+
517
+
518
+ # Inspired by discussions on http://bugs.python.org/issue13585
519
+ class ExitStack(_BaseExitStack, AbstractContextManager):
520
+ """Context manager for dynamic management of a stack of exit callbacks.
521
+
522
+ For example:
523
+ with ExitStack() as stack:
524
+ files = [stack.enter_context(open(fname)) for fname in filenames]
525
+ # All opened files will automatically be closed at the end of
526
+ # the with statement, even if attempts to open files later
527
+ # in the list raise an exception.
528
+ """
529
+
530
+ def __enter__(self):
531
+ return self
532
+
533
+ def __exit__(self, *exc_details):
534
+ received_exc = exc_details[0] is not None
535
+
536
+ # We manipulate the exception state so it behaves as though
537
+ # we were actually nesting multiple with statements
538
+ frame_exc = sys.exc_info()[1]
539
+ def _fix_exception_context(new_exc, old_exc):
540
+ # Context may not be correct, so find the end of the chain
541
+ while 1:
542
+ exc_context = new_exc.__context__
543
+ if exc_context is None or exc_context is old_exc:
544
+ # Context is already set correctly (see issue 20317)
545
+ return
546
+ if exc_context is frame_exc:
547
+ break
548
+ new_exc = exc_context
549
+ # Change the end of the chain to point to the exception
550
+ # we expect it to reference
551
+ new_exc.__context__ = old_exc
552
+
553
+ # Callbacks are invoked in LIFO order to match the behaviour of
554
+ # nested context managers
555
+ suppressed_exc = False
556
+ pending_raise = False
557
+ while self._exit_callbacks:
558
+ is_sync, cb = self._exit_callbacks.pop()
559
+ assert is_sync
560
+ try:
561
+ if cb(*exc_details):
562
+ suppressed_exc = True
563
+ pending_raise = False
564
+ exc_details = (None, None, None)
565
+ except:
566
+ new_exc_details = sys.exc_info()
567
+ # simulate the stack of exceptions by setting the context
568
+ _fix_exception_context(new_exc_details[1], exc_details[1])
569
+ pending_raise = True
570
+ exc_details = new_exc_details
571
+ if pending_raise:
572
+ try:
573
+ # bare "raise exc_details[1]" replaces our carefully
574
+ # set-up context
575
+ fixed_ctx = exc_details[1].__context__
576
+ raise exc_details[1]
577
+ except BaseException:
578
+ exc_details[1].__context__ = fixed_ctx
579
+ raise
580
+ return received_exc and suppressed_exc
581
+
582
+ def close(self):
583
+ """Immediately unwind the context stack."""
584
+ self.__exit__(None, None, None)
585
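# Sketch of ExitStack unwinding: callbacks run LIFO on __exit__, mirroring
# nested with-blocks (resources here are just StringIO objects).
import contextlib, io
with contextlib.ExitStack() as stack:
    a = stack.enter_context(io.StringIO("first"))
    b = stack.enter_context(io.StringIO("second"))
    stack.callback(print, "runs first on exit (registered last)")
# exiting closed b, then a, after the callback printed
assert a.closed and b.closed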
+
586
+
587
+ # Inspired by discussions on https://bugs.python.org/issue29302
588
+ class AsyncExitStack(_BaseExitStack, AbstractAsyncContextManager):
589
+ """Async context manager for dynamic management of a stack of exit
590
+ callbacks.
591
+
592
+ For example:
593
+ async with AsyncExitStack() as stack:
594
+ connections = [await stack.enter_async_context(get_connection())
595
+ for i in range(5)]
596
+ # All opened connections will automatically be released at the
597
+ # end of the async with statement, even if attempts to open a
598
+ # connection later in the list raise an exception.
599
+ """
600
+
601
+ @staticmethod
602
+ def _create_async_exit_wrapper(cm, cm_exit):
603
+ return MethodType(cm_exit, cm)
604
+
605
+ @staticmethod
606
+ def _create_async_cb_wrapper(callback, /, *args, **kwds):
607
+ async def _exit_wrapper(exc_type, exc, tb):
608
+ await callback(*args, **kwds)
609
+ return _exit_wrapper
610
+
611
+ async def enter_async_context(self, cm):
612
+ """Enters the supplied async context manager.
613
+
614
+ If successful, also pushes its __aexit__ method as a callback and
615
+ returns the result of the __aenter__ method.
616
+ """
617
+ _cm_type = type(cm)
618
+ _exit = _cm_type.__aexit__
619
+ result = await _cm_type.__aenter__(cm)
620
+ self._push_async_cm_exit(cm, _exit)
621
+ return result
622
+
623
+ def push_async_exit(self, exit):
624
+ """Registers a coroutine function with the standard __aexit__ method
625
+ signature.
626
+
627
+ Can suppress exceptions the same way __aexit__ method can.
628
+ Also accepts any object with an __aexit__ method (registering a call
629
+ to the method instead of the object itself).
630
+ """
631
+ _cb_type = type(exit)
632
+ try:
633
+ exit_method = _cb_type.__aexit__
634
+ except AttributeError:
635
+ # Not an async context manager, so assume it's a coroutine function
636
+ self._push_exit_callback(exit, False)
637
+ else:
638
+ self._push_async_cm_exit(exit, exit_method)
639
+ return exit # Allow use as a decorator
640
+
641
+ def push_async_callback(self, callback, /, *args, **kwds):
642
+ """Registers an arbitrary coroutine function and arguments.
643
+
644
+ Cannot suppress exceptions.
645
+ """
646
+ _exit_wrapper = self._create_async_cb_wrapper(callback, *args, **kwds)
647
+
648
+ # We changed the signature, so using @wraps is not appropriate, but
649
+ # setting __wrapped__ may still help with introspection.
650
+ _exit_wrapper.__wrapped__ = callback
651
+ self._push_exit_callback(_exit_wrapper, False)
652
+ return callback # Allow use as a decorator
653
+
654
+ async def aclose(self):
655
+ """Immediately unwind the context stack."""
656
+ await self.__aexit__(None, None, None)
657
+
658
+ def _push_async_cm_exit(self, cm, cm_exit):
659
+ """Helper to correctly register coroutine function to __aexit__
660
+ method."""
661
+ _exit_wrapper = self._create_async_exit_wrapper(cm, cm_exit)
662
+ self._push_exit_callback(_exit_wrapper, False)
663
+
664
+ async def __aenter__(self):
665
+ return self
666
+
667
+ async def __aexit__(self, *exc_details):
668
+ received_exc = exc_details[0] is not None
669
+
670
+ # We manipulate the exception state so it behaves as though
671
+ # we were actually nesting multiple with statements
672
+ frame_exc = sys.exc_info()[1]
673
+ def _fix_exception_context(new_exc, old_exc):
674
+ # Context may not be correct, so find the end of the chain
675
+ while 1:
676
+ exc_context = new_exc.__context__
677
+ if exc_context is None or exc_context is old_exc:
678
+ # Context is already set correctly (see issue 20317)
679
+ return
680
+ if exc_context is frame_exc:
681
+ break
682
+ new_exc = exc_context
683
+ # Change the end of the chain to point to the exception
684
+ # we expect it to reference
685
+ new_exc.__context__ = old_exc
686
+
687
+ # Callbacks are invoked in LIFO order to match the behaviour of
688
+ # nested context managers
689
+ suppressed_exc = False
690
+ pending_raise = False
691
+ while self._exit_callbacks:
692
+ is_sync, cb = self._exit_callbacks.pop()
693
+ try:
694
+ if is_sync:
695
+ cb_suppress = cb(*exc_details)
696
+ else:
697
+ cb_suppress = await cb(*exc_details)
698
+
699
+ if cb_suppress:
700
+ suppressed_exc = True
701
+ pending_raise = False
702
+ exc_details = (None, None, None)
703
+ except:
704
+ new_exc_details = sys.exc_info()
705
+ # simulate the stack of exceptions by setting the context
706
+ _fix_exception_context(new_exc_details[1], exc_details[1])
707
+ pending_raise = True
708
+ exc_details = new_exc_details
709
+ if pending_raise:
710
+ try:
711
+ # bare "raise exc_details[1]" replaces our carefully
712
+ # set-up context
713
+ fixed_ctx = exc_details[1].__context__
714
+ raise exc_details[1]
715
+ except BaseException:
716
+ exc_details[1].__context__ = fixed_ctx
717
+ raise
718
+ return received_exc and suppressed_exc
719
+
720
+
721
+ class nullcontext(AbstractContextManager, AbstractAsyncContextManager):
722
+ """Context manager that does no additional processing.
723
+
724
+ Used as a stand-in for a normal context manager, when a particular
725
+ block of code is only sometimes used with a normal context manager:
726
+
727
+ cm = optional_cm if condition else nullcontext()
728
+ with cm:
729
+ # Perform operation, using optional_cm if condition is True
730
+ """
731
+
732
+ def __init__(self, enter_result=None):
733
+ self.enter_result = enter_result
734
+
735
+ def __enter__(self):
736
+ return self.enter_result
737
+
738
+ def __exit__(self, *excinfo):
739
+ pass
740
+
741
+ async def __aenter__(self):
742
+ return self.enter_result
743
+
744
+ async def __aexit__(self, *excinfo):
745
+ pass
janus/lib/python3.10/copy.py ADDED
@@ -0,0 +1,304 @@
1
+ """Generic (shallow and deep) copying operations.
2
+
3
+ Interface summary:
4
+
5
+ import copy
6
+
7
+ x = copy.copy(y) # make a shallow copy of y
8
+ x = copy.deepcopy(y) # make a deep copy of y
9
+
10
+ For module specific errors, copy.Error is raised.
11
+
12
+ The difference between shallow and deep copying is only relevant for
13
+ compound objects (objects that contain other objects, like lists or
14
+ class instances).
15
+
16
+ - A shallow copy constructs a new compound object and then (to the
17
+ extent possible) inserts *the same objects* into it that the
18
+ original contains.
19
+
20
+ - A deep copy constructs a new compound object and then, recursively,
21
+ inserts *copies* into it of the objects found in the original.
22
+
23
+ Two problems often exist with deep copy operations that don't exist
24
+ with shallow copy operations:
25
+
26
+ a) recursive objects (compound objects that, directly or indirectly,
27
+ contain a reference to themselves) may cause a recursive loop
28
+
29
+ b) because deep copy copies *everything* it may copy too much, e.g.
30
+ administrative data structures that should be shared even between
31
+ copies
32
+
33
+ Python's deep copy operation avoids these problems by:
34
+
35
+ a) keeping a table of objects already copied during the current
36
+ copying pass
37
+
38
+ b) letting user-defined classes override the copying operation or the
39
+ set of components copied
40
+
41
+ This version does not copy types like module, class, function, method,
42
+ nor stack trace, stack frame, nor file, socket, window, nor any
43
+ similar types.
44
+
45
+ Classes can use the same interfaces to control copying that they use
46
+ to control pickling: they can define methods called __getinitargs__(),
47
+ __getstate__() and __setstate__(). See the documentation for module
48
+ "pickle" for information on these methods.
49
+ """
50
+
51
+ import types
52
+ import weakref
53
+ from copyreg import dispatch_table
54
+
55
+ class Error(Exception):
56
+ pass
57
+ error = Error # backward compatibility
58
+
59
+ try:
60
+ from org.python.core import PyStringMap
61
+ except ImportError:
62
+ PyStringMap = None
63
+
64
+ __all__ = ["Error", "copy", "deepcopy"]
65
+
66
+ def copy(x):
67
+ """Shallow copy operation on arbitrary Python objects.
68
+
69
+ See the module's __doc__ string for more info.
70
+ """
71
+
72
+ cls = type(x)
73
+
74
+ copier = _copy_dispatch.get(cls)
75
+ if copier:
76
+ return copier(x)
77
+
78
+ if issubclass(cls, type):
79
+ # treat it as a regular class:
80
+ return _copy_immutable(x)
81
+
82
+ copier = getattr(cls, "__copy__", None)
83
+ if copier is not None:
84
+ return copier(x)
85
+
86
+ reductor = dispatch_table.get(cls)
87
+ if reductor is not None:
88
+ rv = reductor(x)
89
+ else:
90
+ reductor = getattr(x, "__reduce_ex__", None)
91
+ if reductor is not None:
92
+ rv = reductor(4)
93
+ else:
94
+ reductor = getattr(x, "__reduce__", None)
95
+ if reductor:
96
+ rv = reductor()
97
+ else:
98
+ raise Error("un(shallow)copyable object of type %s" % cls)
99
+
100
+ if isinstance(rv, str):
101
+ return x
102
+ return _reconstruct(x, None, *rv)
103
+
104
+
105
+ _copy_dispatch = d = {}
106
+
107
+ def _copy_immutable(x):
108
+ return x
109
+ for t in (type(None), int, float, bool, complex, str, tuple,
110
+ bytes, frozenset, type, range, slice, property,
111
+ types.BuiltinFunctionType, type(Ellipsis), type(NotImplemented),
112
+ types.FunctionType, weakref.ref):
113
+ d[t] = _copy_immutable
114
+ t = getattr(types, "CodeType", None)
115
+ if t is not None:
116
+ d[t] = _copy_immutable
117
+
118
+ d[list] = list.copy
119
+ d[dict] = dict.copy
120
+ d[set] = set.copy
121
+ d[bytearray] = bytearray.copy
122
+
123
+ if PyStringMap is not None:
124
+ d[PyStringMap] = PyStringMap.copy
125
+
126
+ del d, t
127
+
128
+ def deepcopy(x, memo=None, _nil=[]):
129
+ """Deep copy operation on arbitrary Python objects.
130
+
131
+ See the module's __doc__ string for more info.
132
+ """
133
+
134
+ if memo is None:
135
+ memo = {}
136
+
137
+ d = id(x)
138
+ y = memo.get(d, _nil)
139
+ if y is not _nil:
140
+ return y
141
+
142
+ cls = type(x)
143
+
144
+ copier = _deepcopy_dispatch.get(cls)
145
+ if copier is not None:
146
+ y = copier(x, memo)
147
+ else:
148
+ if issubclass(cls, type):
149
+ y = _deepcopy_atomic(x, memo)
150
+ else:
151
+ copier = getattr(x, "__deepcopy__", None)
152
+ if copier is not None:
153
+ y = copier(memo)
154
+ else:
155
+ reductor = dispatch_table.get(cls)
156
+ if reductor:
157
+ rv = reductor(x)
158
+ else:
159
+ reductor = getattr(x, "__reduce_ex__", None)
160
+ if reductor is not None:
161
+ rv = reductor(4)
162
+ else:
163
+ reductor = getattr(x, "__reduce__", None)
164
+ if reductor:
165
+ rv = reductor()
166
+ else:
167
+ raise Error(
168
+ "un(deep)copyable object of type %s" % cls)
169
+ if isinstance(rv, str):
170
+ y = x
171
+ else:
172
+ y = _reconstruct(x, memo, *rv)
173
+
174
+ # If is its own copy, don't memoize.
175
+ if y is not x:
176
+ memo[d] = y
177
+ _keep_alive(x, memo) # Make sure x lives at least as long as d
178
+ return y
179
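# Memo sketch for deepcopy() above: the id-keyed memo lets self-referential
# structures copy without infinite recursion (data invented).
import copy
xs = [1, 2]
xs.append(xs)                  # recursive list
ys = copy.deepcopy(xs)
assert ys[2] is ys and ys is not xs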
+
180
+ _deepcopy_dispatch = d = {}
181
+
182
+ def _deepcopy_atomic(x, memo):
183
+ return x
184
+ d[type(None)] = _deepcopy_atomic
185
+ d[type(Ellipsis)] = _deepcopy_atomic
186
+ d[type(NotImplemented)] = _deepcopy_atomic
187
+ d[int] = _deepcopy_atomic
188
+ d[float] = _deepcopy_atomic
189
+ d[bool] = _deepcopy_atomic
190
+ d[complex] = _deepcopy_atomic
191
+ d[bytes] = _deepcopy_atomic
192
+ d[str] = _deepcopy_atomic
193
+ d[types.CodeType] = _deepcopy_atomic
194
+ d[type] = _deepcopy_atomic
195
+ d[range] = _deepcopy_atomic
196
+ d[types.BuiltinFunctionType] = _deepcopy_atomic
197
+ d[types.FunctionType] = _deepcopy_atomic
198
+ d[weakref.ref] = _deepcopy_atomic
199
+ d[property] = _deepcopy_atomic
200
+
201
+ def _deepcopy_list(x, memo, deepcopy=deepcopy):
202
+ y = []
203
+ memo[id(x)] = y
204
+ append = y.append
205
+ for a in x:
206
+ append(deepcopy(a, memo))
207
+ return y
208
+ d[list] = _deepcopy_list
209
+
210
+ def _deepcopy_tuple(x, memo, deepcopy=deepcopy):
211
+ y = [deepcopy(a, memo) for a in x]
212
+ # We're not going to put the tuple in the memo, but it's still important we
213
+ # check for it, in case the tuple contains recursive mutable structures.
214
+ try:
215
+ return memo[id(x)]
216
+ except KeyError:
217
+ pass
218
+ for k, j in zip(x, y):
219
+ if k is not j:
220
+ y = tuple(y)
221
+ break
222
+ else:
223
+ y = x
224
+ return y
225
+ d[tuple] = _deepcopy_tuple
226
+
227
+ def _deepcopy_dict(x, memo, deepcopy=deepcopy):
228
+ y = {}
229
+ memo[id(x)] = y
230
+ for key, value in x.items():
231
+ y[deepcopy(key, memo)] = deepcopy(value, memo)
232
+ return y
233
+ d[dict] = _deepcopy_dict
234
+ if PyStringMap is not None:
235
+ d[PyStringMap] = _deepcopy_dict
236
+
237
+ def _deepcopy_method(x, memo): # Copy instance methods
238
+ return type(x)(x.__func__, deepcopy(x.__self__, memo))
239
+ d[types.MethodType] = _deepcopy_method
240
+
241
+ del d
242
+
243
+ def _keep_alive(x, memo):
244
+ """Keeps a reference to the object x in the memo.
245
+
246
+ Because we remember objects by their id, we have
247
+ to assure that possibly temporary objects are kept
248
+ alive by referencing them.
249
+ We store a reference at the id of the memo, which should
250
+ normally not be used unless someone tries to deepcopy
251
+ the memo itself...
252
+ """
253
+ try:
254
+ memo[id(memo)].append(x)
255
+ except KeyError:
256
+ # aha, this is the first one :-)
257
+ memo[id(memo)]=[x]
258
+
259
+ def _reconstruct(x, memo, func, args,
260
+ state=None, listiter=None, dictiter=None,
261
+ *, deepcopy=deepcopy):
262
+ deep = memo is not None
263
+ if deep and args:
264
+ args = (deepcopy(arg, memo) for arg in args)
265
+ y = func(*args)
266
+ if deep:
267
+ memo[id(x)] = y
268
+
269
+ if state is not None:
270
+ if deep:
271
+ state = deepcopy(state, memo)
272
+ if hasattr(y, '__setstate__'):
273
+ y.__setstate__(state)
274
+ else:
275
+ if isinstance(state, tuple) and len(state) == 2:
276
+ state, slotstate = state
277
+ else:
278
+ slotstate = None
279
+ if state is not None:
280
+ y.__dict__.update(state)
281
+ if slotstate is not None:
282
+ for key, value in slotstate.items():
283
+ setattr(y, key, value)
284
+
285
+ if listiter is not None:
286
+ if deep:
287
+ for item in listiter:
288
+ item = deepcopy(item, memo)
289
+ y.append(item)
290
+ else:
291
+ for item in listiter:
292
+ y.append(item)
293
+ if dictiter is not None:
294
+ if deep:
295
+ for key, value in dictiter:
296
+ key = deepcopy(key, memo)
297
+ value = deepcopy(value, memo)
298
+ y[key] = value
299
+ else:
300
+ for key, value in dictiter:
301
+ y[key] = value
302
+ return y
303
+
304
+ del types, weakref, PyStringMap
janus/lib/python3.10/crypt.py ADDED
@@ -0,0 +1,120 @@
1
+ """Wrapper to the POSIX crypt library call and associated functionality."""
2
+
3
+ import sys as _sys
4
+
5
+ try:
6
+ import _crypt
7
+ except ModuleNotFoundError:
8
+ if _sys.platform == 'win32':
9
+ raise ImportError("The crypt module is not supported on Windows")
10
+ else:
11
+ raise ImportError("The required _crypt module was not built as part of CPython")
12
+
13
+ import errno
14
+ import string as _string
15
+ from random import SystemRandom as _SystemRandom
16
+ from collections import namedtuple as _namedtuple
17
+
18
+
19
+ _saltchars = _string.ascii_letters + _string.digits + './'
20
+ _sr = _SystemRandom()
21
+
22
+
23
+ class _Method(_namedtuple('_Method', 'name ident salt_chars total_size')):
24
+
25
+ """Class representing a salt method per the Modular Crypt Format or the
26
+ legacy 2-character crypt method."""
27
+
28
+ def __repr__(self):
29
+ return '<crypt.METHOD_{}>'.format(self.name)
30
+
31
+
32
+ def mksalt(method=None, *, rounds=None):
33
+ """Generate a salt for the specified method.
34
+
35
+ If not specified, the strongest available method will be used.
36
+
37
+ """
38
+ if method is None:
39
+ method = methods[0]
40
+ if rounds is not None and not isinstance(rounds, int):
41
+ raise TypeError(f'{rounds.__class__.__name__} object cannot be '
42
+ f'interpreted as an integer')
43
+ if not method.ident: # traditional
44
+ s = ''
45
+ else: # modular
46
+ s = f'${method.ident}$'
47
+
48
+ if method.ident and method.ident[0] == '2': # Blowfish variants
49
+ if rounds is None:
50
+ log_rounds = 12
51
+ else:
52
+ log_rounds = int.bit_length(rounds-1)
53
+ if rounds != 1 << log_rounds:
54
+ raise ValueError('rounds must be a power of 2')
55
+ if not 4 <= log_rounds <= 31:
56
+ raise ValueError('rounds out of the range 2**4 to 2**31')
57
+ s += f'{log_rounds:02d}$'
58
+ elif method.ident in ('5', '6'): # SHA-2
59
+ if rounds is not None:
60
+ if not 1000 <= rounds <= 999_999_999:
61
+ raise ValueError('rounds out of the range 1000 to 999_999_999')
62
+ s += f'rounds={rounds}$'
63
+ elif rounds is not None:
64
+ raise ValueError(f"{method} doesn't support the rounds argument")
65
+
66
+ s += ''.join(_sr.choice(_saltchars) for char in range(method.salt_chars))
67
+ return s
68
+
69
+
70
+ def crypt(word, salt=None):
71
+ """Return a string representing the one-way hash of a password, with a salt
72
+ prepended.
73
+
74
+ If ``salt`` is not specified or is ``None``, the strongest
75
+ available method will be selected and a salt generated. Otherwise,
76
+ ``salt`` may be one of the ``crypt.METHOD_*`` values, or a string as
77
+ returned by ``crypt.mksalt()``.
78
+
79
+ """
80
+ if salt is None or isinstance(salt, _Method):
81
+ salt = mksalt(salt)
82
+ return _crypt.crypt(word, salt)
83
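# Usage sketch for crypt()/mksalt(); assumes a POSIX libc with _crypt built.
# Verification re-hashes with the stored hash string as the salt.
import crypt, hmac
hashed = crypt.crypt("s3cret")   # strongest available method, generated salt
assert hmac.compare_digest(hashed, crypt.crypt("s3cret", hashed))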
+
84
+
85
+ # available salting/crypto methods
86
+ methods = []
87
+
88
+ def _add_method(name, *args, rounds=None):
89
+ method = _Method(name, *args)
90
+ globals()['METHOD_' + name] = method
91
+ salt = mksalt(method, rounds=rounds)
92
+ result = None
93
+ try:
94
+ result = crypt('', salt)
95
+ except OSError as e:
96
+ # Not all libc libraries support all encryption methods.
97
+ if e.errno in {errno.EINVAL, errno.EPERM, errno.ENOSYS}:
98
+ return False
99
+ raise
100
+ if result and len(result) == method.total_size:
101
+ methods.append(method)
102
+ return True
103
+ return False
104
+
105
+ _add_method('SHA512', '6', 16, 106)
106
+ _add_method('SHA256', '5', 16, 63)
107
+
108
+ # Choose the strongest supported version of Blowfish hashing.
109
+ # Early versions have flaws. Version 'a' fixes flaws of
110
+ # the initial implementation, 'b' fixes flaws of 'a'.
111
+ # 'y' is the same as 'b', for compatibility
112
+ # with openwall crypt_blowfish.
113
+ for _v in 'b', 'y', 'a', '':
114
+ if _add_method('BLOWFISH', '2' + _v, 22, 59 + len(_v), rounds=1<<4):
115
+ break
116
+
117
+ _add_method('MD5', '1', 8, 34)
118
+ _add_method('CRYPT', None, 2, 13)
119
+
120
+ del _v, _add_method
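
A minimal usage sketch for the crypt module above, assuming the platform libc built at least one of the probed methods (the password and variable names are illustrative):

import crypt
from hmac import compare_digest

# Hash with the strongest available method (methods[0]); crypt() generates
# the salt itself when none is supplied.
stored = crypt.crypt("hunter2")

# Verify a login attempt: crypt() reads the method and salt back out of the
# stored string, so re-hashing with it reproduces the same parameters.
assert compare_digest(stored, crypt.crypt("hunter2", stored))
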
janus/lib/python3.10/csv.py ADDED
@@ -0,0 +1,444 @@
1
+
2
+ """
3
+ csv.py - read/write/investigate CSV files
4
+ """
5
+
6
+ import re
7
+ from _csv import Error, __version__, writer, reader, register_dialect, \
8
+ unregister_dialect, get_dialect, list_dialects, \
9
+ field_size_limit, \
10
+ QUOTE_MINIMAL, QUOTE_ALL, QUOTE_NONNUMERIC, QUOTE_NONE, \
11
+ __doc__
12
+ from _csv import Dialect as _Dialect
13
+
14
+ from io import StringIO
15
+
16
+ __all__ = ["QUOTE_MINIMAL", "QUOTE_ALL", "QUOTE_NONNUMERIC", "QUOTE_NONE",
17
+ "Error", "Dialect", "__doc__", "excel", "excel_tab",
18
+ "field_size_limit", "reader", "writer",
19
+ "register_dialect", "get_dialect", "list_dialects", "Sniffer",
20
+ "unregister_dialect", "__version__", "DictReader", "DictWriter",
21
+ "unix_dialect"]
22
+
23
+ class Dialect:
24
+ """Describe a CSV dialect.
25
+
26
+ This must be subclassed (see csv.excel). Valid attributes are:
27
+ delimiter, quotechar, escapechar, doublequote, skipinitialspace,
28
+ lineterminator, quoting.
29
+
30
+ """
31
+ _name = ""
32
+ _valid = False
33
+ # placeholders
34
+ delimiter = None
35
+ quotechar = None
36
+ escapechar = None
37
+ doublequote = None
38
+ skipinitialspace = None
39
+ lineterminator = None
40
+ quoting = None
41
+
42
+ def __init__(self):
43
+ if self.__class__ != Dialect:
44
+ self._valid = True
45
+ self._validate()
46
+
47
+ def _validate(self):
48
+ try:
49
+ _Dialect(self)
50
+ except TypeError as e:
51
+ # We do this for compatibility with py2.3
52
+ raise Error(str(e))
53
+
54
+ class excel(Dialect):
55
+ """Describe the usual properties of Excel-generated CSV files."""
56
+ delimiter = ','
57
+ quotechar = '"'
58
+ doublequote = True
59
+ skipinitialspace = False
60
+ lineterminator = '\r\n'
61
+ quoting = QUOTE_MINIMAL
62
+ register_dialect("excel", excel)
63
+
64
+ class excel_tab(excel):
65
+ """Describe the usual properties of Excel-generated TAB-delimited files."""
66
+ delimiter = '\t'
67
+ register_dialect("excel-tab", excel_tab)
68
+
69
+ class unix_dialect(Dialect):
70
+ """Describe the usual properties of Unix-generated CSV files."""
71
+ delimiter = ','
72
+ quotechar = '"'
73
+ doublequote = True
74
+ skipinitialspace = False
75
+ lineterminator = '\n'
76
+ quoting = QUOTE_ALL
77
+ register_dialect("unix", unix_dialect)
78
+
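
# Usage sketch (illustrative, not part of csv.py): a custom dialect is
# registered exactly like the built-in ones above; the name "pipes" is
# hypothetical.
import csv

class pipes(csv.Dialect):
    delimiter = '|'
    quotechar = '"'
    doublequote = True
    skipinitialspace = False
    lineterminator = '\r\n'
    quoting = csv.QUOTE_MINIMAL

csv.register_dialect("pipes", pipes)
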
79
+
80
+ class DictReader:
81
+ def __init__(self, f, fieldnames=None, restkey=None, restval=None,
82
+ dialect="excel", *args, **kwds):
83
+ self._fieldnames = fieldnames # list of keys for the dict
84
+ self.restkey = restkey # key to catch long rows
85
+ self.restval = restval # default value for short rows
86
+ self.reader = reader(f, dialect, *args, **kwds)
87
+ self.dialect = dialect
88
+ self.line_num = 0
89
+
90
+ def __iter__(self):
91
+ return self
92
+
93
+ @property
94
+ def fieldnames(self):
95
+ if self._fieldnames is None:
96
+ try:
97
+ self._fieldnames = next(self.reader)
98
+ except StopIteration:
99
+ pass
100
+ self.line_num = self.reader.line_num
101
+ return self._fieldnames
102
+
103
+ @fieldnames.setter
104
+ def fieldnames(self, value):
105
+ self._fieldnames = value
106
+
107
+ def __next__(self):
108
+ if self.line_num == 0:
109
+ # Used only for its side effect.
110
+ self.fieldnames
111
+ row = next(self.reader)
112
+ self.line_num = self.reader.line_num
113
+
114
+ # unlike the basic reader, we prefer not to return blanks,
115
+ # because we will typically wind up with a dict full of None
116
+ # values
117
+ while row == []:
118
+ row = next(self.reader)
119
+ d = dict(zip(self.fieldnames, row))
120
+ lf = len(self.fieldnames)
121
+ lr = len(row)
122
+ if lf < lr:
123
+ d[self.restkey] = row[lf:]
124
+ elif lf > lr:
125
+ for key in self.fieldnames[lr:]:
126
+ d[key] = self.restval
127
+ return d
128
+
129
+
130
+ class DictWriter:
131
+ def __init__(self, f, fieldnames, restval="", extrasaction="raise",
132
+ dialect="excel", *args, **kwds):
133
+ self.fieldnames = fieldnames # list of keys for the dict
134
+ self.restval = restval # for writing short dicts
135
+ if extrasaction.lower() not in ("raise", "ignore"):
136
+ raise ValueError("extrasaction (%s) must be 'raise' or 'ignore'"
137
+ % extrasaction)
138
+ self.extrasaction = extrasaction
139
+ self.writer = writer(f, dialect, *args, **kwds)
140
+
141
+ def writeheader(self):
142
+ header = dict(zip(self.fieldnames, self.fieldnames))
143
+ return self.writerow(header)
144
+
145
+ def _dict_to_list(self, rowdict):
146
+ if self.extrasaction == "raise":
147
+ wrong_fields = rowdict.keys() - self.fieldnames
148
+ if wrong_fields:
149
+ raise ValueError("dict contains fields not in fieldnames: "
150
+ + ", ".join([repr(x) for x in wrong_fields]))
151
+ return (rowdict.get(key, self.restval) for key in self.fieldnames)
152
+
153
+ def writerow(self, rowdict):
154
+ return self.writer.writerow(self._dict_to_list(rowdict))
155
+
156
+ def writerows(self, rowdicts):
157
+ return self.writer.writerows(map(self._dict_to_list, rowdicts))
158
+
159
+ # Guard Sniffer's type checking against builds that exclude complex()
160
+ try:
161
+ complex
162
+ except NameError:
163
+ complex = float
164
+
165
+ class Sniffer:
166
+ '''
167
+ "Sniffs" the format of a CSV file (i.e. delimiter, quotechar)
168
+ Returns a Dialect object.
169
+ '''
170
+ def __init__(self):
171
+ # in case there is more than one possible delimiter
172
+ self.preferred = [',', '\t', ';', ' ', ':']
173
+
174
+
175
+ def sniff(self, sample, delimiters=None):
176
+ """
177
+ Returns a dialect (or None) corresponding to the sample
178
+ """
179
+
180
+ quotechar, doublequote, delimiter, skipinitialspace = \
181
+ self._guess_quote_and_delimiter(sample, delimiters)
182
+ if not delimiter:
183
+ delimiter, skipinitialspace = self._guess_delimiter(sample,
184
+ delimiters)
185
+
186
+ if not delimiter:
187
+ raise Error("Could not determine delimiter")
188
+
189
+ class dialect(Dialect):
190
+ _name = "sniffed"
191
+ lineterminator = '\r\n'
192
+ quoting = QUOTE_MINIMAL
193
+ # escapechar = ''
194
+
195
+ dialect.doublequote = doublequote
196
+ dialect.delimiter = delimiter
197
+ # _csv.reader won't accept a quotechar of ''
198
+ dialect.quotechar = quotechar or '"'
199
+ dialect.skipinitialspace = skipinitialspace
200
+
201
+ return dialect
202
+
203
+
204
+ def _guess_quote_and_delimiter(self, data, delimiters):
205
+ """
206
+ Looks for text enclosed between two identical quotes
207
+ (the probable quotechar) which are preceded and followed
208
+ by the same character (the probable delimiter).
209
+ For example:
210
+ ,'some text',
211
+ The quote with the most wins, same with the delimiter.
212
+ If there is no quotechar the delimiter can't be determined
213
+ this way.
214
+ """
215
+
216
+ matches = []
217
+ for restr in (r'(?P<delim>[^\w\n"\'])(?P<space> ?)(?P<quote>["\']).*?(?P=quote)(?P=delim)', # ,".*?",
218
+ r'(?:^|\n)(?P<quote>["\']).*?(?P=quote)(?P<delim>[^\w\n"\'])(?P<space> ?)', # ".*?",
219
+ r'(?P<delim>[^\w\n"\'])(?P<space> ?)(?P<quote>["\']).*?(?P=quote)(?:$|\n)', # ,".*?"
220
+ r'(?:^|\n)(?P<quote>["\']).*?(?P=quote)(?:$|\n)'): # ".*?" (no delim, no space)
221
+ regexp = re.compile(restr, re.DOTALL | re.MULTILINE)
222
+ matches = regexp.findall(data)
223
+ if matches:
224
+ break
225
+
226
+ if not matches:
227
+ # (quotechar, doublequote, delimiter, skipinitialspace)
228
+ return ('', False, None, 0)
229
+ quotes = {}
230
+ delims = {}
231
+ spaces = 0
232
+ groupindex = regexp.groupindex
233
+ for m in matches:
234
+ n = groupindex['quote'] - 1
235
+ key = m[n]
236
+ if key:
237
+ quotes[key] = quotes.get(key, 0) + 1
238
+ try:
239
+ n = groupindex['delim'] - 1
240
+ key = m[n]
241
+ except KeyError:
242
+ continue
243
+ if key and (delimiters is None or key in delimiters):
244
+ delims[key] = delims.get(key, 0) + 1
245
+ try:
246
+ n = groupindex['space'] - 1
247
+ except KeyError:
248
+ continue
249
+ if m[n]:
250
+ spaces += 1
251
+
252
+ quotechar = max(quotes, key=quotes.get)
253
+
254
+ if delims:
255
+ delim = max(delims, key=delims.get)
256
+ skipinitialspace = delims[delim] == spaces
257
+ if delim == '\n': # most likely a file with a single column
258
+ delim = ''
259
+ else:
260
+ # there is *no* delimiter, it's a single column of quoted data
261
+ delim = ''
262
+ skipinitialspace = 0
263
+
264
+ # if we see an extra quote between delimiters, we've got a
265
+ # double quoted format
266
+ dq_regexp = re.compile(
267
+ r"((%(delim)s)|^)\W*%(quote)s[^%(delim)s\n]*%(quote)s[^%(delim)s\n]*%(quote)s\W*((%(delim)s)|$)" % \
268
+ {'delim':re.escape(delim), 'quote':quotechar}, re.MULTILINE)
269
+
270
+
271
+
272
+ if dq_regexp.search(data):
273
+ doublequote = True
274
+ else:
275
+ doublequote = False
276
+
277
+ return (quotechar, doublequote, delim, skipinitialspace)
278
+
279
+
280
+ def _guess_delimiter(self, data, delimiters):
281
+ """
282
+ The delimiter /should/ occur the same number of times on
283
+ each row. However, due to malformed data, it may not. We don't want
284
+ an all or nothing approach, so we allow for small variations in this
285
+ number.
286
+ 1) build a table of the frequency of each character on every line.
287
+ 2) build a table of frequencies of this frequency (meta-frequency?),
288
+ e.g. 'x occurred 5 times in 10 rows, 6 times in 1000 rows,
289
+ 7 times in 2 rows'
290
+ 3) use the mode of the meta-frequency to determine the /expected/
291
+ frequency for that character
292
+ 4) find out how often the character actually meets that goal
293
+ 5) the character that best meets its goal is the delimiter
294
+ For performance reasons, the data is evaluated in chunks, so it can
295
+ try and evaluate the smallest portion of the data possible, evaluating
296
+ additional chunks as necessary.
297
+ """
298
+
299
+ data = list(filter(None, data.split('\n')))
300
+
301
+ ascii = [chr(c) for c in range(127)] # 7-bit ASCII
302
+
303
+ # build frequency tables
304
+ chunkLength = min(10, len(data))
305
+ iteration = 0
306
+ charFrequency = {}
307
+ modes = {}
308
+ delims = {}
309
+ start, end = 0, chunkLength
310
+ while start < len(data):
311
+ iteration += 1
312
+ for line in data[start:end]:
313
+ for char in ascii:
314
+ metaFrequency = charFrequency.get(char, {})
315
+ # must count even if frequency is 0
316
+ freq = line.count(char)
317
+ # value is the mode
318
+ metaFrequency[freq] = metaFrequency.get(freq, 0) + 1
319
+ charFrequency[char] = metaFrequency
320
+
321
+ for char in charFrequency.keys():
322
+ items = list(charFrequency[char].items())
323
+ if len(items) == 1 and items[0][0] == 0:
324
+ continue
325
+ # get the mode of the frequencies
326
+ if len(items) > 1:
327
+ modes[char] = max(items, key=lambda x: x[1])
328
+ # adjust the mode - subtract the sum of all
329
+ # other frequencies
330
+ items.remove(modes[char])
331
+ modes[char] = (modes[char][0], modes[char][1]
332
+ - sum(item[1] for item in items))
333
+ else:
334
+ modes[char] = items[0]
335
+
336
+ # build a list of possible delimiters
337
+ modeList = modes.items()
338
+ total = float(min(chunkLength * iteration, len(data)))
339
+ # (rows of consistent data) / (number of rows) = 100%
340
+ consistency = 1.0
341
+ # minimum consistency threshold
342
+ threshold = 0.9
343
+ while len(delims) == 0 and consistency >= threshold:
344
+ for k, v in modeList:
345
+ if v[0] > 0 and v[1] > 0:
346
+ if ((v[1]/total) >= consistency and
347
+ (delimiters is None or k in delimiters)):
348
+ delims[k] = v
349
+ consistency -= 0.01
350
+
351
+ if len(delims) == 1:
352
+ delim = list(delims.keys())[0]
353
+ skipinitialspace = (data[0].count(delim) ==
354
+ data[0].count("%c " % delim))
355
+ return (delim, skipinitialspace)
356
+
357
+ # analyze another chunkLength lines
358
+ start = end
359
+ end += chunkLength
360
+
361
+ if not delims:
362
+ return ('', 0)
363
+
364
+ # if there's more than one, fall back to a 'preferred' list
365
+ if len(delims) > 1:
366
+ for d in self.preferred:
367
+ if d in delims.keys():
368
+ skipinitialspace = (data[0].count(d) ==
369
+ data[0].count("%c " % d))
370
+ return (d, skipinitialspace)
371
+
372
+ # nothing else indicates a preference, pick the character that
373
+ # dominates(?)
374
+ items = [(v,k) for (k,v) in delims.items()]
375
+ items.sort()
376
+ delim = items[-1][1]
377
+
378
+ skipinitialspace = (data[0].count(delim) ==
379
+ data[0].count("%c " % delim))
380
+ return (delim, skipinitialspace)
381
+
382
+
383
+ def has_header(self, sample):
384
+ # Creates a dictionary of types of data in each column. If any
385
+ # column is of a single type (say, integers), *except* for the first
386
+ # row, then the first row is presumed to be labels. If the type
387
+ # can't be determined, it is assumed to be a string in which case
388
+ # the length of the string is the determining factor: if all of the
389
+ # rows except for the first are the same length, it's a header.
390
+ # Finally, a 'vote' is taken at the end for each column, adding or
391
+ # subtracting from the likelihood of the first row being a header.
392
+
393
+ rdr = reader(StringIO(sample), self.sniff(sample))
394
+
395
+ header = next(rdr) # assume first row is header
396
+
397
+ columns = len(header)
398
+ columnTypes = {}
399
+ for i in range(columns): columnTypes[i] = None
400
+
401
+ checked = 0
402
+ for row in rdr:
403
+ # arbitrary number of rows to check, to keep it sane
404
+ if checked > 20:
405
+ break
406
+ checked += 1
407
+
408
+ if len(row) != columns:
409
+ continue # skip rows that have irregular number of columns
410
+
411
+ for col in list(columnTypes.keys()):
412
+ thisType = complex
413
+ try:
414
+ thisType(row[col])
415
+ except (ValueError, OverflowError):
416
+ # fallback to length of string
417
+ thisType = len(row[col])
418
+
419
+ if thisType != columnTypes[col]:
420
+ if columnTypes[col] is None: # add new column type
421
+ columnTypes[col] = thisType
422
+ else:
423
+ # type is inconsistent, remove column from
424
+ # consideration
425
+ del columnTypes[col]
426
+
427
+ # finally, compare results against first row and "vote"
428
+ # on whether it's a header
429
+ hasHeader = 0
430
+ for col, colType in columnTypes.items():
431
+ if type(colType) == type(0): # it's a length
432
+ if len(header[col]) != colType:
433
+ hasHeader += 1
434
+ else:
435
+ hasHeader -= 1
436
+ else: # attempt typecast
437
+ try:
438
+ colType(header[col])
439
+ except (ValueError, TypeError):
440
+ hasHeader += 1
441
+ else:
442
+ hasHeader -= 1
443
+
444
+ return hasHeader > 0
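
A short end-to-end sketch of the csv classes above (field names and values are illustrative):

import csv
import io

buf = io.StringIO()
w = csv.DictWriter(buf, fieldnames=["name", "age"])
w.writeheader()
w.writerows([{"name": "ada", "age": 36}, {"name": "alan", "age": 41}])
text = buf.getvalue()

# Sniffer guesses the dialect and whether the first row is a header.
sniffer = csv.Sniffer()
dialect = sniffer.sniff(text)
assert sniffer.has_header(text)

for row in csv.DictReader(io.StringIO(text), dialect=dialect):
    print(row["name"], row["age"])
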
janus/lib/python3.10/dataclasses.py ADDED
@@ -0,0 +1,1453 @@
1
+ import re
2
+ import sys
3
+ import copy
4
+ import types
5
+ import inspect
6
+ import keyword
7
+ import builtins
8
+ import functools
9
+ import abc
10
+ import _thread
11
+ from types import FunctionType, GenericAlias
12
+
13
+
14
+ __all__ = ['dataclass',
15
+ 'field',
16
+ 'Field',
17
+ 'FrozenInstanceError',
18
+ 'InitVar',
19
+ 'KW_ONLY',
20
+ 'MISSING',
21
+
22
+ # Helper functions.
23
+ 'fields',
24
+ 'asdict',
25
+ 'astuple',
26
+ 'make_dataclass',
27
+ 'replace',
28
+ 'is_dataclass',
29
+ ]
30
+
31
+ # Conditions for adding methods. The boxes indicate what action the
32
+ # dataclass decorator takes. For all of these tables, when I talk
33
+ # about init=, repr=, eq=, order=, unsafe_hash=, or frozen=, I'm
34
+ # referring to the arguments to the @dataclass decorator. When
35
+ # checking if a dunder method already exists, I mean check for an
36
+ # entry in the class's __dict__. I never check to see if an attribute
37
+ # is defined in a base class.
38
+
39
+ # Key:
40
+ # +=========+=========================================+
41
+ # + Value | Meaning |
42
+ # +=========+=========================================+
43
+ # | <blank> | No action: no method is added. |
44
+ # +---------+-----------------------------------------+
45
+ # | add | Generated method is added. |
46
+ # +---------+-----------------------------------------+
47
+ # | raise | TypeError is raised. |
48
+ # +---------+-----------------------------------------+
49
+ # | None | Attribute is set to None. |
50
+ # +=========+=========================================+
51
+
52
+ # __init__
53
+ #
54
+ # +--- init= parameter
55
+ # |
56
+ # v | | |
57
+ # | no | yes | <--- class has __init__ in __dict__?
58
+ # +=======+=======+=======+
59
+ # | False | | |
60
+ # +-------+-------+-------+
61
+ # | True | add | | <- the default
62
+ # +=======+=======+=======+
63
+
64
+ # __repr__
65
+ #
66
+ # +--- repr= parameter
67
+ # |
68
+ # v | | |
69
+ # | no | yes | <--- class has __repr__ in __dict__?
70
+ # +=======+=======+=======+
71
+ # | False | | |
72
+ # +-------+-------+-------+
73
+ # | True | add | | <- the default
74
+ # +=======+=======+=======+
75
+
76
+
77
+ # __setattr__
78
+ # __delattr__
79
+ #
80
+ # +--- frozen= parameter
81
+ # |
82
+ # v | | |
83
+ # | no | yes | <--- class has __setattr__ or __delattr__ in __dict__?
84
+ # +=======+=======+=======+
85
+ # | False | | | <- the default
86
+ # +-------+-------+-------+
87
+ # | True | add | raise |
88
+ # +=======+=======+=======+
89
+ # Raise because not adding these methods would break the "frozen-ness"
90
+ # of the class.
91
+
92
+ # __eq__
93
+ #
94
+ # +--- eq= parameter
95
+ # |
96
+ # v | | |
97
+ # | no | yes | <--- class has __eq__ in __dict__?
98
+ # +=======+=======+=======+
99
+ # | False | | |
100
+ # +-------+-------+-------+
101
+ # | True | add | | <- the default
102
+ # +=======+=======+=======+
103
+
104
+ # __lt__
105
+ # __le__
106
+ # __gt__
107
+ # __ge__
108
+ #
109
+ # +--- order= parameter
110
+ # |
111
+ # v | | |
112
+ # | no | yes | <--- class has any comparison method in __dict__?
113
+ # +=======+=======+=======+
114
+ # | False | | | <- the default
115
+ # +-------+-------+-------+
116
+ # | True | add | raise |
117
+ # +=======+=======+=======+
118
+ # Raise because to allow this case would interfere with using
119
+ # functools.total_ordering.
120
+
121
+ # __hash__
122
+
123
+ # +------------------- unsafe_hash= parameter
124
+ # | +----------- eq= parameter
125
+ # | | +--- frozen= parameter
126
+ # | | |
127
+ # v v v | | |
128
+ # | no | yes | <--- class has explicitly defined __hash__
129
+ # +=======+=======+=======+========+========+
130
+ # | False | False | False | | | No __eq__, use the base class __hash__
131
+ # +-------+-------+-------+--------+--------+
132
+ # | False | False | True | | | No __eq__, use the base class __hash__
133
+ # +-------+-------+-------+--------+--------+
134
+ # | False | True | False | None | | <-- the default, not hashable
135
+ # +-------+-------+-------+--------+--------+
136
+ # | False | True | True | add | | Frozen, so hashable, allows override
137
+ # +-------+-------+-------+--------+--------+
138
+ # | True | False | False | add | raise | Has no __eq__, but hashable
139
+ # +-------+-------+-------+--------+--------+
140
+ # | True | False | True | add | raise | Has no __eq__, but hashable
141
+ # +-------+-------+-------+--------+--------+
142
+ # | True | True | False | add | raise | Not frozen, but hashable
143
+ # +-------+-------+-------+--------+--------+
144
+ # | True | True | True | add | raise | Frozen, so hashable
145
+ # +=======+=======+=======+========+========+
146
+ # For boxes that are blank, __hash__ is untouched and therefore
147
+ # inherited from the base class. If the base is object, then
148
+ # id-based hashing is used.
149
+ #
150
+ # Note that a class may already have __hash__=None if it specified an
151
+ # __eq__ method in the class body (not one that was created by
152
+ # @dataclass).
153
+ #
154
+ # See _hash_action (below) for a coded version of this table.
155
+
156
+ # __match_args__
157
+ #
158
+ # +--- match_args= parameter
159
+ # |
160
+ # v | | |
161
+ # | no | yes | <--- class has __match_args__ in __dict__?
162
+ # +=======+=======+=======+
163
+ # | False | | |
164
+ # +-------+-------+-------+
165
+ # | True | add | | <- the default
166
+ # +=======+=======+=======+
167
+ # __match_args__ is always added unless the class already defines it. It is a
168
+ # tuple of __init__ parameter names; non-init fields must be matched by keyword.
169
+
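
# Usage sketch (illustrative, not part of this module): with the default
# match_args=True, the generated __match_args__ enables positional capture
# in a match statement.
from dataclasses import dataclass

@dataclass
class Point:
    x: int
    y: int

match Point(1, 2):
    case Point(x, y):  # positional match via Point.__match_args__ == ('x', 'y')
        assert (x, y) == (1, 2)
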
170
+
171
+ # Raised when an attempt is made to modify a frozen class.
172
+ class FrozenInstanceError(AttributeError): pass
173
+
174
+ # A sentinel object for default values to signal that a default
175
+ # factory will be used. This is given a nice repr() which will appear
176
+ # in the function signature of dataclasses' constructors.
177
+ class _HAS_DEFAULT_FACTORY_CLASS:
178
+ def __repr__(self):
179
+ return '<factory>'
180
+ _HAS_DEFAULT_FACTORY = _HAS_DEFAULT_FACTORY_CLASS()
181
+
182
+ # A sentinel object to detect if a parameter is supplied or not. Use
183
+ # a class to give it a better repr.
184
+ class _MISSING_TYPE:
185
+ pass
186
+ MISSING = _MISSING_TYPE()
187
+
188
+ # A sentinel object to indicate that following fields are keyword-only by
189
+ # default. Use a class to give it a better repr.
190
+ class _KW_ONLY_TYPE:
191
+ pass
192
+ KW_ONLY = _KW_ONLY_TYPE()
193
+
194
+ # Since most per-field metadata will be unused, create an empty
195
+ # read-only proxy that can be shared among all fields.
196
+ _EMPTY_METADATA = types.MappingProxyType({})
197
+
198
+ # Markers for the various kinds of fields and pseudo-fields.
199
+ class _FIELD_BASE:
200
+ def __init__(self, name):
201
+ self.name = name
202
+ def __repr__(self):
203
+ return self.name
204
+ _FIELD = _FIELD_BASE('_FIELD')
205
+ _FIELD_CLASSVAR = _FIELD_BASE('_FIELD_CLASSVAR')
206
+ _FIELD_INITVAR = _FIELD_BASE('_FIELD_INITVAR')
207
+
208
+ # The name of an attribute on the class where we store the Field
209
+ # objects. Also used to check if a class is a Data Class.
210
+ _FIELDS = '__dataclass_fields__'
211
+
212
+ # The name of an attribute on the class that stores the parameters to
213
+ # @dataclass.
214
+ _PARAMS = '__dataclass_params__'
215
+
216
+ # The name of the function, that if it exists, is called at the end of
217
+ # __init__.
218
+ _POST_INIT_NAME = '__post_init__'
219
+
220
+ # String regex that string annotations for ClassVar or InitVar must match.
221
+ # Allows "identifier.identifier[" or "identifier[".
222
+ # https://bugs.python.org/issue33453 for details.
223
+ _MODULE_IDENTIFIER_RE = re.compile(r'^(?:\s*(\w+)\s*\.)?\s*(\w+)')
224
+
225
+ # This function's logic is copied from "recursive_repr" function in
226
+ # reprlib module to avoid dependency.
227
+ def _recursive_repr(user_function):
228
+ # Decorator to make a repr function return "..." for a recursive
229
+ # call.
230
+ repr_running = set()
231
+
232
+ @functools.wraps(user_function)
233
+ def wrapper(self):
234
+ key = id(self), _thread.get_ident()
235
+ if key in repr_running:
236
+ return '...'
237
+ repr_running.add(key)
238
+ try:
239
+ result = user_function(self)
240
+ finally:
241
+ repr_running.discard(key)
242
+ return result
243
+ return wrapper
244
+
245
+ class InitVar:
246
+ __slots__ = ('type', )
247
+
248
+ def __init__(self, type):
249
+ self.type = type
250
+
251
+ def __repr__(self):
252
+ if isinstance(self.type, type) and not isinstance(self.type, GenericAlias):
253
+ type_name = self.type.__name__
254
+ else:
255
+ # typing objects, e.g. List[int]
256
+ type_name = repr(self.type)
257
+ return f'dataclasses.InitVar[{type_name}]'
258
+
259
+ def __class_getitem__(cls, type):
260
+ return InitVar(type)
261
+
262
+ # Instances of Field are only ever created from within this module,
263
+ # and only from the field() function, although Field instances are
264
+ # exposed externally as (conceptually) read-only objects.
265
+ #
266
+ # name and type are filled in after the fact, not in __init__.
267
+ # They're not known at the time this class is instantiated, but it's
268
+ # convenient if they're available later.
269
+ #
270
+ # When cls._FIELDS is filled in with a list of Field objects, the name
271
+ # and type fields will have been populated.
272
+ class Field:
273
+ __slots__ = ('name',
274
+ 'type',
275
+ 'default',
276
+ 'default_factory',
277
+ 'repr',
278
+ 'hash',
279
+ 'init',
280
+ 'compare',
281
+ 'metadata',
282
+ 'kw_only',
283
+ '_field_type', # Private: not to be used by user code.
284
+ )
285
+
286
+ def __init__(self, default, default_factory, init, repr, hash, compare,
287
+ metadata, kw_only):
288
+ self.name = None
289
+ self.type = None
290
+ self.default = default
291
+ self.default_factory = default_factory
292
+ self.init = init
293
+ self.repr = repr
294
+ self.hash = hash
295
+ self.compare = compare
296
+ self.metadata = (_EMPTY_METADATA
297
+ if metadata is None else
298
+ types.MappingProxyType(metadata))
299
+ self.kw_only = kw_only
300
+ self._field_type = None
301
+
302
+ @_recursive_repr
303
+ def __repr__(self):
304
+ return ('Field('
305
+ f'name={self.name!r},'
306
+ f'type={self.type!r},'
307
+ f'default={self.default!r},'
308
+ f'default_factory={self.default_factory!r},'
309
+ f'init={self.init!r},'
310
+ f'repr={self.repr!r},'
311
+ f'hash={self.hash!r},'
312
+ f'compare={self.compare!r},'
313
+ f'metadata={self.metadata!r},'
314
+ f'kw_only={self.kw_only!r},'
315
+ f'_field_type={self._field_type}'
316
+ ')')
317
+
318
+ # This is used to support the PEP 487 __set_name__ protocol in the
319
+ # case where we're using a field that contains a descriptor as a
320
+ # default value. For details on __set_name__, see
321
+ # https://www.python.org/dev/peps/pep-0487/#implementation-details.
322
+ #
323
+ # Note that in _process_class, this Field object is overwritten
324
+ # with the default value, so the end result is a descriptor that
325
+ # had __set_name__ called on it at the right time.
326
+ def __set_name__(self, owner, name):
327
+ func = getattr(type(self.default), '__set_name__', None)
328
+ if func:
329
+ # There is a __set_name__ method on the descriptor, call
330
+ # it.
331
+ func(self.default, owner, name)
332
+
333
+ __class_getitem__ = classmethod(GenericAlias)
334
+
335
+
336
+ class _DataclassParams:
337
+ __slots__ = ('init',
338
+ 'repr',
339
+ 'eq',
340
+ 'order',
341
+ 'unsafe_hash',
342
+ 'frozen',
343
+ )
344
+
345
+ def __init__(self, init, repr, eq, order, unsafe_hash, frozen):
346
+ self.init = init
347
+ self.repr = repr
348
+ self.eq = eq
349
+ self.order = order
350
+ self.unsafe_hash = unsafe_hash
351
+ self.frozen = frozen
352
+
353
+ def __repr__(self):
354
+ return ('_DataclassParams('
355
+ f'init={self.init!r},'
356
+ f'repr={self.repr!r},'
357
+ f'eq={self.eq!r},'
358
+ f'order={self.order!r},'
359
+ f'unsafe_hash={self.unsafe_hash!r},'
360
+ f'frozen={self.frozen!r}'
361
+ ')')
362
+
363
+
364
+ # This function is used instead of exposing Field creation directly,
365
+ # so that a type checker can be told (via overloads) that this is a
366
+ # function whose type depends on its parameters.
367
+ def field(*, default=MISSING, default_factory=MISSING, init=True, repr=True,
368
+ hash=None, compare=True, metadata=None, kw_only=MISSING):
369
+ """Return an object to identify dataclass fields.
370
+
371
+ default is the default value of the field. default_factory is a
372
+ 0-argument function called to initialize a field's value. If init
373
+ is true, the field will be a parameter to the class's __init__()
374
+ function. If repr is true, the field will be included in the
375
+ object's repr(). If hash is true, the field will be included in the
376
+ object's hash(). If compare is true, the field will be used in
377
+ comparison functions. metadata, if specified, must be a mapping
378
+ which is stored but not otherwise examined by dataclass. If kw_only
379
+ is true, the field will become a keyword-only parameter to
380
+ __init__().
381
+
382
+ It is an error to specify both default and default_factory.
383
+ """
384
+
385
+ if default is not MISSING and default_factory is not MISSING:
386
+ raise ValueError('cannot specify both default and default_factory')
387
+ return Field(default, default_factory, init, repr, hash, compare,
388
+ metadata, kw_only)
389
+
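
# Usage sketch (illustrative, not part of this module): default_factory for
# mutable defaults, and metadata stored (but never interpreted) by dataclasses.
from dataclasses import dataclass, field, fields

@dataclass
class Inventory:
    items: list = field(default_factory=list)
    unit: str = field(default="kg", metadata={"doc": "unit of measure"})

inv = Inventory()
inv.items.append("bolt")  # each instance gets its own fresh list
assert fields(Inventory)[1].metadata["doc"] == "unit of measure"
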
390
+
391
+ def _fields_in_init_order(fields):
392
+ # Returns the fields as __init__ will output them. It returns 2 tuples:
393
+ # the first for normal args, and the second for keyword args.
394
+
395
+ return (tuple(f for f in fields if f.init and not f.kw_only),
396
+ tuple(f for f in fields if f.init and f.kw_only)
397
+ )
398
+
399
+
400
+ def _tuple_str(obj_name, fields):
401
+ # Return a string representing each field of obj_name as a tuple
402
+ # member. So, if fields is ['x', 'y'] and obj_name is "self",
403
+ # return "(self.x,self.y)".
404
+
405
+ # Special case for the 0-tuple.
406
+ if not fields:
407
+ return '()'
408
+ # Note the trailing comma, needed if this turns out to be a 1-tuple.
409
+ return f'({",".join([f"{obj_name}.{f.name}" for f in fields])},)'
410
+
411
+
412
+ def _create_fn(name, args, body, *, globals=None, locals=None,
413
+ return_type=MISSING):
414
+ # Note that we may mutate locals. Callers beware!
415
+ # The only callers are internal to this module, so no
416
+ # worries about external callers.
417
+ if locals is None:
418
+ locals = {}
419
+ return_annotation = ''
420
+ if return_type is not MISSING:
421
+ locals['_return_type'] = return_type
422
+ return_annotation = '->_return_type'
423
+ args = ','.join(args)
424
+ body = '\n'.join(f' {b}' for b in body)
425
+
426
+ # Compute the text of the entire function.
427
+ txt = f' def {name}({args}){return_annotation}:\n{body}'
428
+
429
+ local_vars = ', '.join(locals.keys())
430
+ txt = f"def __create_fn__({local_vars}):\n{txt}\n return {name}"
431
+ ns = {}
432
+ exec(txt, globals, ns)
433
+ return ns['__create_fn__'](**locals)
434
+
435
+
436
+ def _field_assign(frozen, name, value, self_name):
437
+ # If we're a frozen class, then assign to our fields in __init__
438
+ # via object.__setattr__. Otherwise, just use a simple
439
+ # assignment.
440
+ #
441
+ # self_name is what "self" is called in this function: don't
442
+ # hard-code "self", since that might be a field name.
443
+ if frozen:
444
+ return f'__dataclass_builtins_object__.__setattr__({self_name},{name!r},{value})'
445
+ return f'{self_name}.{name}={value}'
446
+
447
+
448
+ def _field_init(f, frozen, globals, self_name, slots):
449
+ # Return the text of the line in the body of __init__ that will
450
+ # initialize this field.
451
+
452
+ default_name = f'_dflt_{f.name}'
453
+ if f.default_factory is not MISSING:
454
+ if f.init:
455
+ # This field has a default factory. If a parameter is
456
+ # given, use it. If not, call the factory.
457
+ globals[default_name] = f.default_factory
458
+ value = (f'{default_name}() '
459
+ f'if {f.name} is _HAS_DEFAULT_FACTORY '
460
+ f'else {f.name}')
461
+ else:
462
+ # This is a field that's not in the __init__ params, but
463
+ # has a default factory function. It needs to be
464
+ # initialized here by calling the factory function,
465
+ # because there's no other way to initialize it.
466
+
467
+ # For a field initialized with a default=defaultvalue, the
468
+ # class dict just has the default value
469
+ # (cls.fieldname=defaultvalue). But that won't work for a
470
+ # default factory, the factory must be called in __init__
471
+ # and we must assign that to self.fieldname. We can't
472
+ # fall back to the class dict's value, both because it's
473
+ # not set, and because it might be different per-class
474
+ # (which, after all, is why we have a factory function!).
475
+
476
+ globals[default_name] = f.default_factory
477
+ value = f'{default_name}()'
478
+ else:
479
+ # No default factory.
480
+ if f.init:
481
+ if f.default is MISSING:
482
+ # There's no default, just do an assignment.
483
+ value = f.name
484
+ elif f.default is not MISSING:
485
+ globals[default_name] = f.default
486
+ value = f.name
487
+ else:
488
+ # If the class has slots, then initialize this field.
489
+ if slots and f.default is not MISSING:
490
+ globals[default_name] = f.default
491
+ value = default_name
492
+ else:
493
+ # This field does not need initialization: reading from it will
494
+ # just use the class attribute that contains the default.
495
+ # Signify that to the caller by returning None.
496
+ return None
497
+
498
+ # Only test this now, so that we can create variables for the
499
+ # default. However, return None to signify that we're not going
500
+ # to actually do the assignment statement for InitVars.
501
+ if f._field_type is _FIELD_INITVAR:
502
+ return None
503
+
504
+ # Now, actually generate the field assignment.
505
+ return _field_assign(frozen, f.name, value, self_name)
506
+
507
+
508
+ def _init_param(f):
509
+ # Return the __init__ parameter string for this field. For
510
+ # example, the equivalent of 'x:int=3' (except instead of 'int',
511
+ # reference a variable set to int, and instead of '3', reference a
512
+ # variable set to 3).
513
+ if f.default is MISSING and f.default_factory is MISSING:
514
+ # There's no default, and no default_factory, just output the
515
+ # variable name and type.
516
+ default = ''
517
+ elif f.default is not MISSING:
518
+ # There's a default, this will be the name that's used to look
519
+ # it up.
520
+ default = f'=_dflt_{f.name}'
521
+ elif f.default_factory is not MISSING:
522
+ # There's a factory function. Set a marker.
523
+ default = '=_HAS_DEFAULT_FACTORY'
524
+ return f'{f.name}:_type_{f.name}{default}'
525
+
526
+
527
+ def _init_fn(fields, std_fields, kw_only_fields, frozen, has_post_init,
528
+ self_name, globals, slots):
529
+ # fields contains both real fields and InitVar pseudo-fields.
530
+
531
+ # Make sure we don't have fields without defaults following fields
532
+ # with defaults. This actually would be caught when exec-ing the
533
+ # function source code, but catching it here gives a better error
534
+ # message, and future-proofs us in case we build up the function
535
+ # using ast.
536
+
537
+ seen_default = False
538
+ for f in std_fields:
539
+ # Only consider the non-kw-only fields in the __init__ call.
540
+ if f.init:
541
+ if not (f.default is MISSING and f.default_factory is MISSING):
542
+ seen_default = True
543
+ elif seen_default:
544
+ raise TypeError(f'non-default argument {f.name!r} '
545
+ 'follows default argument')
546
+
547
+ locals = {f'_type_{f.name}': f.type for f in fields}
548
+ locals.update({
549
+ 'MISSING': MISSING,
550
+ '_HAS_DEFAULT_FACTORY': _HAS_DEFAULT_FACTORY,
551
+ '__dataclass_builtins_object__': object,
552
+ })
553
+
554
+ body_lines = []
555
+ for f in fields:
556
+ line = _field_init(f, frozen, locals, self_name, slots)
557
+ # line is None means that this field doesn't require
558
+ # initialization (it's a pseudo-field). Just skip it.
559
+ if line:
560
+ body_lines.append(line)
561
+
562
+ # Does this class have a post-init function?
563
+ if has_post_init:
564
+ params_str = ','.join(f.name for f in fields
565
+ if f._field_type is _FIELD_INITVAR)
566
+ body_lines.append(f'{self_name}.{_POST_INIT_NAME}({params_str})')
567
+
568
+ # If no body lines, use 'pass'.
569
+ if not body_lines:
570
+ body_lines = ['pass']
571
+
572
+ _init_params = [_init_param(f) for f in std_fields]
573
+ if kw_only_fields:
574
+ # Add the keyword-only args. Because the * can only be added if
575
+ # there's at least one keyword-only arg, there needs to be a test here
576
+ # (instead of just concatenating the lists together).
577
+ _init_params += ['*']
578
+ _init_params += [_init_param(f) for f in kw_only_fields]
579
+ return _create_fn('__init__',
580
+ [self_name] + _init_params,
581
+ body_lines,
582
+ locals=locals,
583
+ globals=globals,
584
+ return_type=None)
585
+
586
+
587
+ def _repr_fn(fields, globals):
588
+ fn = _create_fn('__repr__',
589
+ ('self',),
590
+ ['return self.__class__.__qualname__ + f"(' +
591
+ ', '.join([f"{f.name}={{self.{f.name}!r}}"
592
+ for f in fields]) +
593
+ ')"'],
594
+ globals=globals)
595
+ return _recursive_repr(fn)
596
+
597
+
598
+ def _frozen_get_del_attr(cls, fields, globals):
599
+ locals = {'cls': cls,
600
+ 'FrozenInstanceError': FrozenInstanceError}
601
+ if fields:
602
+ fields_str = '(' + ','.join(repr(f.name) for f in fields) + ',)'
603
+ else:
604
+ # Special case for the zero-length tuple.
605
+ fields_str = '()'
606
+ return (_create_fn('__setattr__',
607
+ ('self', 'name', 'value'),
608
+ (f'if type(self) is cls or name in {fields_str}:',
609
+ ' raise FrozenInstanceError(f"cannot assign to field {name!r}")',
610
+ f'super(cls, self).__setattr__(name, value)'),
611
+ locals=locals,
612
+ globals=globals),
613
+ _create_fn('__delattr__',
614
+ ('self', 'name'),
615
+ (f'if type(self) is cls or name in {fields_str}:',
616
+ ' raise FrozenInstanceError(f"cannot delete field {name!r}")',
617
+ f'super(cls, self).__delattr__(name)'),
618
+ locals=locals,
619
+ globals=globals),
620
+ )
621
+
622
+
623
+ def _cmp_fn(name, op, self_tuple, other_tuple, globals):
624
+ # Create a comparison function. If the fields in the object are
625
+ # named 'x' and 'y', then self_tuple is the string
626
+ # '(self.x,self.y)' and other_tuple is the string
627
+ # '(other.x,other.y)'.
628
+
629
+ return _create_fn(name,
630
+ ('self', 'other'),
631
+ [ 'if other.__class__ is self.__class__:',
632
+ f' return {self_tuple}{op}{other_tuple}',
633
+ 'return NotImplemented'],
634
+ globals=globals)
635
+
636
+
637
+ def _hash_fn(fields, globals):
638
+ self_tuple = _tuple_str('self', fields)
639
+ return _create_fn('__hash__',
640
+ ('self',),
641
+ [f'return hash({self_tuple})'],
642
+ globals=globals)
643
+
644
+
645
+ def _is_classvar(a_type, typing):
646
+ # This test uses a typing internal class, but it's the best way to
647
+ # test if this is a ClassVar.
648
+ return (a_type is typing.ClassVar
649
+ or (type(a_type) is typing._GenericAlias
650
+ and a_type.__origin__ is typing.ClassVar))
651
+
652
+
653
+ def _is_initvar(a_type, dataclasses):
654
+ # The module we're checking against is the module we're
655
+ # currently in (dataclasses.py).
656
+ return (a_type is dataclasses.InitVar
657
+ or type(a_type) is dataclasses.InitVar)
658
+
659
+ def _is_kw_only(a_type, dataclasses):
660
+ return a_type is dataclasses.KW_ONLY
661
+
662
+
663
+ def _is_type(annotation, cls, a_module, a_type, is_type_predicate):
664
+ # Given a type annotation string, does it refer to a_type in
665
+ # a_module? For example, when checking that annotation denotes a
666
+ # ClassVar, then a_module is typing, and a_type is
667
+ # typing.ClassVar.
668
+
669
+ # It's possible to look up a_module given a_type, but it involves
670
+ # looking in sys.modules (again!), and seems like a waste since
671
+ # the caller already knows a_module.
672
+
673
+ # - annotation is a string type annotation
674
+ # - cls is the class that this annotation was found in
675
+ # - a_module is the module we want to match
676
+ # - a_type is the type in that module we want to match
677
+ # - is_type_predicate is a function called with (obj, a_module)
678
+ # that determines if obj is of the desired type.
679
+
680
+ # Since this test does not do a local namespace lookup (and
681
+ # instead only a module (global) lookup), there are some things it
682
+ # gets wrong.
683
+
684
+ # With string annotations, cv0 will be detected as a ClassVar:
685
+ # CV = ClassVar
686
+ # @dataclass
687
+ # class C0:
688
+ # cv0: CV
689
+
690
+ # But in this example cv1 will not be detected as a ClassVar:
691
+ # @dataclass
692
+ # class C1:
693
+ # CV = ClassVar
694
+ # cv1: CV
695
+
696
+ # In C1, the code in this function (_is_type) will look up "CV" in
697
+ # the module and not find it, so it will not consider cv1 as a
698
+ # ClassVar. This is a fairly obscure corner case, and the best
699
+ # way to fix it would be to eval() the string "CV" with the
700
+ # correct global and local namespaces. However that would involve
701
+ # a eval() penalty for every single field of every dataclass
702
+ # that's defined. It was judged not worth it.
703
+
704
+ match = _MODULE_IDENTIFIER_RE.match(annotation)
705
+ if match:
706
+ ns = None
707
+ module_name = match.group(1)
708
+ if not module_name:
709
+ # No module name, assume the class's module did
710
+ # "from dataclasses import InitVar".
711
+ ns = sys.modules.get(cls.__module__).__dict__
712
+ else:
713
+ # Look up module_name in the class's module.
714
+ module = sys.modules.get(cls.__module__)
715
+ if module and module.__dict__.get(module_name) is a_module:
716
+ ns = sys.modules.get(a_type.__module__).__dict__
717
+ if ns and is_type_predicate(ns.get(match.group(2)), a_module):
718
+ return True
719
+ return False
720
+
721
+
722
+ def _get_field(cls, a_name, a_type, default_kw_only):
723
+ # Return a Field object for this field name and type. ClassVars and
724
+ # InitVars are also returned, but marked as such (see f._field_type).
725
+ # default_kw_only is the value of kw_only to use if there isn't a field()
726
+ # that defines it.
727
+
728
+ # If the default value isn't derived from Field, then it's only a
729
+ # normal default value. Convert it to a Field().
730
+ default = getattr(cls, a_name, MISSING)
731
+ if isinstance(default, Field):
732
+ f = default
733
+ else:
734
+ if isinstance(default, types.MemberDescriptorType):
735
+ # This is a field in __slots__, so it has no default value.
736
+ default = MISSING
737
+ f = field(default=default)
738
+
739
+ # Only at this point do we know the name and the type. Set them.
740
+ f.name = a_name
741
+ f.type = a_type
742
+
743
+ # Assume it's a normal field until proven otherwise. We're next
744
+ # going to decide if it's a ClassVar or InitVar, everything else
745
+ # is just a normal field.
746
+ f._field_type = _FIELD
747
+
748
+ # In addition to checking for actual types here, also check for
749
+ # string annotations. get_type_hints() won't always work for us
750
+ # (see https://github.com/python/typing/issues/508 for example),
751
+ # plus it's expensive and would require an eval for every string
752
+ # annotation. So, make a best effort to see if this is a ClassVar
753
+ # or InitVar using regex's and checking that the thing referenced
754
+ # is actually of the correct type.
755
+
756
+ # For the complete discussion, see https://bugs.python.org/issue33453
757
+
758
+ # If typing has not been imported, then it's impossible for any
759
+ # annotation to be a ClassVar. So, only look for ClassVar if
760
+ # typing has been imported by any module (not necessarily cls's
761
+ # module).
762
+ typing = sys.modules.get('typing')
763
+ if typing:
764
+ if (_is_classvar(a_type, typing)
765
+ or (isinstance(f.type, str)
766
+ and _is_type(f.type, cls, typing, typing.ClassVar,
767
+ _is_classvar))):
768
+ f._field_type = _FIELD_CLASSVAR
769
+
770
+ # If the type is InitVar, or if it's a matching string annotation,
771
+ # then it's an InitVar.
772
+ if f._field_type is _FIELD:
773
+ # The module we're checking against is the module we're
774
+ # currently in (dataclasses.py).
775
+ dataclasses = sys.modules[__name__]
776
+ if (_is_initvar(a_type, dataclasses)
777
+ or (isinstance(f.type, str)
778
+ and _is_type(f.type, cls, dataclasses, dataclasses.InitVar,
779
+ _is_initvar))):
780
+ f._field_type = _FIELD_INITVAR
781
+
782
+ # Validations for individual fields. This is delayed until now,
783
+ # instead of in the Field() constructor, since only here do we
784
+ # know the field name, which allows for better error reporting.
785
+
786
+ # Special restrictions for ClassVar and InitVar.
787
+ if f._field_type in (_FIELD_CLASSVAR, _FIELD_INITVAR):
788
+ if f.default_factory is not MISSING:
789
+ raise TypeError(f'field {f.name} cannot have a '
790
+ 'default factory')
791
+ # Should I check for other field settings? default_factory
792
+ # seems the most serious to check for. Maybe add others. For
793
+ # example, how about init=False (or really,
794
+ # init=<not-the-default-init-value>)? It makes no sense for
795
+ # ClassVar and InitVar to specify init=<anything>.
796
+
797
+ # kw_only validation and assignment.
798
+ if f._field_type in (_FIELD, _FIELD_INITVAR):
799
+ # For real and InitVar fields, if kw_only wasn't specified use the
800
+ # default value.
801
+ if f.kw_only is MISSING:
802
+ f.kw_only = default_kw_only
803
+ else:
804
+ # Make sure kw_only isn't set for ClassVars
805
+ assert f._field_type is _FIELD_CLASSVAR
806
+ if f.kw_only is not MISSING:
807
+ raise TypeError(f'field {f.name} is a ClassVar but specifies '
808
+ 'kw_only')
809
+
810
+ # For real fields, disallow mutable defaults for known types.
811
+ if f._field_type is _FIELD and isinstance(f.default, (list, dict, set)):
812
+ raise ValueError(f'mutable default {type(f.default)} for field '
813
+ f'{f.name} is not allowed: use default_factory')
814
+
815
+ return f
816
+
817
+ def _set_qualname(cls, value):
818
+ # Ensure that the functions returned from _create_fn uses the proper
819
+ # __qualname__ (the class they belong to).
820
+ if isinstance(value, FunctionType):
821
+ value.__qualname__ = f"{cls.__qualname__}.{value.__name__}"
822
+ return value
823
+
824
+ def _set_new_attribute(cls, name, value):
825
+ # Never overwrites an existing attribute. Returns True if the
826
+ # attribute already exists.
827
+ if name in cls.__dict__:
828
+ return True
829
+ _set_qualname(cls, value)
830
+ setattr(cls, name, value)
831
+ return False
832
+
833
+
834
+ # Decide if/how we're going to create a hash function. Key is
835
+ # (unsafe_hash, eq, frozen, does-hash-exist). Value is the action to
836
+ # take. The common case is to do nothing, so instead of providing a
837
+ # function that is a no-op, use None to signify that.
838
+
839
+ def _hash_set_none(cls, fields, globals):
840
+ return None
841
+
842
+ def _hash_add(cls, fields, globals):
843
+ flds = [f for f in fields if (f.compare if f.hash is None else f.hash)]
844
+ return _set_qualname(cls, _hash_fn(flds, globals))
845
+
846
+ def _hash_exception(cls, fields, globals):
847
+ # Raise an exception.
848
+ raise TypeError(f'Cannot overwrite attribute __hash__ '
849
+ f'in class {cls.__name__}')
850
+
851
+ #
852
+ # +-------------------------------------- unsafe_hash?
853
+ # | +------------------------------- eq?
854
+ # | | +------------------------ frozen?
855
+ # | | | +---------------- has-explicit-hash?
856
+ # | | | |
857
+ # | | | | +------- action
858
+ # | | | | |
859
+ # v v v v v
860
+ _hash_action = {(False, False, False, False): None,
861
+ (False, False, False, True ): None,
862
+ (False, False, True, False): None,
863
+ (False, False, True, True ): None,
864
+ (False, True, False, False): _hash_set_none,
865
+ (False, True, False, True ): None,
866
+ (False, True, True, False): _hash_add,
867
+ (False, True, True, True ): None,
868
+ (True, False, False, False): _hash_add,
869
+ (True, False, False, True ): _hash_exception,
870
+ (True, False, True, False): _hash_add,
871
+ (True, False, True, True ): _hash_exception,
872
+ (True, True, False, False): _hash_add,
873
+ (True, True, False, True ): _hash_exception,
874
+ (True, True, True, False): _hash_add,
875
+ (True, True, True, True ): _hash_exception,
876
+ }
877
+ # See https://bugs.python.org/issue32929#msg312829 for an if-statement
878
+ # version of this table.
879
+
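
# Behavior sketch for the table above (illustrative, not part of this module):
from dataclasses import dataclass

@dataclass  # unsafe_hash=False, eq=True, frozen=False -> __hash__ set to None
class A:
    x: int

@dataclass(frozen=True)  # eq=True, frozen=True -> field-based __hash__ added
class B:
    x: int

assert A.__hash__ is None           # default dataclasses are unhashable
assert hash(B(1)) == hash(B(1))     # frozen instances hash by field values
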
880
+
881
+ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen,
882
+ match_args, kw_only, slots):
883
+ # Now that dicts retain insertion order, there's no reason to use
884
+ # an ordered dict. I am leveraging that ordering here, because
885
+ # derived class fields overwrite base class fields, but the order
886
+ # is defined by the base class, which is found first.
887
+ fields = {}
888
+
889
+ if cls.__module__ in sys.modules:
890
+ globals = sys.modules[cls.__module__].__dict__
891
+ else:
892
+ # Theoretically this can happen if someone writes
893
+ # a custom string to cls.__module__. In which case
894
+ # such dataclass won't be fully introspectable
895
+ # (w.r.t. typing.get_type_hints) but will still function
896
+ # correctly.
897
+ globals = {}
898
+
899
+ setattr(cls, _PARAMS, _DataclassParams(init, repr, eq, order,
900
+ unsafe_hash, frozen))
901
+
902
+ # Find our base classes in reverse MRO order, and exclude
903
+ # ourselves. In reversed order so that more derived classes
904
+ # override earlier field definitions in base classes. As long as
905
+ # we're iterating over them, see if any are frozen.
906
+ any_frozen_base = False
907
+ has_dataclass_bases = False
908
+ for b in cls.__mro__[-1:0:-1]:
909
+ # Only process classes that have been processed by our
910
+ # decorator. That is, they have a _FIELDS attribute.
911
+ base_fields = getattr(b, _FIELDS, None)
912
+ if base_fields is not None:
913
+ has_dataclass_bases = True
914
+ for f in base_fields.values():
915
+ fields[f.name] = f
916
+ if getattr(b, _PARAMS).frozen:
917
+ any_frozen_base = True
918
+
919
+ # Annotations that are defined in this class (not in base
920
+ # classes). If __annotations__ isn't present, then this class
921
+ # adds no new annotations. We use this to compute fields that are
922
+ # added by this class.
923
+ #
924
+ # Fields are found from cls_annotations, which is guaranteed to be
925
+ # ordered. Default values are from class attributes, if a field
926
+ # has a default. If the default value is a Field(), then it
927
+ # contains additional info beyond (and possibly including) the
928
+ # actual default value. Pseudo-fields ClassVars and InitVars are
929
+ # included, despite the fact that they're not real fields. That's
930
+ # dealt with later.
931
+ cls_annotations = cls.__dict__.get('__annotations__', {})
932
+
933
+ # Now find fields in our class. While doing so, validate some
934
+ # things, and set the default values (as class attributes) where
935
+ # we can.
936
+ cls_fields = []
937
+ # Get a reference to this module for the _is_kw_only() test.
938
+ KW_ONLY_seen = False
939
+ dataclasses = sys.modules[__name__]
940
+ for name, type in cls_annotations.items():
941
+ # See if this is a marker to change the value of kw_only.
942
+ if (_is_kw_only(type, dataclasses)
943
+ or (isinstance(type, str)
944
+ and _is_type(type, cls, dataclasses, dataclasses.KW_ONLY,
945
+ _is_kw_only))):
946
+ # Switch the default to kw_only=True, and ignore this
947
+ # annotation: it's not a real field.
948
+ if KW_ONLY_seen:
949
+ raise TypeError(f'{name!r} is KW_ONLY, but KW_ONLY '
950
+ 'has already been specified')
951
+ KW_ONLY_seen = True
952
+ kw_only = True
953
+ else:
954
+ # Otherwise it's a field of some type.
955
+ cls_fields.append(_get_field(cls, name, type, kw_only))
956
+
957
+ for f in cls_fields:
958
+ fields[f.name] = f
959
+
960
+ # If the class attribute (which is the default value for this
961
+ # field) exists and is of type 'Field', replace it with the
962
+ # real default. This is so that normal class introspection
963
+ # sees a real default value, not a Field.
964
+ if isinstance(getattr(cls, f.name, None), Field):
965
+ if f.default is MISSING:
966
+ # If there's no default, delete the class attribute.
967
+ # This happens if we specify field(repr=False), for
968
+ # example (that is, we specified a field object, but
969
+ # no default value). Also if we're using a default
970
+ # factory. The class attribute should not be set at
971
+ # all in the post-processed class.
972
+ delattr(cls, f.name)
973
+ else:
974
+ setattr(cls, f.name, f.default)
975
+
976
+ # Do we have any Field members that don't also have annotations?
977
+ for name, value in cls.__dict__.items():
978
+ if isinstance(value, Field) and name not in cls_annotations:
979
+ raise TypeError(f'{name!r} is a field but has no type annotation')
980
+
981
+ # Check rules that apply if we are derived from any dataclasses.
982
+ if has_dataclass_bases:
983
+ # Raise an exception if any of our bases are frozen, but we're not.
984
+ if any_frozen_base and not frozen:
985
+ raise TypeError('cannot inherit non-frozen dataclass from a '
986
+ 'frozen one')
987
+
988
+ # Raise an exception if we're frozen, but none of our bases are.
989
+ if not any_frozen_base and frozen:
990
+ raise TypeError('cannot inherit frozen dataclass from a '
991
+ 'non-frozen one')
992
+
993
+ # Remember all of the fields on our class (including bases). This
994
+ # also marks this class as being a dataclass.
995
+ setattr(cls, _FIELDS, fields)
996
+
997
+ # Was this class defined with an explicit __hash__? Note that if
998
+ # __eq__ is defined in this class, then python will automatically
999
+ # set __hash__ to None. This is a heuristic, as it's possible
1000
+ # that such a __hash__ == None was not auto-generated, but it's
1001
+ # close enough.
1002
+     class_hash = cls.__dict__.get('__hash__', MISSING)
+     has_explicit_hash = not (class_hash is MISSING or
+                              (class_hash is None and '__eq__' in cls.__dict__))
+
+     # If we're generating ordering methods, we must be generating the
+     # eq methods.
+     if order and not eq:
+         raise ValueError('eq must be true if order is true')
+
+     # Include InitVars and regular fields (so, not ClassVars). This is
+     # initialized here, outside of the "if init:" test, because std_init_fields
+     # is used with match_args, below.
+     all_init_fields = [f for f in fields.values()
+                        if f._field_type in (_FIELD, _FIELD_INITVAR)]
+     (std_init_fields,
+      kw_only_init_fields) = _fields_in_init_order(all_init_fields)
+
+     if init:
+         # Does this class have a post-init function?
+         has_post_init = hasattr(cls, _POST_INIT_NAME)
+
+         _set_new_attribute(cls, '__init__',
+                            _init_fn(all_init_fields,
+                                     std_init_fields,
+                                     kw_only_init_fields,
+                                     frozen,
+                                     has_post_init,
+                                     # The name to use for the "self"
+                                     # param in __init__. Use "self"
+                                     # if possible.
+                                     '__dataclass_self__' if 'self' in fields
+                                             else 'self',
+                                     globals,
+                                     slots,
+                           ))
+
+     # Get the fields as a list, and include only real fields. This is
+     # used in all of the following methods.
+     field_list = [f for f in fields.values() if f._field_type is _FIELD]
+
+     if repr:
+         flds = [f for f in field_list if f.repr]
+         _set_new_attribute(cls, '__repr__', _repr_fn(flds, globals))
+
+     if eq:
+         # Create __eq__ method. There's no need for a __ne__ method,
+         # since python will call __eq__ and negate it.
+         flds = [f for f in field_list if f.compare]
+         self_tuple = _tuple_str('self', flds)
+         other_tuple = _tuple_str('other', flds)
+         _set_new_attribute(cls, '__eq__',
+                            _cmp_fn('__eq__', '==',
+                                    self_tuple, other_tuple,
+                                    globals=globals))
+
+     if order:
+         # Create and set the ordering methods.
+         flds = [f for f in field_list if f.compare]
+         self_tuple = _tuple_str('self', flds)
+         other_tuple = _tuple_str('other', flds)
+         for name, op in [('__lt__', '<'),
+                          ('__le__', '<='),
+                          ('__gt__', '>'),
+                          ('__ge__', '>='),
+                          ]:
+             if _set_new_attribute(cls, name,
+                                   _cmp_fn(name, op, self_tuple, other_tuple,
+                                           globals=globals)):
+                 raise TypeError(f'Cannot overwrite attribute {name} '
+                                 f'in class {cls.__name__}. Consider using '
+                                 'functools.total_ordering')
+
+     if frozen:
+         for fn in _frozen_get_del_attr(cls, field_list, globals):
+             if _set_new_attribute(cls, fn.__name__, fn):
+                 raise TypeError(f'Cannot overwrite attribute {fn.__name__} '
+                                 f'in class {cls.__name__}')
+
+     # Decide if/how we're going to create a hash function.
+     hash_action = _hash_action[bool(unsafe_hash),
+                                bool(eq),
+                                bool(frozen),
+                                has_explicit_hash]
+     if hash_action:
+         # No need to call _set_new_attribute here, since by the time
+         # we're here the overwriting is unconditional.
+         cls.__hash__ = hash_action(cls, field_list, globals)
+
+     if not getattr(cls, '__doc__'):
+         # Create a class doc-string.
+         cls.__doc__ = (cls.__name__ +
+                        str(inspect.signature(cls)).replace(' -> None', ''))
+
+     if match_args:
+         # I could probably compute this once
+         _set_new_attribute(cls, '__match_args__',
+                            tuple(f.name for f in std_init_fields))
+
+     if slots:
+         cls = _add_slots(cls, frozen)
+
+     abc.update_abstractmethods(cls)
+
+     return cls
+
+
+ # _dataclass_getstate and _dataclass_setstate are needed for pickling frozen
+ # classes with slots. These could be slightly more performant if we generated
+ # the code instead of iterating over fields. But that can be a project for
+ # another day, if performance becomes an issue.
+ def _dataclass_getstate(self):
+     return [getattr(self, f.name) for f in fields(self)]
+
+
+ def _dataclass_setstate(self, state):
+     for field, value in zip(fields(self), state):
+         # use setattr because dataclass may be frozen
+         object.__setattr__(self, field.name, value)
+
+
+ def _add_slots(cls, is_frozen):
+     # Need to create a new class, since we can't set __slots__
+     # after a class has been created.
+
+     # Make sure __slots__ isn't already set.
+     if '__slots__' in cls.__dict__:
+         raise TypeError(f'{cls.__name__} already specifies __slots__')
+
+     # Create a new dict for our new class.
+     cls_dict = dict(cls.__dict__)
+     field_names = tuple(f.name for f in fields(cls))
+     cls_dict['__slots__'] = field_names
+     for field_name in field_names:
+         # Remove our attributes, if present. They'll still be
+         # available in _MARKER.
+         cls_dict.pop(field_name, None)
+
+     # Remove __dict__ itself.
+     cls_dict.pop('__dict__', None)
+
+     # And finally create the class.
+     qualname = getattr(cls, '__qualname__', None)
+     cls = type(cls)(cls.__name__, cls.__bases__, cls_dict)
+     if qualname is not None:
+         cls.__qualname__ = qualname
+
+     if is_frozen:
+         # Need this for pickling frozen classes with slots.
+         cls.__getstate__ = _dataclass_getstate
+         cls.__setstate__ = _dataclass_setstate
+
+     return cls
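+
+ # Illustrative sketch (not part of the original module): the two hooks
+ # assigned above are what make frozen, slotted dataclasses round-trip
+ # through pickle, since such instances have no __dict__ to copy:
+ #
+ #     import pickle
+ #
+ #     @dataclass(frozen=True, slots=True)
+ #     class Point:
+ #         x: int
+ #         y: int
+ #
+ #     p = Point(1, 2)
+ #     assert pickle.loads(pickle.dumps(p)) == p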
+
+
+ def dataclass(cls=None, /, *, init=True, repr=True, eq=True, order=False,
+               unsafe_hash=False, frozen=False, match_args=True,
+               kw_only=False, slots=False):
+     """Returns the same class as was passed in, with dunder methods
+     added based on the fields defined in the class.
+
+     Examines PEP 526 __annotations__ to determine fields.
+
+     If init is true, an __init__() method is added to the class. If
+     repr is true, a __repr__() method is added. If order is true, rich
+     comparison dunder methods are added. If unsafe_hash is true, a
+     __hash__() method is added. If frozen is true, fields may
+     not be assigned to after instance creation. If match_args is true,
+     the __match_args__ tuple is added. If kw_only is true, then by
+     default all fields are keyword-only. If slots is true, a
+     __slots__ attribute is added.
+     """
+
+     def wrap(cls):
+         return _process_class(cls, init, repr, eq, order, unsafe_hash,
+                               frozen, match_args, kw_only, slots)
+
+     # See if we're being called as @dataclass or @dataclass().
+     if cls is None:
+         # We're called with parens.
+         return wrap
+
+     # We're called as @dataclass without parens.
+     return wrap(cls)
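+
+ # A minimal usage sketch (illustrative, not part of the original module).
+ # Because of the cls=None handling above, both spellings are equivalent:
+ #
+ #     @dataclass
+ #     class Point:
+ #         x: int
+ #         y: int = 0
+ #
+ #     @dataclass()
+ #     class Point2:
+ #         x: int
+ #         y: int = 0
+ #
+ #     Point(1)     # Point(x=1, y=0); __init__/__repr__/__eq__ generated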
+
+
+ def fields(class_or_instance):
+     """Return a tuple describing the fields of this dataclass.
+
+     Accepts a dataclass or an instance of one. Tuple elements are of
+     type Field.
+     """
+
+     # Might it be worth caching this, per class?
+     try:
+         fields = getattr(class_or_instance, _FIELDS)
+     except AttributeError:
+         raise TypeError('must be called with a dataclass type or instance') from None
+
+     # Exclude pseudo-fields. Note that fields is sorted by insertion
+     # order, so the order of the tuple is as the fields were defined.
+     return tuple(f for f in fields.values() if f._field_type is _FIELD)
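+
+ # Illustrative sketch (not part of the original module): fields() accepts
+ # the class or an instance, and omits ClassVar/InitVar pseudo-fields:
+ #
+ #     @dataclass
+ #     class C:
+ #         x: int
+ #         y: int = 5
+ #
+ #     [f.name for f in fields(C)]       # ['x', 'y']
+ #     [f.name for f in fields(C(1))]    # ['x', 'y']
+ #     fields(C)[1].default              # 5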
+
+
+ def _is_dataclass_instance(obj):
+     """Returns True if obj is an instance of a dataclass."""
+     return hasattr(type(obj), _FIELDS)
+
+
+ def is_dataclass(obj):
+     """Returns True if obj is a dataclass or an instance of a
+     dataclass."""
+     cls = obj if isinstance(obj, type) and not isinstance(obj, GenericAlias) else type(obj)
+     return hasattr(cls, _FIELDS)
+
+
+ def asdict(obj, *, dict_factory=dict):
+     """Return the fields of a dataclass instance as a new dictionary mapping
+     field names to field values.
+
+     Example usage:
+
+       @dataclass
+       class C:
+           x: int
+           y: int
+
+       c = C(1, 2)
+       assert asdict(c) == {'x': 1, 'y': 2}
+
+     If given, 'dict_factory' will be used instead of built-in dict.
+     The function applies recursively to field values that are
+     dataclass instances. This will also look into built-in containers:
+     tuples, lists, and dicts.
+     """
+     if not _is_dataclass_instance(obj):
+         raise TypeError("asdict() should be called on dataclass instances")
+     return _asdict_inner(obj, dict_factory)
+
+
+ def _asdict_inner(obj, dict_factory):
+     if _is_dataclass_instance(obj):
+         result = []
+         for f in fields(obj):
+             value = _asdict_inner(getattr(obj, f.name), dict_factory)
+             result.append((f.name, value))
+         return dict_factory(result)
+     elif isinstance(obj, tuple) and hasattr(obj, '_fields'):
+         # obj is a namedtuple. Recurse into it, but the returned
+         # object is another namedtuple of the same type. This is
+         # similar to how other list- or tuple-derived classes are
+         # treated (see below), but we just need to create them
+         # differently because a namedtuple's __init__ needs to be
+         # called differently (see bpo-34363).
+
+         # I'm not using namedtuple's _asdict()
+         # method, because:
+         #   - it does not recurse into the namedtuple fields and
+         #     convert them to dicts (using dict_factory).
+         #   - I don't actually want to return a dict here. The main
+         #     use case here is json.dumps, and it handles converting
+         #     namedtuples to lists. Admittedly we're losing some
+         #     information here when we produce a json list instead of a
+         #     dict. Note that if we returned dicts here instead of
+         #     namedtuples, we could no longer call asdict() on a data
+         #     structure where a namedtuple was used as a dict key.
+
+         return type(obj)(*[_asdict_inner(v, dict_factory) for v in obj])
+     elif isinstance(obj, (list, tuple)):
+         # Assume we can create an object of this type by passing in a
+         # generator (which is not true for namedtuples, handled
+         # above).
+         return type(obj)(_asdict_inner(v, dict_factory) for v in obj)
+     elif isinstance(obj, dict):
+         return type(obj)((_asdict_inner(k, dict_factory),
+                           _asdict_inner(v, dict_factory))
+                          for k, v in obj.items())
+     else:
+         return copy.deepcopy(obj)
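+
+ # Illustrative sketch (not part of the original module): asdict() recurses
+ # through nested dataclasses and containers, and dict_factory customizes
+ # the mapping type produced at every level:
+ #
+ #     from collections import OrderedDict
+ #
+ #     @dataclass
+ #     class Inner:
+ #         z: int
+ #
+ #     @dataclass
+ #     class Outer:
+ #         xs: list
+ #
+ #     asdict(Outer([Inner(1), Inner(2)]))
+ #     # {'xs': [{'z': 1}, {'z': 2}]}
+ #     asdict(Outer([Inner(1)]), dict_factory=OrderedDict)
+ #     # OrderedDict([('xs', [OrderedDict([('z', 1)])])])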
+
+
+ def astuple(obj, *, tuple_factory=tuple):
+     """Return the fields of a dataclass instance as a new tuple of field values.
+
+     Example usage::
+
+       @dataclass
+       class C:
+           x: int
+           y: int
+
+       c = C(1, 2)
+       assert astuple(c) == (1, 2)
+
+     If given, 'tuple_factory' will be used instead of built-in tuple.
+     The function applies recursively to field values that are
+     dataclass instances. This will also look into built-in containers:
+     tuples, lists, and dicts.
+     """
+
+     if not _is_dataclass_instance(obj):
+         raise TypeError("astuple() should be called on dataclass instances")
+     return _astuple_inner(obj, tuple_factory)
+
+
+ def _astuple_inner(obj, tuple_factory):
+     if _is_dataclass_instance(obj):
+         result = []
+         for f in fields(obj):
+             value = _astuple_inner(getattr(obj, f.name), tuple_factory)
+             result.append(value)
+         return tuple_factory(result)
+     elif isinstance(obj, tuple) and hasattr(obj, '_fields'):
+         # obj is a namedtuple. Recurse into it, but the returned
+         # object is another namedtuple of the same type. This is
+         # similar to how other list- or tuple-derived classes are
+         # treated (see below), but we just need to create them
+         # differently because a namedtuple's __init__ needs to be
+         # called differently (see bpo-34363).
+         return type(obj)(*[_astuple_inner(v, tuple_factory) for v in obj])
+     elif isinstance(obj, (list, tuple)):
+         # Assume we can create an object of this type by passing in a
+         # generator (which is not true for namedtuples, handled
+         # above).
+         return type(obj)(_astuple_inner(v, tuple_factory) for v in obj)
+     elif isinstance(obj, dict):
+         return type(obj)((_astuple_inner(k, tuple_factory), _astuple_inner(v, tuple_factory))
+                          for k, v in obj.items())
+     else:
+         return copy.deepcopy(obj)
+
+
+ def make_dataclass(cls_name, fields, *, bases=(), namespace=None, init=True,
+                    repr=True, eq=True, order=False, unsafe_hash=False,
+                    frozen=False, match_args=True, kw_only=False, slots=False):
+     """Return a new dynamically created dataclass.
+
+     The dataclass name will be 'cls_name'. 'fields' is an iterable
+     of either (name), (name, type) or (name, type, Field) objects. If type is
+     omitted, use the string 'typing.Any'. Field objects are created by
+     the equivalent of calling 'field(name, type [, Field-info])'.
+
+       C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,))
+
+     is equivalent to:
+
+       @dataclass
+       class C(Base):
+           x: 'typing.Any'
+           y: int
+           z: int = field(init=False)
+
+     For the bases and namespace parameters, see the builtin type() function.
+
+     The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to
+     dataclass().
+     """
+
+     if namespace is None:
+         namespace = {}
+
+     # While we're looking through the field names, validate that they
+     # are identifiers, are not keywords, and not duplicates.
+     seen = set()
+     annotations = {}
+     defaults = {}
+     for item in fields:
+         if isinstance(item, str):
+             name = item
+             tp = 'typing.Any'
+         elif len(item) == 2:
+             name, tp, = item
+         elif len(item) == 3:
+             name, tp, spec = item
+             defaults[name] = spec
+         else:
+             raise TypeError(f'Invalid field: {item!r}')
+
+         if not isinstance(name, str) or not name.isidentifier():
+             raise TypeError(f'Field names must be valid identifiers: {name!r}')
+         if keyword.iskeyword(name):
+             raise TypeError(f'Field names must not be keywords: {name!r}')
+         if name in seen:
+             raise TypeError(f'Field name duplicated: {name!r}')
+
+         seen.add(name)
+         annotations[name] = tp
+
+     # Update 'ns' with the user-supplied namespace plus our calculated values.
+     def exec_body_callback(ns):
+         ns.update(namespace)
+         ns.update(defaults)
+         ns['__annotations__'] = annotations
+
+     # We use `types.new_class()` instead of simply `type()` to allow dynamic creation
+     # of generic dataclasses.
+     cls = types.new_class(cls_name, bases, {}, exec_body_callback)
+
+     # Apply the normal decorator.
+     return dataclass(cls, init=init, repr=repr, eq=eq, order=order,
+                      unsafe_hash=unsafe_hash, frozen=frozen,
+                      match_args=match_args, kw_only=kw_only, slots=slots)
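+
+ # Illustrative sketch (not part of the original module): the dynamic form
+ # accepts the same keyword options as the decorator, e.g.
+ #
+ #     Point = make_dataclass('Point',
+ #                            [('x', int), ('y', int, field(default=0))],
+ #                            frozen=True)
+ #     Point(3)     # Point(x=3, y=0)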
+
+
+ def replace(obj, /, **changes):
+     """Return a new object replacing specified fields with new values.
+
+     This is especially useful for frozen classes. Example usage:
+
+       @dataclass(frozen=True)
+       class C:
+           x: int
+           y: int
+
+       c = C(1, 2)
+       c1 = replace(c, x=3)
+       assert c1.x == 3 and c1.y == 2
+     """
+
+     # We're going to mutate 'changes', but that's okay because it's a
+     # new dict, even if called with 'replace(obj, **my_changes)'.
+
+     if not _is_dataclass_instance(obj):
+         raise TypeError("replace() should be called on dataclass instances")
+
+     # It's an error to have init=False fields in 'changes'.
+     # If a field is not in 'changes', read its value from the provided obj.
+
+     for f in getattr(obj, _FIELDS).values():
+         # Only consider normal fields or InitVars.
+         if f._field_type is _FIELD_CLASSVAR:
+             continue
+
+         if not f.init:
+             # Error if this field is specified in changes.
+             if f.name in changes:
+                 raise ValueError(f'field {f.name} is declared with '
+                                  'init=False, it cannot be specified with '
+                                  'replace()')
+             continue
+
+         if f.name not in changes:
+             if f._field_type is _FIELD_INITVAR and f.default is MISSING:
+                 raise ValueError(f"InitVar {f.name!r} "
+                                  'must be specified with replace()')
+             changes[f.name] = getattr(obj, f.name)
+
+     # Create the new object, which calls __init__() and
+     # __post_init__() (if defined), using all of the init fields we've
+     # added and/or left in 'changes'. If there are values supplied in
+     # changes that aren't fields, this will correctly raise a
+     # TypeError.
+     return obj.__class__(**changes)
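+
+ # Illustrative sketch (not part of the original module): the init=False
+ # rule enforced above, in practice:
+ #
+ #     @dataclass
+ #     class C:
+ #         x: int
+ #         n: int = field(init=False, default=0)
+ #
+ #     replace(C(1), x=2)    # C(x=2, n=0)
+ #     replace(C(1), n=5)    # ValueError: field n is declared with init=False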
janus/lib/python3.10/difflib.py ADDED
@@ -0,0 +1,2056 @@
+ """
+ Module difflib -- helpers for computing deltas between objects.
+
+ Function get_close_matches(word, possibilities, n=3, cutoff=0.6):
+     Use SequenceMatcher to return list of the best "good enough" matches.
+
+ Function context_diff(a, b):
+     For two lists of strings, return a delta in context diff format.
+
+ Function ndiff(a, b):
+     Return a delta: the difference between `a` and `b` (lists of strings).
+
+ Function restore(delta, which):
+     Return one of the two sequences that generated an ndiff delta.
+
+ Function unified_diff(a, b):
+     For two lists of strings, return a delta in unified diff format.
+
+ Class SequenceMatcher:
+     A flexible class for comparing pairs of sequences of any type.
+
+ Class Differ:
+     For producing human-readable deltas from sequences of lines of text.
+
+ Class HtmlDiff:
+     For producing HTML side by side comparison with change highlights.
+ """
+
+ __all__ = ['get_close_matches', 'ndiff', 'restore', 'SequenceMatcher',
+            'Differ', 'IS_CHARACTER_JUNK', 'IS_LINE_JUNK', 'context_diff',
+            'unified_diff', 'diff_bytes', 'HtmlDiff', 'Match']
+
+ from heapq import nlargest as _nlargest
+ from collections import namedtuple as _namedtuple
+ from types import GenericAlias
+
+ Match = _namedtuple('Match', 'a b size')
+
+ def _calculate_ratio(matches, length):
+     if length:
+         return 2.0 * matches / length
+     return 1.0
+
+ class SequenceMatcher:
+
+     """
+     SequenceMatcher is a flexible class for comparing pairs of sequences of
+     any type, so long as the sequence elements are hashable. The basic
+     algorithm predates, and is a little fancier than, an algorithm
+     published in the late 1980's by Ratcliff and Obershelp under the
+     hyperbolic name "gestalt pattern matching". The basic idea is to find
+     the longest contiguous matching subsequence that contains no "junk"
+     elements (R-O doesn't address junk). The same idea is then applied
+     recursively to the pieces of the sequences to the left and to the right
+     of the matching subsequence. This does not yield minimal edit
+     sequences, but does tend to yield matches that "look right" to people.
+
+     SequenceMatcher tries to compute a "human-friendly diff" between two
+     sequences. Unlike e.g. UNIX(tm) diff, the fundamental notion is the
+     longest *contiguous* & junk-free matching subsequence. That's what
+     catches peoples' eyes. The Windows(tm) windiff has another interesting
+     notion, pairing up elements that appear uniquely in each sequence.
+     That, and the method here, appear to yield more intuitive difference
+     reports than does diff. This method appears to be the least vulnerable
+     to syncing up on blocks of "junk lines", though (like blank lines in
+     ordinary text files, or maybe "<P>" lines in HTML files). That may be
+     because this is the only method of the 3 that has a *concept* of
+     "junk" <wink>.
+
+     Example, comparing two strings, and considering blanks to be "junk":
+
+     >>> s = SequenceMatcher(lambda x: x == " ",
+     ...                     "private Thread currentThread;",
+     ...                     "private volatile Thread currentThread;")
+     >>>
+
+     .ratio() returns a float in [0, 1], measuring the "similarity" of the
+     sequences. As a rule of thumb, a .ratio() value over 0.6 means the
+     sequences are close matches:
+
+     >>> print(round(s.ratio(), 3))
+     0.866
+     >>>
+
+     If you're only interested in where the sequences match,
+     .get_matching_blocks() is handy:
+
+     >>> for block in s.get_matching_blocks():
+     ...     print("a[%d] and b[%d] match for %d elements" % block)
+     a[0] and b[0] match for 8 elements
+     a[8] and b[17] match for 21 elements
+     a[29] and b[38] match for 0 elements
+
+     Note that the last tuple returned by .get_matching_blocks() is always a
+     dummy, (len(a), len(b), 0), and this is the only case in which the last
+     tuple element (number of elements matched) is 0.
+
+     If you want to know how to change the first sequence into the second,
+     use .get_opcodes():
+
+     >>> for opcode in s.get_opcodes():
+     ...     print("%6s a[%d:%d] b[%d:%d]" % opcode)
+      equal a[0:8] b[0:8]
+     insert a[8:8] b[8:17]
+      equal a[8:29] b[17:38]
+
+     See the Differ class for a fancy human-friendly file differencer, which
+     uses SequenceMatcher both to compare sequences of lines, and to compare
+     sequences of characters within similar (near-matching) lines.
+
+     See also function get_close_matches() in this module, which shows how
+     simple code building on SequenceMatcher can be used to do useful work.
+
+     Timing: Basic R-O is cubic time worst case and quadratic time expected
+     case. SequenceMatcher is quadratic time for the worst case and has
+     expected-case behavior dependent in a complicated way on how many
+     elements the sequences have in common; best case time is linear.
+     """
+
+     def __init__(self, isjunk=None, a='', b='', autojunk=True):
+         """Construct a SequenceMatcher.
+
+         Optional arg isjunk is None (the default), or a one-argument
+         function that takes a sequence element and returns true iff the
+         element is junk. None is equivalent to passing "lambda x: 0", i.e.
+         no elements are considered to be junk. For example, pass
+             lambda x: x in " \\t"
+         if you're comparing lines as sequences of characters, and don't
+         want to synch up on blanks or hard tabs.
+
+         Optional arg a is the first of two sequences to be compared. By
+         default, an empty string. The elements of a must be hashable. See
+         also .set_seqs() and .set_seq1().
+
+         Optional arg b is the second of two sequences to be compared. By
+         default, an empty string. The elements of b must be hashable. See
+         also .set_seqs() and .set_seq2().
+
+         Optional arg autojunk should be set to False to disable the
+         "automatic junk heuristic" that treats popular elements as junk
+         (see module documentation for more information).
+         """
+
+         # Members:
+         # a
+         #      first sequence
+         # b
+         #      second sequence; differences are computed as "what do
+         #      we need to do to 'a' to change it into 'b'?"
+         # b2j
+         #      for x in b, b2j[x] is a list of the indices (into b)
+         #      at which x appears; junk and popular elements do not appear
+         # fullbcount
+         #      for x in b, fullbcount[x] == the number of times x
+         #      appears in b; only materialized if really needed (used
+         #      only for computing quick_ratio())
+         # matching_blocks
+         #      a list of (i, j, k) triples, where a[i:i+k] == b[j:j+k];
+         #      ascending & non-overlapping in i and in j; terminated by
+         #      a dummy (len(a), len(b), 0) sentinel
+         # opcodes
+         #      a list of (tag, i1, i2, j1, j2) tuples, where tag is
+         #      one of
+         #          'replace'   a[i1:i2] should be replaced by b[j1:j2]
+         #          'delete'    a[i1:i2] should be deleted
+         #          'insert'    b[j1:j2] should be inserted
+         #          'equal'     a[i1:i2] == b[j1:j2]
+         # isjunk
+         #      a user-supplied function taking a sequence element and
+         #      returning true iff the element is "junk" -- this has
+         #      subtle but helpful effects on the algorithm, which I'll
+         #      get around to writing up someday <0.9 wink>.
+         #      DON'T USE! Only __chain_b uses this. Use "in self.bjunk".
+         # bjunk
+         #      the items in b for which isjunk is True.
+         # bpopular
+         #      nonjunk items in b treated as junk by the heuristic (if used).
+
+         self.isjunk = isjunk
+         self.a = self.b = None
+         self.autojunk = autojunk
+         self.set_seqs(a, b)
+
+     def set_seqs(self, a, b):
+         """Set the two sequences to be compared.
+
+         >>> s = SequenceMatcher()
+         >>> s.set_seqs("abcd", "bcde")
+         >>> s.ratio()
+         0.75
+         """
+
+         self.set_seq1(a)
+         self.set_seq2(b)
+
+     def set_seq1(self, a):
+         """Set the first sequence to be compared.
+
+         The second sequence to be compared is not changed.
+
+         >>> s = SequenceMatcher(None, "abcd", "bcde")
+         >>> s.ratio()
+         0.75
+         >>> s.set_seq1("bcde")
+         >>> s.ratio()
+         1.0
+         >>>
+
+         SequenceMatcher computes and caches detailed information about the
+         second sequence, so if you want to compare one sequence S against
+         many sequences, use .set_seq2(S) once and call .set_seq1(x)
+         repeatedly for each of the other sequences.
+
+         See also set_seqs() and set_seq2().
+         """
+
+         if a is self.a:
+             return
+         self.a = a
+         self.matching_blocks = self.opcodes = None
+
+     def set_seq2(self, b):
+         """Set the second sequence to be compared.
+
+         The first sequence to be compared is not changed.
+
+         >>> s = SequenceMatcher(None, "abcd", "bcde")
+         >>> s.ratio()
+         0.75
+         >>> s.set_seq2("abcd")
+         >>> s.ratio()
+         1.0
+         >>>
+
+         SequenceMatcher computes and caches detailed information about the
+         second sequence, so if you want to compare one sequence S against
+         many sequences, use .set_seq2(S) once and call .set_seq1(x)
+         repeatedly for each of the other sequences.
+
+         See also set_seqs() and set_seq1().
+         """
+
+         if b is self.b:
+             return
+         self.b = b
+         self.matching_blocks = self.opcodes = None
+         self.fullbcount = None
+         self.__chain_b()
+
+     # For each element x in b, set b2j[x] to a list of the indices in
+     # b where x appears; the indices are in increasing order; note that
+     # the number of times x appears in b is len(b2j[x]) ...
+     # when self.isjunk is defined, junk elements don't show up in this
+     # map at all, which stops the central find_longest_match method
+     # from starting any matching block at a junk element ...
+     # b2j also does not contain entries for "popular" elements, meaning
+     # elements that account for more than 1 + 1% of the total elements, and
+     # when the sequence is reasonably large (>= 200 elements); this can
+     # be viewed as an adaptive notion of semi-junk, and yields an enormous
+     # speedup when, e.g., comparing program files with hundreds of
+     # instances of "return NULL;" ...
+     # note that this is only called when b changes; so for cross-product
+     # kinds of matches, it's best to call set_seq2 once, then set_seq1
+     # repeatedly
+
+     def __chain_b(self):
+         # Because isjunk is a user-defined (not C) function, and we test
+         # for junk a LOT, it's important to minimize the number of calls.
+         # Before the tricks described here, __chain_b was by far the most
+         # time-consuming routine in the whole module! If anyone sees
+         # Jim Roskind, thank him again for profile.py -- I never would
+         # have guessed that.
+         # The first trick is to build b2j ignoring the possibility
+         # of junk. I.e., we don't call isjunk at all yet. Throwing
+         # out the junk later is much cheaper than building b2j "right"
+         # from the start.
+         b = self.b
+         self.b2j = b2j = {}
+
+         for i, elt in enumerate(b):
+             indices = b2j.setdefault(elt, [])
+             indices.append(i)
+
+         # Purge junk elements
+         self.bjunk = junk = set()
+         isjunk = self.isjunk
+         if isjunk:
+             for elt in b2j.keys():
+                 if isjunk(elt):
+                     junk.add(elt)
+             for elt in junk: # separate loop avoids separate list of keys
+                 del b2j[elt]
+
+         # Purge popular elements that are not junk
+         self.bpopular = popular = set()
+         n = len(b)
+         if self.autojunk and n >= 200:
+             ntest = n // 100 + 1
+             for elt, idxs in b2j.items():
+                 if len(idxs) > ntest:
+                     popular.add(elt)
+             for elt in popular: # ditto; as fast for 1% deletion
+                 del b2j[elt]
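+
+     # Illustrative note (not part of the original module): the autojunk
+     # heuristic above only applies to sequences of 200 or more elements,
+     # and it can surprise on highly repetitive input. A sketch:
+     #
+     #     SequenceMatcher(None, 'x' * 300, 'x' * 300).ratio()
+     #     # 0.0 -- 'x' is "popular", so it was purged from b2j above
+     #     SequenceMatcher(None, 'x' * 300, 'x' * 300, autojunk=False).ratio()
+     #     # 1.0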
+
+     def find_longest_match(self, alo=0, ahi=None, blo=0, bhi=None):
+         """Find longest matching block in a[alo:ahi] and b[blo:bhi].
+
+         By default it will find the longest match in the entirety of a and b.
+
+         If isjunk is not defined:
+
+         Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where
+             alo <= i <= i+k <= ahi
+             blo <= j <= j+k <= bhi
+         and for all (i',j',k') meeting those conditions,
+             k >= k'
+             i <= i'
+             and if i == i', j <= j'
+
+         In other words, of all maximal matching blocks, return one that
+         starts earliest in a, and of all those maximal matching blocks that
+         start earliest in a, return the one that starts earliest in b.
+
+         >>> s = SequenceMatcher(None, " abcd", "abcd abcd")
+         >>> s.find_longest_match(0, 5, 0, 9)
+         Match(a=0, b=4, size=5)
+
+         If isjunk is defined, first the longest matching block is
+         determined as above, but with the additional restriction that no
+         junk element appears in the block. Then that block is extended as
+         far as possible by matching (only) junk elements on both sides. So
+         the resulting block never matches on junk except as identical junk
+         happens to be adjacent to an "interesting" match.
+
+         Here's the same example as before, but considering blanks to be
+         junk. That prevents " abcd" from matching the " abcd" at the tail
+         end of the second sequence directly. Instead only the "abcd" can
+         match, and matches the leftmost "abcd" in the second sequence:
+
+         >>> s = SequenceMatcher(lambda x: x==" ", " abcd", "abcd abcd")
+         >>> s.find_longest_match(0, 5, 0, 9)
+         Match(a=1, b=0, size=4)
+
+         If no blocks match, return (alo, blo, 0).
+
+         >>> s = SequenceMatcher(None, "ab", "c")
+         >>> s.find_longest_match(0, 2, 0, 1)
+         Match(a=0, b=0, size=0)
+         """
+
+         # CAUTION: stripping common prefix or suffix would be incorrect.
+         # E.g.,
+         #    ab
+         #    acab
+         # Longest matching block is "ab", but if common prefix is
+         # stripped, it's "a" (tied with "b"). UNIX(tm) diff does so
+         # strip, so ends up claiming that ab is changed to acab by
+         # inserting "ca" in the middle. That's minimal but unintuitive:
+         # "it's obvious" that someone inserted "ac" at the front.
+         # Windiff ends up at the same place as diff, but by pairing up
+         # the unique 'b's and then matching the first two 'a's.
+
+         a, b, b2j, isbjunk = self.a, self.b, self.b2j, self.bjunk.__contains__
+         if ahi is None:
+             ahi = len(a)
+         if bhi is None:
+             bhi = len(b)
+         besti, bestj, bestsize = alo, blo, 0
+         # find longest junk-free match
+         # during an iteration of the loop, j2len[j] = length of longest
+         # junk-free match ending with a[i-1] and b[j]
+         j2len = {}
+         nothing = []
+         for i in range(alo, ahi):
+             # look at all instances of a[i] in b; note that because
+             # b2j has no junk keys, the loop is skipped if a[i] is junk
+             j2lenget = j2len.get
+             newj2len = {}
+             for j in b2j.get(a[i], nothing):
+                 # a[i] matches b[j]
+                 if j < blo:
+                     continue
+                 if j >= bhi:
+                     break
+                 k = newj2len[j] = j2lenget(j-1, 0) + 1
+                 if k > bestsize:
+                     besti, bestj, bestsize = i-k+1, j-k+1, k
+             j2len = newj2len
+
+         # Extend the best by non-junk elements on each end. In particular,
+         # "popular" non-junk elements aren't in b2j, which greatly speeds
+         # the inner loop above, but also means "the best" match so far
+         # doesn't contain any junk *or* popular non-junk elements.
+         while besti > alo and bestj > blo and \
+               not isbjunk(b[bestj-1]) and \
+               a[besti-1] == b[bestj-1]:
+             besti, bestj, bestsize = besti-1, bestj-1, bestsize+1
+         while besti+bestsize < ahi and bestj+bestsize < bhi and \
+               not isbjunk(b[bestj+bestsize]) and \
+               a[besti+bestsize] == b[bestj+bestsize]:
+             bestsize += 1
+
+         # Now that we have a wholly interesting match (albeit possibly
+         # empty!), we may as well suck up the matching junk on each
+         # side of it too. Can't think of a good reason not to, and it
+         # saves post-processing the (possibly considerable) expense of
+         # figuring out what to do with it. In the case of an empty
+         # interesting match, this is clearly the right thing to do,
+         # because no other kind of match is possible in the regions.
+         while besti > alo and bestj > blo and \
+               isbjunk(b[bestj-1]) and \
+               a[besti-1] == b[bestj-1]:
+             besti, bestj, bestsize = besti-1, bestj-1, bestsize+1
+         while besti+bestsize < ahi and bestj+bestsize < bhi and \
+               isbjunk(b[bestj+bestsize]) and \
+               a[besti+bestsize] == b[bestj+bestsize]:
+             bestsize = bestsize + 1
+
+         return Match(besti, bestj, bestsize)
+
+     def get_matching_blocks(self):
+         """Return list of triples describing matching subsequences.
+
+         Each triple is of the form (i, j, n), and means that
+         a[i:i+n] == b[j:j+n]. The triples are monotonically increasing in
+         i and in j. New in Python 2.5, it's also guaranteed that if
+         (i, j, n) and (i', j', n') are adjacent triples in the list, and
+         the second is not the last triple in the list, then i+n != i' or
+         j+n != j'. IOW, adjacent triples never describe adjacent equal
+         blocks.
+
+         The last triple is a dummy, (len(a), len(b), 0), and is the only
+         triple with n==0.
+
+         >>> s = SequenceMatcher(None, "abxcd", "abcd")
+         >>> list(s.get_matching_blocks())
+         [Match(a=0, b=0, size=2), Match(a=3, b=2, size=2), Match(a=5, b=4, size=0)]
+         """
+
+         if self.matching_blocks is not None:
+             return self.matching_blocks
+         la, lb = len(self.a), len(self.b)
+
+         # This is most naturally expressed as a recursive algorithm, but
+         # at least one user bumped into extreme use cases that exceeded
+         # the recursion limit on their box. So, now we maintain a list
+         # ('queue`) of blocks we still need to look at, and append partial
+         # results to `matching_blocks` in a loop; the matches are sorted
+         # at the end.
+         queue = [(0, la, 0, lb)]
+         matching_blocks = []
+         while queue:
+             alo, ahi, blo, bhi = queue.pop()
+             i, j, k = x = self.find_longest_match(alo, ahi, blo, bhi)
+             # a[alo:i] vs b[blo:j] unknown
+             # a[i:i+k] same as b[j:j+k]
+             # a[i+k:ahi] vs b[j+k:bhi] unknown
+             if k:   # if k is 0, there was no matching block
+                 matching_blocks.append(x)
+                 if alo < i and blo < j:
+                     queue.append((alo, i, blo, j))
+                 if i+k < ahi and j+k < bhi:
+                     queue.append((i+k, ahi, j+k, bhi))
+         matching_blocks.sort()
+
+         # It's possible that we have adjacent equal blocks in the
+         # matching_blocks list now. Starting with 2.5, this code was added
+         # to collapse them.
+         i1 = j1 = k1 = 0
+         non_adjacent = []
+         for i2, j2, k2 in matching_blocks:
+             # Is this block adjacent to i1, j1, k1?
+             if i1 + k1 == i2 and j1 + k1 == j2:
+                 # Yes, so collapse them -- this just increases the length of
+                 # the first block by the length of the second, and the first
+                 # block so lengthened remains the block to compare against.
+                 k1 += k2
+             else:
+                 # Not adjacent. Remember the first block (k1==0 means it's
+                 # the dummy we started with), and make the second block the
+                 # new block to compare against.
+                 if k1:
+                     non_adjacent.append((i1, j1, k1))
+                 i1, j1, k1 = i2, j2, k2
+         if k1:
+             non_adjacent.append((i1, j1, k1))
+
+         non_adjacent.append( (la, lb, 0) )
+         self.matching_blocks = list(map(Match._make, non_adjacent))
+         return self.matching_blocks
+
+     def get_opcodes(self):
+         """Return list of 5-tuples describing how to turn a into b.
+
+         Each tuple is of the form (tag, i1, i2, j1, j2). The first tuple
+         has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the
+         tuple preceding it, and likewise for j1 == the previous j2.
+
+         The tags are strings, with these meanings:
+
+         'replace':  a[i1:i2] should be replaced by b[j1:j2]
+         'delete':   a[i1:i2] should be deleted.
+                     Note that j1==j2 in this case.
+         'insert':   b[j1:j2] should be inserted at a[i1:i1].
+                     Note that i1==i2 in this case.
+         'equal':    a[i1:i2] == b[j1:j2]
+
+         >>> a = "qabxcd"
+         >>> b = "abycdf"
+         >>> s = SequenceMatcher(None, a, b)
+         >>> for tag, i1, i2, j1, j2 in s.get_opcodes():
+         ...    print(("%7s a[%d:%d] (%s) b[%d:%d] (%s)" %
+         ...           (tag, i1, i2, a[i1:i2], j1, j2, b[j1:j2])))
+          delete a[0:1] (q) b[0:0] ()
+           equal a[1:3] (ab) b[0:2] (ab)
+         replace a[3:4] (x) b[2:3] (y)
+           equal a[4:6] (cd) b[3:5] (cd)
+          insert a[6:6] () b[5:6] (f)
+         """
+
+         if self.opcodes is not None:
+             return self.opcodes
+         i = j = 0
+         self.opcodes = answer = []
+         for ai, bj, size in self.get_matching_blocks():
+             # invariant: we've pumped out correct diffs to change
+             # a[:i] into b[:j], and the next matching block is
+             # a[ai:ai+size] == b[bj:bj+size]. So we need to pump
+             # out a diff to change a[i:ai] into b[j:bj], pump out
+             # the matching block, and move (i,j) beyond the match
+             tag = ''
+             if i < ai and j < bj:
+                 tag = 'replace'
+             elif i < ai:
+                 tag = 'delete'
+             elif j < bj:
+                 tag = 'insert'
+             if tag:
+                 answer.append( (tag, i, ai, j, bj) )
+             i, j = ai+size, bj+size
+             # the list of matching blocks is terminated by a
+             # sentinel with size 0
+             if size:
+                 answer.append( ('equal', ai, i, bj, j) )
+         return answer
+
+     def get_grouped_opcodes(self, n=3):
+         """ Isolate change clusters by eliminating ranges with no changes.
+
+         Return a generator of groups with up to n lines of context.
+         Each group is in the same format as returned by get_opcodes().
+
+         >>> from pprint import pprint
+         >>> a = list(map(str, range(1,40)))
+         >>> b = a[:]
+         >>> b[8:8] = ['i']     # Make an insertion
+         >>> b[20] += 'x'       # Make a replacement
+         >>> b[23:28] = []      # Make a deletion
+         >>> b[30] += 'y'       # Make another replacement
+         >>> pprint(list(SequenceMatcher(None,a,b).get_grouped_opcodes()))
+         [[('equal', 5, 8, 5, 8), ('insert', 8, 8, 8, 9), ('equal', 8, 11, 9, 12)],
+          [('equal', 16, 19, 17, 20),
+           ('replace', 19, 20, 20, 21),
+           ('equal', 20, 22, 21, 23),
+           ('delete', 22, 27, 23, 23),
+           ('equal', 27, 30, 23, 26)],
+          [('equal', 31, 34, 27, 30),
+           ('replace', 34, 35, 30, 31),
+           ('equal', 35, 38, 31, 34)]]
+         """
+
+         codes = self.get_opcodes()
+         if not codes:
+             codes = [("equal", 0, 1, 0, 1)]
+         # Fixup leading and trailing groups if they show no changes.
+         if codes[0][0] == 'equal':
+             tag, i1, i2, j1, j2 = codes[0]
+             codes[0] = tag, max(i1, i2-n), i2, max(j1, j2-n), j2
+         if codes[-1][0] == 'equal':
+             tag, i1, i2, j1, j2 = codes[-1]
+             codes[-1] = tag, i1, min(i2, i1+n), j1, min(j2, j1+n)
+
+         nn = n + n
+         group = []
+         for tag, i1, i2, j1, j2 in codes:
+             # End the current group and start a new one whenever
+             # there is a large range with no changes.
+             if tag == 'equal' and i2-i1 > nn:
+                 group.append((tag, i1, min(i2, i1+n), j1, min(j2, j1+n)))
+                 yield group
+                 group = []
+                 i1, j1 = max(i1, i2-n), max(j1, j2-n)
+             group.append((tag, i1, i2, j1 ,j2))
+         if group and not (len(group)==1 and group[0][0] == 'equal'):
+             yield group
+
+     def ratio(self):
+         """Return a measure of the sequences' similarity (float in [0,1]).
+
+         Where T is the total number of elements in both sequences, and
+         M is the number of matches, this is 2.0*M / T.
+         Note that this is 1 if the sequences are identical, and 0 if
+         they have nothing in common.
+
+         .ratio() is expensive to compute if you haven't already computed
+         .get_matching_blocks() or .get_opcodes(), in which case you may
+         want to try .quick_ratio() or .real_quick_ratio() first to get an
+         upper bound.
+
+         >>> s = SequenceMatcher(None, "abcd", "bcde")
+         >>> s.ratio()
+         0.75
+         >>> s.quick_ratio()
+         0.75
+         >>> s.real_quick_ratio()
+         1.0
+         """
+
+         matches = sum(triple[-1] for triple in self.get_matching_blocks())
+         return _calculate_ratio(matches, len(self.a) + len(self.b))
+
+     def quick_ratio(self):
+         """Return an upper bound on ratio() relatively quickly.
+
+         This isn't defined beyond that it is an upper bound on .ratio(), and
+         is faster to compute.
+         """
+
+         # viewing a and b as multisets, set matches to the cardinality
+         # of their intersection; this counts the number of matches
+         # without regard to order, so is clearly an upper bound
+         if self.fullbcount is None:
+             self.fullbcount = fullbcount = {}
+             for elt in self.b:
+                 fullbcount[elt] = fullbcount.get(elt, 0) + 1
+         fullbcount = self.fullbcount
+         # avail[x] is the number of times x appears in 'b' less the
+         # number of times we've seen it in 'a' so far ... kinda
+         avail = {}
+         availhas, matches = avail.__contains__, 0
+         for elt in self.a:
+             if availhas(elt):
+                 numb = avail[elt]
+             else:
+                 numb = fullbcount.get(elt, 0)
+             avail[elt] = numb - 1
+             if numb > 0:
+                 matches = matches + 1
+         return _calculate_ratio(matches, len(self.a) + len(self.b))
+
+     def real_quick_ratio(self):
+         """Return an upper bound on ratio() very quickly.
+
+         This isn't defined beyond that it is an upper bound on .ratio(), and
+         is faster to compute than either .ratio() or .quick_ratio().
+         """
+
+         la, lb = len(self.a), len(self.b)
+         # can't have more matches than the number of elements in the
+         # shorter sequence
+         return _calculate_ratio(min(la, lb), la + lb)
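+
+     # Illustrative note (not part of the original module): the three ratios
+     # form a cheap-to-expensive cascade, so a common filtering idiom (used
+     # by get_close_matches() below) short-circuits on the cheap upper
+     # bounds before paying for the exact one:
+     #
+     #     if s.real_quick_ratio() >= cutoff and \
+     #        s.quick_ratio() >= cutoff and \
+     #        s.ratio() >= cutoff:
+     #         ...  # close enough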
+
+     __class_getitem__ = classmethod(GenericAlias)
+
+
+ def get_close_matches(word, possibilities, n=3, cutoff=0.6):
+     """Use SequenceMatcher to return list of the best "good enough" matches.
+
+     word is a sequence for which close matches are desired (typically a
+     string).
+
+     possibilities is a list of sequences against which to match word
+     (typically a list of strings).
+
+     Optional arg n (default 3) is the maximum number of close matches to
+     return. n must be > 0.
+
+     Optional arg cutoff (default 0.6) is a float in [0, 1]. Possibilities
+     that don't score at least that similar to word are ignored.
+
+     The best (no more than n) matches among the possibilities are returned
+     in a list, sorted by similarity score, most similar first.
+
+     >>> get_close_matches("appel", ["ape", "apple", "peach", "puppy"])
+     ['apple', 'ape']
+     >>> import keyword as _keyword
+     >>> get_close_matches("wheel", _keyword.kwlist)
+     ['while']
+     >>> get_close_matches("Apple", _keyword.kwlist)
+     []
+     >>> get_close_matches("accept", _keyword.kwlist)
+     ['except']
+     """
+
+     if not n >  0:
+         raise ValueError("n must be > 0: %r" % (n,))
+     if not 0.0 <= cutoff <= 1.0:
+         raise ValueError("cutoff must be in [0.0, 1.0]: %r" % (cutoff,))
+     result = []
+     s = SequenceMatcher()
+     s.set_seq2(word)
+     for x in possibilities:
+         s.set_seq1(x)
+         if s.real_quick_ratio() >= cutoff and \
+            s.quick_ratio() >= cutoff and \
+            s.ratio() >= cutoff:
+             result.append((s.ratio(), x))
+
+     # Move the best scorers to head of list
+     result = _nlargest(n, result)
+     # Strip scores for the best n matches
+     return [x for score, x in result]
+
+
+ def _keep_original_ws(s, tag_s):
+     """Replace whitespace with the original whitespace characters in `s`"""
+     return ''.join(
+         c if tag_c == " " and c.isspace() else tag_c
+         for c, tag_c in zip(s, tag_s)
+     )
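+
+ # Illustrative sketch (not part of the original module): _keep_original_ws
+ # lets the '?' guide lines reproduce tabs from the source line so the
+ # markers still line up visually under tab-indented text, e.g.
+ #
+ #     _keep_original_ws('\tabc\n', ' ^^ ')   # '\t^^ ' (before rstrip())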
+
+
+
+ class Differ:
+     r"""
+     Differ is a class for comparing sequences of lines of text, and
+     producing human-readable differences or deltas. Differ uses
+     SequenceMatcher both to compare sequences of lines, and to compare
+     sequences of characters within similar (near-matching) lines.
+
+     Each line of a Differ delta begins with a two-letter code:
+
+         '- '    line unique to sequence 1
+         '+ '    line unique to sequence 2
+         '  '    line common to both sequences
+         '? '    line not present in either input sequence
+
+     Lines beginning with '? ' attempt to guide the eye to intraline
+     differences, and were not present in either input sequence. These lines
+     can be confusing if the sequences contain tab characters.
+
+     Note that Differ makes no claim to produce a *minimal* diff. To the
+     contrary, minimal diffs are often counter-intuitive, because they synch
+     up anywhere possible, sometimes accidental matches 100 pages apart.
+     Restricting synch points to contiguous matches preserves some notion of
+     locality, at the occasional cost of producing a longer diff.
+
+     Example: Comparing two texts.
+
+     First we set up the texts, sequences of individual single-line strings
+     ending with newlines (such sequences can also be obtained from the
+     `readlines()` method of file-like objects):
+
+     >>> text1 = '''  1. Beautiful is better than ugly.
+     ...   2. Explicit is better than implicit.
+     ...   3. Simple is better than complex.
+     ...   4. Complex is better than complicated.
+     ... '''.splitlines(keepends=True)
+     >>> len(text1)
+     4
+     >>> text1[0][-1]
+     '\n'
+     >>> text2 = '''  1. Beautiful is better than ugly.
+     ...   3.   Simple is better than complex.
+     ...   4. Complicated is better than complex.
+     ...   5. Flat is better than nested.
+     ... '''.splitlines(keepends=True)
+
+     Next we instantiate a Differ object:
+
+     >>> d = Differ()
+
+     Note that when instantiating a Differ object we may pass functions to
+     filter out line and character 'junk'. See Differ.__init__ for details.
+
+     Finally, we compare the two:
+
+     >>> result = list(d.compare(text1, text2))
+
+     'result' is a list of strings, so let's pretty-print it:
+
+     >>> from pprint import pprint as _pprint
+     >>> _pprint(result)
+     ['    1. Beautiful is better than ugly.\n',
+      '-   2. Explicit is better than implicit.\n',
+      '-   3. Simple is better than complex.\n',
+      '+   3.   Simple is better than complex.\n',
+      '?     ++\n',
+      '-   4. Complex is better than complicated.\n',
+      '?            ^                     ---- ^\n',
+      '+   4. Complicated is better than complex.\n',
+      '?           ++++ ^                      ^\n',
+      '+   5. Flat is better than nested.\n']
+
+     As a single multi-line string it looks like this:
+
+     >>> print(''.join(result), end="")
+         1. Beautiful is better than ugly.
+     -   2. Explicit is better than implicit.
+     -   3. Simple is better than complex.
+     +   3.   Simple is better than complex.
+     ?     ++
+     -   4. Complex is better than complicated.
+     ?            ^                     ---- ^
+     +   4. Complicated is better than complex.
+     ?           ++++ ^                      ^
+     +   5. Flat is better than nested.
+     """
+
+     def __init__(self, linejunk=None, charjunk=None):
+         """
+         Construct a text differencer, with optional filters.
+
+         The two optional keyword parameters are for filter functions:
+
+         - `linejunk`: A function that should accept a single string argument,
+           and return true iff the string is junk. The module-level function
+           `IS_LINE_JUNK` may be used to filter out lines without visible
+           characters, except for at most one splat ('#'). It is recommended
+           to leave linejunk None; the underlying SequenceMatcher class has
+           an adaptive notion of "noise" lines that's better than any static
+           definition the author has ever been able to craft.
+
+         - `charjunk`: A function that should accept a string of length 1. The
+           module-level function `IS_CHARACTER_JUNK` may be used to filter out
+           whitespace characters (a blank or tab; **note**: bad idea to include
+           newline in this!). Use of IS_CHARACTER_JUNK is recommended.
+         """
+
+         self.linejunk = linejunk
+         self.charjunk = charjunk
+
+     def compare(self, a, b):
+         r"""
+         Compare two sequences of lines; generate the resulting delta.
+
+         Each sequence must contain individual single-line strings ending with
+         newlines. Such sequences can be obtained from the `readlines()` method
+         of file-like objects. The delta generated also consists of newline-
+         terminated strings, ready to be printed as-is via the writelines()
+         method of a file-like object.
+
+         Example:
+
+         >>> print(''.join(Differ().compare('one\ntwo\nthree\n'.splitlines(True),
+         ...                                'ore\ntree\nemu\n'.splitlines(True))),
+         ...       end="")
+         - one
+         ?  ^
+         + ore
+         ?  ^
+         - two
+         - three
+         ?  -
+         + tree
+         + emu
+         """
+
+         cruncher = SequenceMatcher(self.linejunk, a, b)
+         for tag, alo, ahi, blo, bhi in cruncher.get_opcodes():
+             if tag == 'replace':
+                 g = self._fancy_replace(a, alo, ahi, b, blo, bhi)
+             elif tag == 'delete':
+                 g = self._dump('-', a, alo, ahi)
+             elif tag == 'insert':
+                 g = self._dump('+', b, blo, bhi)
+             elif tag == 'equal':
+                 g = self._dump(' ', a, alo, ahi)
+             else:
+                 raise ValueError('unknown tag %r' % (tag,))
+
+             yield from g
+
+     def _dump(self, tag, x, lo, hi):
+         """Generate comparison results for a same-tagged range."""
+         for i in range(lo, hi):
+             yield '%s %s' % (tag, x[i])
+
+     def _plain_replace(self, a, alo, ahi, b, blo, bhi):
+         assert alo < ahi and blo < bhi
+         # dump the shorter block first -- reduces the burden on short-term
+         # memory if the blocks are of very different sizes
+         if bhi - blo < ahi - alo:
+             first  = self._dump('+', b, blo, bhi)
+             second = self._dump('-', a, alo, ahi)
+         else:
+             first  = self._dump('-', a, alo, ahi)
+             second = self._dump('+', b, blo, bhi)
+
+         for g in first, second:
+             yield from g
+
+     def _fancy_replace(self, a, alo, ahi, b, blo, bhi):
+         r"""
+         When replacing one block of lines with another, search the blocks
+         for *similar* lines; the best-matching pair (if any) is used as a
+         synch point, and intraline difference marking is done on the
+         similar pair. Lots of work, but often worth it.
+
+         Example:
+
+         >>> d = Differ()
+         >>> results = d._fancy_replace(['abcDefghiJkl\n'], 0, 1,
+         ...                            ['abcdefGhijkl\n'], 0, 1)
+         >>> print(''.join(results), end="")
+         - abcDefghiJkl
+         ?    ^  ^  ^
+         + abcdefGhijkl
+         ?    ^  ^  ^
+         """
+
+         # don't synch up unless the lines have a similarity score of at
+         # least cutoff; best_ratio tracks the best score seen so far
+         best_ratio, cutoff = 0.74, 0.75
+         cruncher = SequenceMatcher(self.charjunk)
+         eqi, eqj = None, None   # 1st indices of equal lines (if any)
+
+         # search for the pair that matches best without being identical
+         # (identical lines must be junk lines, & we don't want to synch up
+         # on junk -- unless we have to)
+         for j in range(blo, bhi):
+             bj = b[j]
+             cruncher.set_seq2(bj)
+             for i in range(alo, ahi):
+                 ai = a[i]
+                 if ai == bj:
+                     if eqi is None:
+                         eqi, eqj = i, j
+                     continue
+                 cruncher.set_seq1(ai)
+                 # computing similarity is expensive, so use the quick
+                 # upper bounds first -- have seen this speed up messy
+                 # compares by a factor of 3.
+                 # note that ratio() is only expensive to compute the first
+                 # time it's called on a sequence pair; the expensive part
+                 # of the computation is cached by cruncher
+                 if cruncher.real_quick_ratio() > best_ratio and \
+                       cruncher.quick_ratio() > best_ratio and \
+                       cruncher.ratio() > best_ratio:
+                     best_ratio, best_i, best_j = cruncher.ratio(), i, j
+         if best_ratio < cutoff:
+             # no non-identical "pretty close" pair
+             if eqi is None:
+                 # no identical pair either -- treat it as a straight replace
+                 yield from self._plain_replace(a, alo, ahi, b, blo, bhi)
+                 return
+             # no close pair, but an identical pair -- synch up on that
+             best_i, best_j, best_ratio = eqi, eqj, 1.0
+         else:
+             # there's a close pair, so forget the identical pair (if any)
+             eqi = None
+
+         # a[best_i] very similar to b[best_j]; eqi is None iff they're not
+         # identical
+
+         # pump out diffs from before the synch point
+         yield from self._fancy_helper(a, alo, best_i, b, blo, best_j)
+
+         # do intraline marking on the synch pair
+         aelt, belt = a[best_i], b[best_j]
+         if eqi is None:
+             # pump out a '-', '?', '+', '?' quad for the synched lines
+             atags = btags = ""
+             cruncher.set_seqs(aelt, belt)
+             for tag, ai1, ai2, bj1, bj2 in cruncher.get_opcodes():
+                 la, lb = ai2 - ai1, bj2 - bj1
+                 if tag == 'replace':
+                     atags += '^' * la
+                     btags += '^' * lb
+                 elif tag == 'delete':
+                     atags += '-' * la
+                 elif tag == 'insert':
+                     btags += '+' * lb
+                 elif tag == 'equal':
+                     atags += ' ' * la
+                     btags += ' ' * lb
+                 else:
+                     raise ValueError('unknown tag %r' % (tag,))
+             yield from self._qformat(aelt, belt, atags, btags)
+         else:
+             # the synch pair is identical
+             yield '  ' + aelt
+
+         # pump out diffs from after the synch point
+         yield from self._fancy_helper(a, best_i+1, ahi, b, best_j+1, bhi)
+
+     def _fancy_helper(self, a, alo, ahi, b, blo, bhi):
+         g = []
+         if alo < ahi:
+             if blo < bhi:
+                 g = self._fancy_replace(a, alo, ahi, b, blo, bhi)
+             else:
+                 g = self._dump('-', a, alo, ahi)
+         elif blo < bhi:
+             g = self._dump('+', b, blo, bhi)
+
+         yield from g
+
+     def _qformat(self, aline, bline, atags, btags):
+         r"""
+         Format "?" output and deal with tabs.
+
+         Example:
+
+         >>> d = Differ()
+         >>> results = d._qformat('\tabcDefghiJkl\n', '\tabcdefGhijkl\n',
+         ...                      '  ^ ^  ^      ', '  ^ ^  ^      ')
+         >>> for line in results: print(repr(line))
+         ...
+         '- \tabcDefghiJkl\n'
+         '? \t ^ ^  ^\n'
+         '+ \tabcdefGhijkl\n'
+         '? \t ^ ^  ^\n'
+         """
+         atags = _keep_original_ws(aline, atags).rstrip()
+         btags = _keep_original_ws(bline, btags).rstrip()
+
+         yield "- " + aline
+         if atags:
+             yield f"? {atags}\n"
+
+         yield "+ " + bline
+         if btags:
+             yield f"? {btags}\n"
+
+ # With respect to junk, an earlier version of ndiff simply refused to
+ # *start* a match with a junk element. The result was cases like this:
+ #     before: private Thread currentThread;
+ #     after:  private volatile Thread currentThread;
+ # If you consider whitespace to be junk, the longest contiguous match
+ # not starting with junk is "e Thread currentThread". So ndiff reported
1032
+ # that "e volatil" was inserted between the 't' and the 'e' in "private".
1033
+ # While an accurate view, to people that's absurd. The current version
1034
+ # looks for matching blocks that are entirely junk-free, then extends the
1035
+ # longest one of those as far as possible but only with matching junk.
1036
+ # So now "currentThread" is matched, then extended to suck up the
1037
+ # preceding blank; then "private" is matched, and extended to suck up the
1038
+ # following blank; then "Thread" is matched; and finally ndiff reports
1039
+ # that "volatile " was inserted before "Thread". The only quibble
1040
+ # remaining is that perhaps it was really the case that " volatile"
1041
+ # was inserted after "private". I can live with that <wink>.
1042
+
1043
+ import re
1044
+
1045
+ def IS_LINE_JUNK(line, pat=re.compile(r"\s*(?:#\s*)?$").match):
1046
+ r"""
1047
+ Return True for ignorable line: iff `line` is blank or contains a single '#'.
1048
+
1049
+ Examples:
1050
+
1051
+ >>> IS_LINE_JUNK('\n')
1052
+ True
1053
+ >>> IS_LINE_JUNK(' # \n')
1054
+ True
1055
+ >>> IS_LINE_JUNK('hello\n')
1056
+ False
1057
+ """
1058
+
1059
+ return pat(line) is not None
1060
+
1061
+ def IS_CHARACTER_JUNK(ch, ws=" \t"):
1062
+ r"""
1063
+ Return True for ignorable character: iff `ch` is a space or tab.
1064
+
1065
+ Examples:
1066
+
1067
+ >>> IS_CHARACTER_JUNK(' ')
1068
+ True
1069
+ >>> IS_CHARACTER_JUNK('\t')
1070
+ True
1071
+ >>> IS_CHARACTER_JUNK('\n')
1072
+ False
1073
+ >>> IS_CHARACTER_JUNK('x')
1074
+ False
1075
+ """
1076
+
1077
+ return ch in ws
1078
+
1079
+
1080
+ ########################################################################
1081
+ ### Unified Diff
1082
+ ########################################################################
1083
+
1084
+ def _format_range_unified(start, stop):
1085
+ 'Convert range to the "ed" format'
1086
+ # Per the diff spec at http://www.unix.org/single_unix_specification/
1087
+ beginning = start + 1 # lines start numbering with one
1088
+ length = stop - start
1089
+ if length == 1:
1090
+ return '{}'.format(beginning)
1091
+ if not length:
1092
+ beginning -= 1 # empty ranges begin at line just before the range
1093
+ return '{},{}'.format(beginning, length)
1094
+
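+ # A quick sketch of the formatting rules above (values hand-checked,
+ # not taken from the module's tests): one-line ranges collapse to a
+ # bare number, and empty ranges point at the preceding line.
+ #
+ #     >>> _format_range_unified(0, 1)
+ #     '1'
+ #     >>> _format_range_unified(0, 3)
+ #     '1,3'
+ #     >>> _format_range_unified(3, 3)
+ #     '3,0'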
1095
+ def unified_diff(a, b, fromfile='', tofile='', fromfiledate='',
1096
+ tofiledate='', n=3, lineterm='\n'):
1097
+ r"""
1098
+ Compare two sequences of lines; generate the delta as a unified diff.
1099
+
1100
+ Unified diffs are a compact way of showing line changes and a few
1101
+ lines of context. The number of context lines is set by 'n' which
1102
+ defaults to three.
1103
+
1104
+ By default, the diff control lines (those with ---, +++, or @@) are
1105
+ created with a trailing newline. This is helpful so that inputs
1106
+ created from file.readlines() result in diffs that are suitable for
1107
+ file.writelines() since both the inputs and outputs have trailing
1108
+ newlines.
1109
+
1110
+ For inputs that do not have trailing newlines, set the lineterm
1111
+ argument to "" so that the output will be uniformly newline free.
1112
+
1113
+ The unidiff format normally has a header for filenames and modification
1114
+ times. Any or all of these may be specified using strings for
1115
+ 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'.
1116
+ The modification times are normally expressed in the ISO 8601 format.
1117
+
1118
+ Example:
1119
+
1120
+ >>> for line in unified_diff('one two three four'.split(),
1121
+ ... 'zero one tree four'.split(), 'Original', 'Current',
1122
+ ... '2005-01-26 23:30:50', '2010-04-02 10:20:52',
1123
+ ... lineterm=''):
1124
+ ... print(line) # doctest: +NORMALIZE_WHITESPACE
1125
+ --- Original 2005-01-26 23:30:50
1126
+ +++ Current 2010-04-02 10:20:52
1127
+ @@ -1,4 +1,4 @@
1128
+ +zero
1129
+ one
1130
+ -two
1131
+ -three
1132
+ +tree
1133
+ four
1134
+ """
1135
+
1136
+ _check_types(a, b, fromfile, tofile, fromfiledate, tofiledate, lineterm)
1137
+ started = False
1138
+ for group in SequenceMatcher(None,a,b).get_grouped_opcodes(n):
1139
+ if not started:
1140
+ started = True
1141
+ fromdate = '\t{}'.format(fromfiledate) if fromfiledate else ''
1142
+ todate = '\t{}'.format(tofiledate) if tofiledate else ''
1143
+ yield '--- {}{}{}'.format(fromfile, fromdate, lineterm)
1144
+ yield '+++ {}{}{}'.format(tofile, todate, lineterm)
1145
+
1146
+ first, last = group[0], group[-1]
1147
+ file1_range = _format_range_unified(first[1], last[2])
1148
+ file2_range = _format_range_unified(first[3], last[4])
1149
+ yield '@@ -{} +{} @@{}'.format(file1_range, file2_range, lineterm)
1150
+
1151
+ for tag, i1, i2, j1, j2 in group:
1152
+ if tag == 'equal':
1153
+ for line in a[i1:i2]:
1154
+ yield ' ' + line
1155
+ continue
1156
+ if tag in {'replace', 'delete'}:
1157
+ for line in a[i1:i2]:
1158
+ yield '-' + line
1159
+ if tag in {'replace', 'insert'}:
1160
+ for line in b[j1:j2]:
1161
+ yield '+' + line
1162
+
1163
+
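+ # Typical file-to-file usage (a sketch; the paths and `import sys`
+ # are assumed): readlines() keeps trailing newlines, so the default
+ # lineterm='\n' yields writelines()-ready output.
+ #
+ #     with open('old.txt') as f1, open('new.txt') as f2:
+ #         sys.stdout.writelines(unified_diff(
+ #             f1.readlines(), f2.readlines(),
+ #             fromfile='old.txt', tofile='new.txt'))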
1164
+ ########################################################################
1165
+ ### Context Diff
1166
+ ########################################################################
1167
+
1168
+ def _format_range_context(start, stop):
1169
+ 'Convert range to the "ed" format'
1170
+ # Per the diff spec at http://www.unix.org/single_unix_specification/
1171
+ beginning = start + 1 # lines start numbering with one
1172
+ length = stop - start
1173
+ if not length:
1174
+ beginning -= 1 # empty ranges begin at line just before the range
1175
+ if length <= 1:
1176
+ return '{}'.format(beginning)
1177
+ return '{},{}'.format(beginning, beginning + length - 1)
1178
+
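+ # Sketch of the context-style ranges (hand-checked): unlike the
+ # unified "start,length" form, this emits "first,last" line numbers,
+ # and single-line ranges collapse to one number.
+ #
+ #     >>> _format_range_context(0, 3)
+ #     '1,3'
+ #     >>> _format_range_context(0, 1)
+ #     '1'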
1179
+ # See http://www.unix.org/single_unix_specification/
1180
+ def context_diff(a, b, fromfile='', tofile='',
1181
+ fromfiledate='', tofiledate='', n=3, lineterm='\n'):
1182
+ r"""
1183
+ Compare two sequences of lines; generate the delta as a context diff.
1184
+
1185
+ Context diffs are a compact way of showing line changes and a few
1186
+ lines of context. The number of context lines is set by 'n' which
1187
+ defaults to three.
1188
+
1189
+ By default, the diff control lines (those with *** or ---) are
1190
+ created with a trailing newline. This is helpful so that inputs
1191
+ created from file.readlines() result in diffs that are suitable for
1192
+ file.writelines() since both the inputs and outputs have trailing
1193
+ newlines.
1194
+
1195
+ For inputs that do not have trailing newlines, set the lineterm
1196
+ argument to "" so that the output will be uniformly newline free.
1197
+
1198
+ The context diff format normally has a header for filenames and
1199
+ modification times. Any or all of these may be specified using
1200
+ strings for 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'.
1201
+ The modification times are normally expressed in the ISO 8601 format.
1202
+ If not specified, the strings default to blanks.
1203
+
1204
+ Example:
1205
+
1206
+ >>> print(''.join(context_diff('one\ntwo\nthree\nfour\n'.splitlines(True),
1207
+ ... 'zero\none\ntree\nfour\n'.splitlines(True), 'Original', 'Current')),
1208
+ ... end="")
1209
+ *** Original
1210
+ --- Current
1211
+ ***************
1212
+ *** 1,4 ****
1213
+ one
1214
+ ! two
1215
+ ! three
1216
+ four
1217
+ --- 1,4 ----
1218
+ + zero
1219
+ one
1220
+ ! tree
1221
+ four
1222
+ """
1223
+
1224
+ _check_types(a, b, fromfile, tofile, fromfiledate, tofiledate, lineterm)
1225
+ prefix = dict(insert='+ ', delete='- ', replace='! ', equal=' ')
1226
+ started = False
1227
+ for group in SequenceMatcher(None,a,b).get_grouped_opcodes(n):
1228
+ if not started:
1229
+ started = True
1230
+ fromdate = '\t{}'.format(fromfiledate) if fromfiledate else ''
1231
+ todate = '\t{}'.format(tofiledate) if tofiledate else ''
1232
+ yield '*** {}{}{}'.format(fromfile, fromdate, lineterm)
1233
+ yield '--- {}{}{}'.format(tofile, todate, lineterm)
1234
+
1235
+ first, last = group[0], group[-1]
1236
+ yield '***************' + lineterm
1237
+
1238
+ file1_range = _format_range_context(first[1], last[2])
1239
+ yield '*** {} ****{}'.format(file1_range, lineterm)
1240
+
1241
+ if any(tag in {'replace', 'delete'} for tag, _, _, _, _ in group):
1242
+ for tag, i1, i2, _, _ in group:
1243
+ if tag != 'insert':
1244
+ for line in a[i1:i2]:
1245
+ yield prefix[tag] + line
1246
+
1247
+ file2_range = _format_range_context(first[3], last[4])
1248
+ yield '--- {} ----{}'.format(file2_range, lineterm)
1249
+
1250
+ if any(tag in {'replace', 'insert'} for tag, _, _, _, _ in group):
1251
+ for tag, _, _, j1, j2 in group:
1252
+ if tag != 'delete':
1253
+ for line in b[j1:j2]:
1254
+ yield prefix[tag] + line
1255
+
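+ # The call signature mirrors unified_diff(); only the rendering
+ # differs. A minimal sketch (inputs illustrative, `import sys`
+ # assumed):
+ #
+ #     a = ['one\n', 'two\n']
+ #     b = ['one\n', 'three\n']
+ #     sys.stdout.writelines(context_diff(a, b, 'a.txt', 'b.txt'))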
1256
+ def _check_types(a, b, *args):
1257
+ # Checking types is weird, but the alternative is garbled output when
1258
+ # someone passes mixed bytes and str to {unified,context}_diff(). E.g.
1259
+ # without this check, passing filenames as bytes results in output like
1260
+ # --- b'oldfile.txt'
1261
+ # +++ b'newfile.txt'
1262
+ # because of how str.format() incorporates bytes objects.
1263
+ if a and not isinstance(a[0], str):
1264
+ raise TypeError('lines to compare must be str, not %s (%r)' %
1265
+ (type(a[0]).__name__, a[0]))
1266
+ if b and not isinstance(b[0], str):
1267
+ raise TypeError('lines to compare must be str, not %s (%r)' %
1268
+ (type(b[0]).__name__, b[0]))
1269
+ for arg in args:
1270
+ if not isinstance(arg, str):
1271
+ raise TypeError('all arguments must be str, not: %r' % (arg,))
1272
+
1273
+ def diff_bytes(dfunc, a, b, fromfile=b'', tofile=b'',
1274
+ fromfiledate=b'', tofiledate=b'', n=3, lineterm=b'\n'):
1275
+ r"""
1276
+ Compare `a` and `b`, two sequences of lines represented as bytes rather
1277
+ than str. This is a wrapper for `dfunc`, which is typically either
1278
+ unified_diff() or context_diff(). Inputs are losslessly converted to
1279
+ strings so that `dfunc` only has to worry about strings, and encoded
1280
+ back to bytes on return. This is necessary to compare files with
1281
+ unknown or inconsistent encoding. All other inputs (except `n`) must be
1282
+ bytes rather than str.
1283
+ """
1284
+ def decode(s):
1285
+ try:
1286
+ return s.decode('ascii', 'surrogateescape')
1287
+ except AttributeError as err:
1288
+ msg = ('all arguments must be bytes, not %s (%r)' %
1289
+ (type(s).__name__, s))
1290
+ raise TypeError(msg) from err
1291
+ a = list(map(decode, a))
1292
+ b = list(map(decode, b))
1293
+ fromfile = decode(fromfile)
1294
+ tofile = decode(tofile)
1295
+ fromfiledate = decode(fromfiledate)
1296
+ tofiledate = decode(tofiledate)
1297
+ lineterm = decode(lineterm)
1298
+
1299
+ lines = dfunc(a, b, fromfile, tofile, fromfiledate, tofiledate, n, lineterm)
1300
+ for line in lines:
1301
+ yield line.encode('ascii', 'surrogateescape')
1302
+
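+ # Sketch: diffing byte lines of unknown encoding via this wrapper
+ # (names illustrative). surrogateescape makes even non-ASCII bytes
+ # round-trip losslessly:
+ #
+ #     old = [b'caf\xe9\n']          # not valid UTF-8, still fine
+ #     new = [b'cafe\n']
+ #     for line in diff_bytes(unified_diff, old, new,
+ #                            fromfile=b'old', tofile=b'new'):
+ #         ...                       # each yielded line is bytes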
1303
+ def ndiff(a, b, linejunk=None, charjunk=IS_CHARACTER_JUNK):
1304
+ r"""
1305
+ Compare `a` and `b` (lists of strings); return a `Differ`-style delta.
1306
+
1307
+ Optional keyword parameters `linejunk` and `charjunk` are for filter
1308
+ functions, or can be None:
1309
+
1310
+ - linejunk: A function that should accept a single string argument and
1311
+ return true iff the string is junk. The default is None, and is
1312
+ recommended; the underlying SequenceMatcher class has an adaptive
1313
+ notion of "noise" lines.
1314
+
1315
+ - charjunk: A function that accepts a character (string of length
1316
+ 1), and returns true iff the character is junk. The default is
1317
+ the module-level function IS_CHARACTER_JUNK, which filters out
1318
+ whitespace characters (a blank or tab; note: it's a bad idea to
1319
+ include newline in this!).
1320
+
1321
+ Tools/scripts/ndiff.py is a command-line front-end to this function.
1322
+
1323
+ Example:
1324
+
1325
+ >>> diff = ndiff('one\ntwo\nthree\n'.splitlines(keepends=True),
1326
+ ... 'ore\ntree\nemu\n'.splitlines(keepends=True))
1327
+ >>> print(''.join(diff), end="")
1328
+ - one
1329
+ ? ^
1330
+ + ore
1331
+ ? ^
1332
+ - two
1333
+ - three
1334
+ ? -
1335
+ + tree
1336
+ + emu
1337
+ """
1338
+ return Differ(linejunk, charjunk).compare(a, b)
1339
+
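+ # Sketch: passing the module's junk filters explicitly (charjunk
+ # already defaults to IS_CHARACTER_JUNK; linejunk is opt-in; the
+ # input lists are illustrative):
+ #
+ #     delta = ndiff(a_lines, b_lines,
+ #                   linejunk=IS_LINE_JUNK,       # skip blank/'#' lines
+ #                   charjunk=IS_CHARACTER_JUNK)  # skip blanks and tabs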
1340
+ def _mdiff(fromlines, tolines, context=None, linejunk=None,
1341
+ charjunk=IS_CHARACTER_JUNK):
1342
+ r"""Returns generator yielding marked up from/to side by side differences.
1343
+
1344
+ Arguments:
1345
+ fromlines -- list of text lines to be compared to tolines
1346
+ tolines -- list of text lines to be compared to fromlines
1347
+ context -- number of context lines to display on each side of difference,
1348
+ if None, all from/to text lines will be generated.
1349
+ linejunk -- passed on to ndiff (see ndiff documentation)
1350
+ charjunk -- passed on to ndiff (see ndiff documentation)
1351
+
1352
+ This function returns an iterator which returns a tuple:
1353
+ (from line tuple, to line tuple, boolean flag)
1354
+
1355
+ from/to line tuple -- (line num, line text)
1356
+ line num -- integer or None (to indicate a context separation)
1357
+ line text -- original line text with following markers inserted:
1358
+ '\0+' -- marks start of added text
1359
+ '\0-' -- marks start of deleted text
1360
+ '\0^' -- marks start of changed text
1361
+ '\1' -- marks end of added/deleted/changed text
1362
+
1363
+ boolean flag -- None indicates context separation, True indicates
1364
+ either "from" or "to" line contains a change, otherwise False.
1365
+
1366
+ This function/iterator was originally developed to generate side by side
1367
+ file difference for making HTML pages (see HtmlDiff class for example
1368
+ usage).
1369
+
1370
+ Note, this function utilizes the ndiff function to generate the side by
1371
+ side difference markup. Optional ndiff arguments may be passed to this
1372
+ function and they in turn will be passed to ndiff.
1373
+ """
1374
+ import re
1375
+
1376
+ # regular expression for finding intraline change indices
1377
+ change_re = re.compile(r'(\++|\-+|\^+)')
1378
+
1379
+ # create the difference iterator to generate the differences
1380
+ diff_lines_iterator = ndiff(fromlines,tolines,linejunk,charjunk)
1381
+
1382
+ def _make_line(lines, format_key, side, num_lines=[0,0]):
1383
+ """Returns line of text with user's change markup and line formatting.
1384
+
1385
+ lines -- list of lines from the ndiff generator to produce a line of
1386
+ text from. When producing the line of text to return, the
1387
+ lines used are removed from this list.
1388
+ format_key -- '+' return first line in list with "add" markup around
1389
+ the entire line.
1390
+ '-' return first line in list with "delete" markup around
1391
+ the entire line.
1392
+ '?' return first line in list with add/delete/change
1393
+ intraline markup (indices obtained from second line)
1394
+ None return first line in list with no markup
1395
+ side -- index into the num_lines list (0=from,1=to)
1396
+ num_lines -- from/to current line number. This is NOT intended to be a
1397
+ passed parameter. It is present as a keyword argument to
1398
+ maintain memory of the current line numbers between calls
1399
+ of this function.
1400
+
1401
+ Note, this function is purposefully not defined at the module scope so
1402
+ that data it needs from its parent function (within whose context it
1403
+ is defined) does not need to be of module scope.
1404
+ """
1405
+ num_lines[side] += 1
1406
+ # Handle case where no user markup is to be added, just return line of
1407
+ # text with user's line format to allow for usage of the line number.
1408
+ if format_key is None:
1409
+ return (num_lines[side],lines.pop(0)[2:])
1410
+ # Handle case of intraline changes
1411
+ if format_key == '?':
1412
+ text, markers = lines.pop(0), lines.pop(0)
1413
+ # find intraline changes (store change type and indices in tuples)
1414
+ sub_info = []
1415
+ def record_sub_info(match_object,sub_info=sub_info):
1416
+ sub_info.append([match_object.group(1)[0],match_object.span()])
1417
+ return match_object.group(1)
1418
+ change_re.sub(record_sub_info,markers)
1419
+ # process each tuple inserting our special marks that won't be
1420
+ # noticed by an xml/html escaper.
1421
+ for key,(begin,end) in reversed(sub_info):
1422
+ text = text[0:begin]+'\0'+key+text[begin:end]+'\1'+text[end:]
1423
+ text = text[2:]
1424
+ # Handle case of add/delete entire line
1425
+ else:
1426
+ text = lines.pop(0)[2:]
1427
+ # if line of text is just a newline, insert a space so there is
1428
+ # something for the user to highlight and see.
1429
+ if not text:
1430
+ text = ' '
1431
+ # insert marks that won't be noticed by an xml/html escaper.
1432
+ text = '\0' + format_key + text + '\1'
1433
+ # Return line of text, first allow user's line formatter to do its
1434
+ # thing (such as adding the line number) then replace the special
1435
+ # marks with the user's change markup.
1436
+ return (num_lines[side],text)
1437
+
1438
+ def _line_iterator():
1439
+ """Yields from/to lines of text with a change indication.
1440
+
1441
+ This function is an iterator. It itself pulls lines from a
1442
+ differencing iterator, processes them and yields them. When it can
1443
+ it yields both a "from" and a "to" line, otherwise it will yield one
1444
+ or the other. In addition to yielding the lines of from/to text, a
1445
+ boolean flag is yielded to indicate if the text line(s) have
1446
+ differences in them.
1447
+
1448
+ Note, this function is purposefully not defined at the module scope so
1449
+ that data it needs from its parent function (within whose context it
1450
+ is defined) does not need to be of module scope.
1451
+ """
1452
+ lines = []
1453
+ num_blanks_pending, num_blanks_to_yield = 0, 0
1454
+ while True:
1455
+ # Load up next 4 lines so we can look ahead, create strings which
1456
+ # are a concatenation of the first character of each of the 4 lines
1457
+ # so we can do some very readable comparisons.
1458
+ while len(lines) < 4:
1459
+ lines.append(next(diff_lines_iterator, 'X'))
1460
+ s = ''.join([line[0] for line in lines])
1461
+ if s.startswith('X'):
1462
+ # When no more lines, pump out any remaining blank lines so the
1463
+ # corresponding add/delete lines get a matching blank line so
1464
+ # all line pairs get yielded at the next level.
1465
+ num_blanks_to_yield = num_blanks_pending
1466
+ elif s.startswith('-?+?'):
1467
+ # simple intraline change
1468
+ yield _make_line(lines,'?',0), _make_line(lines,'?',1), True
1469
+ continue
1470
+ elif s.startswith('--++'):
1471
+ # in delete block, add block coming: we do NOT want to get
1472
+ # caught up on blank lines yet, just process the delete line
1473
+ num_blanks_pending -= 1
1474
+ yield _make_line(lines,'-',0), None, True
1475
+ continue
1476
+ elif s.startswith(('--?+', '--+', '- ')):
1477
+ # in delete block and see an intraline change or unchanged line
1478
+ # coming: yield the delete line and then blanks
1479
+ from_line,to_line = _make_line(lines,'-',0), None
1480
+ num_blanks_to_yield,num_blanks_pending = num_blanks_pending-1,0
1481
+ elif s.startswith('-+?'):
1482
+ # intraline change
1483
+ yield _make_line(lines,None,0), _make_line(lines,'?',1), True
1484
+ continue
1485
+ elif s.startswith('-?+'):
1486
+ # intraline change
1487
+ yield _make_line(lines,'?',0), _make_line(lines,None,1), True
1488
+ continue
1489
+ elif s.startswith('-'):
1490
+ # delete FROM line
1491
+ num_blanks_pending -= 1
1492
+ yield _make_line(lines,'-',0), None, True
1493
+ continue
1494
+ elif s.startswith('+--'):
1495
+ # in add block, delete block coming: we do NOT want to get
1496
+ # caught up on blank lines yet, just process the add line
1497
+ num_blanks_pending += 1
1498
+ yield None, _make_line(lines,'+',1), True
1499
+ continue
1500
+ elif s.startswith(('+ ', '+-')):
1501
+ # will be leaving an add block: yield blanks then add line
1502
+ from_line, to_line = None, _make_line(lines,'+',1)
1503
+ num_blanks_to_yield,num_blanks_pending = num_blanks_pending+1,0
1504
+ elif s.startswith('+'):
1505
+ # inside an add block, yield the add line
1506
+ num_blanks_pending += 1
1507
+ yield None, _make_line(lines,'+',1), True
1508
+ continue
1509
+ elif s.startswith(' '):
1510
+ # unchanged text, yield it to both sides
1511
+ yield _make_line(lines[:],None,0),_make_line(lines,None,1),False
1512
+ continue
1513
+ # Catch up on the blank lines so when we yield the next from/to
1514
+ # pair, they are lined up.
1515
+ while(num_blanks_to_yield < 0):
1516
+ num_blanks_to_yield += 1
1517
+ yield None,('','\n'),True
1518
+ while(num_blanks_to_yield > 0):
1519
+ num_blanks_to_yield -= 1
1520
+ yield ('','\n'),None,True
1521
+ if s.startswith('X'):
1522
+ return
1523
+ else:
1524
+ yield from_line,to_line,True
1525
+
1526
+ def _line_pair_iterator():
1527
+ """Yields from/to lines of text with a change indication.
1528
+
1529
+ This function is an iterator. It itself pulls lines from the line
1530
+ iterator. Its difference from that iterator is that this function
1531
+ always yields a pair of from/to text lines (with the change
1532
+ indication). If necessary it will collect single from/to lines
1533
+ until it has a matching from/to pair to yield.
1534
+
1535
+ Note, this function is purposefully not defined at the module scope so
1536
+ that data it needs from its parent function (within whose context it
1537
+ is defined) does not need to be of module scope.
1538
+ """
1539
+ line_iterator = _line_iterator()
1540
+ fromlines,tolines=[],[]
1541
+ while True:
1542
+ # Collecting lines of text until we have a from/to pair
1543
+ while (len(fromlines)==0 or len(tolines)==0):
1544
+ try:
1545
+ from_line, to_line, found_diff = next(line_iterator)
1546
+ except StopIteration:
1547
+ return
1548
+ if from_line is not None:
1549
+ fromlines.append((from_line,found_diff))
1550
+ if to_line is not None:
1551
+ tolines.append((to_line,found_diff))
1552
+ # Once we have a pair, remove them from the collection and yield it
1553
+ from_line, fromDiff = fromlines.pop(0)
1554
+ to_line, to_diff = tolines.pop(0)
1555
+ yield (from_line,to_line,fromDiff or to_diff)
1556
+
1557
+ # Handle case where user does not want context differencing, just yield
1558
+ # them up without doing anything else with them.
1559
+ line_pair_iterator = _line_pair_iterator()
1560
+ if context is None:
1561
+ yield from line_pair_iterator
1562
+ # Handle case where user wants context differencing. We must do some
1563
+ # storage of lines until we know for sure that they are to be yielded.
1564
+ else:
1565
+ context += 1
1566
+ lines_to_write = 0
1567
+ while True:
1568
+ # Store lines up until we find a difference, note use of a
1569
+ # circular queue because we only need to keep around what
1570
+ # we need for context.
1571
+ index, contextLines = 0, [None]*(context)
1572
+ found_diff = False
1573
+ while(found_diff is False):
1574
+ try:
1575
+ from_line, to_line, found_diff = next(line_pair_iterator)
1576
+ except StopIteration:
1577
+ return
1578
+ i = index % context
1579
+ contextLines[i] = (from_line, to_line, found_diff)
1580
+ index += 1
1581
+ # Yield lines that we have collected so far, but first yield
1582
+ # the user's separator.
1583
+ if index > context:
1584
+ yield None, None, None
1585
+ lines_to_write = context
1586
+ else:
1587
+ lines_to_write = index
1588
+ index = 0
1589
+ while(lines_to_write):
1590
+ i = index % context
1591
+ index += 1
1592
+ yield contextLines[i]
1593
+ lines_to_write -= 1
1594
+ # Now yield the context lines after the change
1595
+ lines_to_write = context-1
1596
+ try:
1597
+ while(lines_to_write):
1598
+ from_line, to_line, found_diff = next(line_pair_iterator)
1599
+ # If another change within the context, extend the context
1600
+ if found_diff:
1601
+ lines_to_write = context-1
1602
+ else:
1603
+ lines_to_write -= 1
1604
+ yield from_line, to_line, found_diff
1605
+ except StopIteration:
1606
+ # Catch exception from next() and return normally
1607
+ return
1608
+
1609
+
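+ # A hand-traced sketch of one yielded triple for a one-character
+ # change, showing the '\0^'...'\1' change markers and the True flag
+ # (defaults assumed throughout):
+ #
+ #     >>> next(_mdiff(['one\n'], ['ore\n']))
+ #     ((1, 'o\x00^n\x01e\n'), (1, 'o\x00^r\x01e\n'), True)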
1610
+ _file_template = """
1611
+ <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
1612
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
1613
+
1614
+ <html>
1615
+
1616
+ <head>
1617
+ <meta http-equiv="Content-Type"
1618
+ content="text/html; charset=%(charset)s" />
1619
+ <title></title>
1620
+ <style type="text/css">%(styles)s
1621
+ </style>
1622
+ </head>
1623
+
1624
+ <body>
1625
+ %(table)s%(legend)s
1626
+ </body>
1627
+
1628
+ </html>"""
1629
+
1630
+ _styles = """
1631
+ table.diff {font-family:Courier; border:medium;}
1632
+ .diff_header {background-color:#e0e0e0}
1633
+ td.diff_header {text-align:right}
1634
+ .diff_next {background-color:#c0c0c0}
1635
+ .diff_add {background-color:#aaffaa}
1636
+ .diff_chg {background-color:#ffff77}
1637
+ .diff_sub {background-color:#ffaaaa}"""
1638
+
1639
+ _table_template = """
1640
+ <table class="diff" id="difflib_chg_%(prefix)s_top"
1641
+ cellspacing="0" cellpadding="0" rules="groups" >
1642
+ <colgroup></colgroup> <colgroup></colgroup> <colgroup></colgroup>
1643
+ <colgroup></colgroup> <colgroup></colgroup> <colgroup></colgroup>
1644
+ %(header_row)s
1645
+ <tbody>
1646
+ %(data_rows)s </tbody>
1647
+ </table>"""
1648
+
1649
+ _legend = """
1650
+ <table class="diff" summary="Legends">
1651
+ <tr> <th colspan="2"> Legends </th> </tr>
1652
+ <tr> <td> <table border="" summary="Colors">
1653
+ <tr><th> Colors </th> </tr>
1654
+ <tr><td class="diff_add">&nbsp;Added&nbsp;</td></tr>
1655
+ <tr><td class="diff_chg">Changed</td> </tr>
1656
+ <tr><td class="diff_sub">Deleted</td> </tr>
1657
+ </table></td>
1658
+ <td> <table border="" summary="Links">
1659
+ <tr><th colspan="2"> Links </th> </tr>
1660
+ <tr><td>(f)irst change</td> </tr>
1661
+ <tr><td>(n)ext change</td> </tr>
1662
+ <tr><td>(t)op</td> </tr>
1663
+ </table></td> </tr>
1664
+ </table>"""
1665
+
1666
+ class HtmlDiff(object):
1667
+ """For producing HTML side by side comparison with change highlights.
1668
+
1669
+ This class can be used to create an HTML table (or a complete HTML file
1670
+ containing the table) showing a side by side, line by line comparison
1671
+ of text with inter-line and intra-line change highlights. The table can
1672
+ be generated in either full or contextual difference mode.
1673
+
1674
+ The following methods are provided for HTML generation:
1675
+
1676
+ make_table -- generates HTML for a single side by side table
1677
+ make_file -- generates complete HTML file with a single side by side table
1678
+
1679
+ See tools/scripts/diff.py for an example usage of this class.
1680
+ """
1681
+
1682
+ _file_template = _file_template
1683
+ _styles = _styles
1684
+ _table_template = _table_template
1685
+ _legend = _legend
1686
+ _default_prefix = 0
1687
+
1688
+ def __init__(self,tabsize=8,wrapcolumn=None,linejunk=None,
1689
+ charjunk=IS_CHARACTER_JUNK):
1690
+ """HtmlDiff instance initializer
1691
+
1692
+ Arguments:
1693
+ tabsize -- tab stop spacing, defaults to 8.
1694
+ wrapcolumn -- column number where lines are broken and wrapped,
1695
+ defaults to None where lines are not wrapped.
1696
+ linejunk,charjunk -- keyword arguments passed into ndiff() (used by
1697
+ HtmlDiff() to generate the side by side HTML differences). See
1698
+ ndiff() documentation for argument default values and descriptions.
1699
+ """
1700
+ self._tabsize = tabsize
1701
+ self._wrapcolumn = wrapcolumn
1702
+ self._linejunk = linejunk
1703
+ self._charjunk = charjunk
1704
+
1705
+ def make_file(self, fromlines, tolines, fromdesc='', todesc='',
1706
+ context=False, numlines=5, *, charset='utf-8'):
1707
+ """Returns HTML file of side by side comparison with change highlights
1708
+
1709
+ Arguments:
1710
+ fromlines -- list of "from" lines
1711
+ tolines -- list of "to" lines
1712
+ fromdesc -- "from" file column header string
1713
+ todesc -- "to" file column header string
1714
+ context -- set to True for contextual differences (defaults to False
1715
+ which shows full differences).
1716
+ numlines -- number of context lines. When context is set True,
1717
+ controls number of lines displayed before and after the change.
1718
+ When context is False, controls the number of lines to place
1719
+ the "next" link anchors before the next change (so click of
1720
+ "next" link jumps to just before the change).
1721
+ charset -- charset of the HTML document
1722
+ """
1723
+
1724
+ return (self._file_template % dict(
1725
+ styles=self._styles,
1726
+ legend=self._legend,
1727
+ table=self.make_table(fromlines, tolines, fromdesc, todesc,
1728
+ context=context, numlines=numlines),
1729
+ charset=charset
1730
+ )).encode(charset, 'xmlcharrefreplace').decode(charset)
1731
+
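+ # Sketch: writing a complete side-by-side HTML page to disk
+ # ('diff.html' and the input line lists are illustrative):
+ #
+ #     html = HtmlDiff(wrapcolumn=70).make_file(
+ #         fromlines, tolines, 'before', 'after', context=True)
+ #     with open('diff.html', 'w', encoding='utf-8') as f:
+ #         f.write(html)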
1732
+ def _tab_newline_replace(self,fromlines,tolines):
1733
+ """Returns from/to line lists with tabs expanded and newlines removed.
1734
+
1735
+ Instead of tab characters being replaced by the number of spaces
1736
+ needed to fill in to the next tab stop, this function will fill
1737
+ the space with tab characters. This is done so that the difference
1738
+ algorithms can identify changes in a file when tabs are replaced by
1739
+ spaces and vice versa. At the end of the HTML generation, the tab
1740
+ characters will be replaced with a nonbreakable space.
1741
+ """
1742
+ def expand_tabs(line):
1743
+ # hide real spaces
1744
+ line = line.replace(' ','\0')
1745
+ # expand tabs into spaces
1746
+ line = line.expandtabs(self._tabsize)
1747
+ # replace spaces from expanded tabs back into tab characters
1748
+ # (we'll replace them with markup after we do differencing)
1749
+ line = line.replace(' ','\t')
1750
+ return line.replace('\0',' ').rstrip('\n')
1751
+ fromlines = [expand_tabs(line) for line in fromlines]
1752
+ tolines = [expand_tabs(line) for line in tolines]
1753
+ return fromlines,tolines
1754
+
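+ # Hand-traced sketch of the round trip above with tabsize=8: real
+ # spaces hide as NUL, tabs expand to spaces, and those fill spaces
+ # become tab characters (later rendered as &nbsp;):
+ #
+ #     'a\tb'  ->  'a' + '\t'*7 + 'b'   # filled out to column 8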
1755
+ def _split_line(self,data_list,line_num,text):
1756
+ """Builds list of text lines by splitting text lines at wrap point
1757
+
1758
+ This function will determine if the input text line needs to be
1759
+ wrapped (split) into separate lines. If so, the first wrap point
1760
+ will be determined and the first line appended to the output
1761
+ text line list. This function is used recursively to handle
1762
+ the second part of the split line to further split it.
1763
+ """
1764
+ # if blank line or context separator, just add it to the output list
1765
+ if not line_num:
1766
+ data_list.append((line_num,text))
1767
+ return
1768
+
1769
+ # if line text doesn't need wrapping, just add it to the output list
1770
+ size = len(text)
1771
+ max = self._wrapcolumn
1772
+ if (size <= max) or ((size -(text.count('\0')*3)) <= max):
1773
+ data_list.append((line_num,text))
1774
+ return
1775
+
1776
+ # scan text looking for the wrap point, keeping track if the wrap
1777
+ # point is inside markers
1778
+ i = 0
1779
+ n = 0
1780
+ mark = ''
1781
+ while n < max and i < size:
1782
+ if text[i] == '\0':
1783
+ i += 1
1784
+ mark = text[i]
1785
+ i += 1
1786
+ elif text[i] == '\1':
1787
+ i += 1
1788
+ mark = ''
1789
+ else:
1790
+ i += 1
1791
+ n += 1
1792
+
1793
+ # wrap point is inside text, break it up into separate lines
1794
+ line1 = text[:i]
1795
+ line2 = text[i:]
1796
+
1797
+ # if wrap point is inside markers, place end marker at end of first
1798
+ # line and start marker at beginning of second line because each
1799
+ # line will have its own table tag markup around it.
1800
+ if mark:
1801
+ line1 = line1 + '\1'
1802
+ line2 = '\0' + mark + line2
1803
+
1804
+ # tack on first line onto the output list
1805
+ data_list.append((line_num,line1))
1806
+
1807
+ # use this routine again to wrap the remaining text
1808
+ self._split_line(data_list,'>',line2)
1809
+
1810
+ def _line_wrapper(self,diffs):
1811
+ """Returns iterator that splits (wraps) mdiff text lines"""
1812
+
1813
+ # pull from/to data and flags from mdiff iterator
1814
+ for fromdata,todata,flag in diffs:
1815
+ # check for context separators and pass them through
1816
+ if flag is None:
1817
+ yield fromdata,todata,flag
1818
+ continue
1819
+ (fromline,fromtext),(toline,totext) = fromdata,todata
1820
+ # for each from/to line split it at the wrap column to form
1821
+ # list of text lines.
1822
+ fromlist,tolist = [],[]
1823
+ self._split_line(fromlist,fromline,fromtext)
1824
+ self._split_line(tolist,toline,totext)
1825
+ # yield from/to line in pairs inserting blank lines as
1826
+ # necessary when one side has more wrapped lines
1827
+ while fromlist or tolist:
1828
+ if fromlist:
1829
+ fromdata = fromlist.pop(0)
1830
+ else:
1831
+ fromdata = ('',' ')
1832
+ if tolist:
1833
+ todata = tolist.pop(0)
1834
+ else:
1835
+ todata = ('',' ')
1836
+ yield fromdata,todata,flag
1837
+
1838
+ def _collect_lines(self,diffs):
1839
+ """Collects mdiff output into separate lists
1840
+
1841
+ Before storing the mdiff from/to data into a list, it is converted
1842
+ into a single line of text with HTML markup.
1843
+ """
1844
+
1845
+ fromlist,tolist,flaglist = [],[],[]
1846
+ # pull from/to data and flags from mdiff style iterator
1847
+ for fromdata,todata,flag in diffs:
1848
+ try:
1849
+ # store HTML markup of the lines into the lists
1850
+ fromlist.append(self._format_line(0,flag,*fromdata))
1851
+ tolist.append(self._format_line(1,flag,*todata))
1852
+ except TypeError:
1853
+ # exceptions occur for lines where context separators go
1854
+ fromlist.append(None)
1855
+ tolist.append(None)
1856
+ flaglist.append(flag)
1857
+ return fromlist,tolist,flaglist
1858
+
1859
+ def _format_line(self,side,flag,linenum,text):
1860
+ """Returns HTML markup of "from" / "to" text lines
1861
+
1862
+ side -- 0 or 1 indicating "from" or "to" text
1863
+ flag -- indicates if difference on line
1864
+ linenum -- line number (used for line number column)
1865
+ text -- line text to be marked up
1866
+ """
1867
+ try:
1868
+ linenum = '%d' % linenum
1869
+ id = ' id="%s%s"' % (self._prefix[side],linenum)
1870
+ except TypeError:
1871
+ # handle blank lines where linenum is '>' or ''
1872
+ id = ''
1873
+ # replace those things that would get confused with HTML symbols
1874
+ text=text.replace("&","&amp;").replace(">","&gt;").replace("<","&lt;")
1875
+
1876
+ # make spaces non-breakable so they don't get compressed or line wrapped
1877
+ text = text.replace(' ','&nbsp;').rstrip()
1878
+
1879
+ return '<td class="diff_header"%s>%s</td><td nowrap="nowrap">%s</td>' \
1880
+ % (id,linenum,text)
1881
+
1882
+ def _make_prefix(self):
1883
+ """Create unique anchor prefixes"""
1884
+
1885
+ # Generate a unique anchor prefix so multiple tables
1886
+ # can exist on the same HTML page without conflicts.
1887
+ fromprefix = "from%d_" % HtmlDiff._default_prefix
1888
+ toprefix = "to%d_" % HtmlDiff._default_prefix
1889
+ HtmlDiff._default_prefix += 1
1890
+ # store prefixes so line format method has access
1891
+ self._prefix = [fromprefix,toprefix]
1892
+
1893
+ def _convert_flags(self,fromlist,tolist,flaglist,context,numlines):
1894
+ """Makes list of "next" links"""
1895
+
1896
+ # all anchor names will be generated using the unique "to" prefix
1897
+ toprefix = self._prefix[1]
1898
+
1899
+ # process change flags, generating middle column of next anchors/links
1900
+ next_id = ['']*len(flaglist)
1901
+ next_href = ['']*len(flaglist)
1902
+ num_chg, in_change = 0, False
1903
+ last = 0
1904
+ for i,flag in enumerate(flaglist):
1905
+ if flag:
1906
+ if not in_change:
1907
+ in_change = True
1908
+ last = i
1909
+ # at the beginning of a change, drop an anchor a few lines
1910
+ # (the context lines) before the change for the previous
1911
+ # link
1912
+ i = max([0,i-numlines])
1913
+ next_id[i] = ' id="difflib_chg_%s_%d"' % (toprefix,num_chg)
1914
+ # at the beginning of a change, drop a link to the next
1915
+ # change
1916
+ num_chg += 1
1917
+ next_href[last] = '<a href="#difflib_chg_%s_%d">n</a>' % (
1918
+ toprefix,num_chg)
1919
+ else:
1920
+ in_change = False
1921
+ # check for cases where there is no content to avoid exceptions
1922
+ if not flaglist:
1923
+ flaglist = [False]
1924
+ next_id = ['']
1925
+ next_href = ['']
1926
+ last = 0
1927
+ if context:
1928
+ fromlist = ['<td></td><td>&nbsp;No Differences Found&nbsp;</td>']
1929
+ tolist = fromlist
1930
+ else:
1931
+ fromlist = tolist = ['<td></td><td>&nbsp;Empty File&nbsp;</td>']
1932
+ # if not a change on first line, drop a link
1933
+ if not flaglist[0]:
1934
+ next_href[0] = '<a href="#difflib_chg_%s_0">f</a>' % toprefix
1935
+ # redo the last link to link to the top
1936
+ next_href[last] = '<a href="#difflib_chg_%s_top">t</a>' % (toprefix)
1937
+
1938
+ return fromlist,tolist,flaglist,next_href,next_id
1939
+
1940
+ def make_table(self,fromlines,tolines,fromdesc='',todesc='',context=False,
1941
+ numlines=5):
1942
+ """Returns HTML table of side by side comparison with change highlights
1943
+
1944
+ Arguments:
1945
+ fromlines -- list of "from" lines
1946
+ tolines -- list of "to" lines
1947
+ fromdesc -- "from" file column header string
1948
+ todesc -- "to" file column header string
1949
+ context -- set to True for contextual differences (defaults to False
1950
+ which shows full differences).
1951
+ numlines -- number of context lines. When context is set True,
1952
+ controls number of lines displayed before and after the change.
1953
+ When context is False, controls the number of lines to place
1954
+ the "next" link anchors before the next change (so click of
1955
+ "next" link jumps to just before the change).
1956
+ """
1957
+
1958
+ # make unique anchor prefixes so that multiple tables may exist
1959
+ # on the same page without conflict.
1960
+ self._make_prefix()
1961
+
1962
+ # change tabs to spaces before it gets more difficult after we insert
1963
+ # markup
1964
+ fromlines,tolines = self._tab_newline_replace(fromlines,tolines)
1965
+
1966
+ # create diffs iterator which generates side by side from/to data
1967
+ if context:
1968
+ context_lines = numlines
1969
+ else:
1970
+ context_lines = None
1971
+ diffs = _mdiff(fromlines,tolines,context_lines,linejunk=self._linejunk,
1972
+ charjunk=self._charjunk)
1973
+
1974
+ # set up iterator to wrap lines that exceed desired width
1975
+ if self._wrapcolumn:
1976
+ diffs = self._line_wrapper(diffs)
1977
+
1978
+ # collect up from/to lines and flags into lists (also format the lines)
1979
+ fromlist,tolist,flaglist = self._collect_lines(diffs)
1980
+
1981
+ # process change flags, generating middle column of next anchors/links
1982
+ fromlist,tolist,flaglist,next_href,next_id = self._convert_flags(
1983
+ fromlist,tolist,flaglist,context,numlines)
1984
+
1985
+ s = []
1986
+ fmt = ' <tr><td class="diff_next"%s>%s</td>%s' + \
1987
+ '<td class="diff_next">%s</td>%s</tr>\n'
1988
+ for i in range(len(flaglist)):
1989
+ if flaglist[i] is None:
1990
+ # mdiff yields None on separator lines; skip the bogus ones
1991
+ # generated for the first line
1992
+ if i > 0:
1993
+ s.append(' </tbody> \n <tbody>\n')
1994
+ else:
1995
+ s.append( fmt % (next_id[i],next_href[i],fromlist[i],
1996
+ next_href[i],tolist[i]))
1997
+ if fromdesc or todesc:
1998
+ header_row = '<thead><tr>%s%s%s%s</tr></thead>' % (
1999
+ '<th class="diff_next"><br /></th>',
2000
+ '<th colspan="2" class="diff_header">%s</th>' % fromdesc,
2001
+ '<th class="diff_next"><br /></th>',
2002
+ '<th colspan="2" class="diff_header">%s</th>' % todesc)
2003
+ else:
2004
+ header_row = ''
2005
+
2006
+ table = self._table_template % dict(
2007
+ data_rows=''.join(s),
2008
+ header_row=header_row,
2009
+ prefix=self._prefix[1])
2010
+
2011
+ return table.replace('\0+','<span class="diff_add">'). \
2012
+ replace('\0-','<span class="diff_sub">'). \
2013
+ replace('\0^','<span class="diff_chg">'). \
2014
+ replace('\1','</span>'). \
2015
+ replace('\t','&nbsp;')
2016
+
2017
+ del re
2018
+
2019
+ def restore(delta, which):
2020
+ r"""
2021
+ Generate one of the two sequences that generated a delta.
2022
+
2023
+ Given a `delta` produced by `Differ.compare()` or `ndiff()`, extract
2024
+ lines originating from file 1 or 2 (parameter `which`), stripping off line
2025
+ prefixes.
2026
+
2027
+ Examples:
2028
+
2029
+ >>> diff = ndiff('one\ntwo\nthree\n'.splitlines(keepends=True),
2030
+ ... 'ore\ntree\nemu\n'.splitlines(keepends=True))
2031
+ >>> diff = list(diff)
2032
+ >>> print(''.join(restore(diff, 1)), end="")
2033
+ one
2034
+ two
2035
+ three
2036
+ >>> print(''.join(restore(diff, 2)), end="")
2037
+ ore
2038
+ tree
2039
+ emu
2040
+ """
2041
+ try:
2042
+ tag = {1: "- ", 2: "+ "}[int(which)]
2043
+ except KeyError:
2044
+ raise ValueError('unknown delta choice (must be 1 or 2): %r'
2045
+ % which) from None
2046
+ prefixes = (" ", tag)
2047
+ for line in delta:
2048
+ if line[:2] in prefixes:
2049
+ yield line[2:]
2050
+
2051
+ def _test():
2052
+ import doctest, difflib
2053
+ return doctest.testmod(difflib)
2054
+
2055
+ if __name__ == "__main__":
2056
+ _test()
janus/lib/python3.10/distutils/__init__.py ADDED
@@ -0,0 +1,20 @@
1
+ """distutils
2
+
3
+ The main package for the Python Module Distribution Utilities. Normally
4
+ used from a setup script as
5
+
6
+ from distutils.core import setup
7
+
8
+ setup (...)
9
+ """
10
+
11
+ import sys
12
+ import warnings
13
+
14
+ __version__ = sys.version[:sys.version.index(' ')]
15
+
16
+ _DEPRECATION_MESSAGE = ("The distutils package is deprecated and slated for "
17
+ "removal in Python 3.12. Use setuptools or check "
18
+ "PEP 632 for potential alternatives")
19
+ warnings.warn(_DEPRECATION_MESSAGE,
20
+ DeprecationWarning, 2)
janus/lib/python3.10/distutils/_msvccompiler.py ADDED
@@ -0,0 +1,546 @@
1
+ """distutils._msvccompiler
2
+
3
+ Contains MSVCCompiler, an implementation of the abstract CCompiler class
4
+ for Microsoft Visual Studio 2015.
5
+
6
+ The module is compatible with VS 2015 and later. You can find legacy support
7
+ for older versions in distutils.msvc9compiler and distutils.msvccompiler.
8
+ """
9
+
10
+ # Written by Perry Stoll
11
+ # hacked by Robin Becker and Thomas Heller to do a better job of
12
+ # finding DevStudio (through the registry)
13
+ # ported to VS 2005 and VS 2008 by Christian Heimes
14
+ # ported to VS 2015 by Steve Dower
15
+
16
+ import os
17
+ import subprocess
18
+ import winreg
19
+
20
+ from distutils.errors import DistutilsExecError, DistutilsPlatformError, \
21
+ CompileError, LibError, LinkError
22
+ from distutils.ccompiler import CCompiler, gen_lib_options
23
+ from distutils import log
24
+ from distutils.util import get_platform
25
+
26
+ from itertools import count
27
+
28
+ def _find_vc2015():
29
+ try:
30
+ key = winreg.OpenKeyEx(
31
+ winreg.HKEY_LOCAL_MACHINE,
32
+ r"Software\Microsoft\VisualStudio\SxS\VC7",
33
+ access=winreg.KEY_READ | winreg.KEY_WOW64_32KEY
34
+ )
35
+ except OSError:
36
+ log.debug("Visual C++ is not registered")
37
+ return None, None
38
+
39
+ best_version = 0
40
+ best_dir = None
41
+ with key:
42
+ for i in count():
43
+ try:
44
+ v, vc_dir, vt = winreg.EnumValue(key, i)
45
+ except OSError:
46
+ break
47
+ if v and vt == winreg.REG_SZ and os.path.isdir(vc_dir):
48
+ try:
49
+ version = int(float(v))
50
+ except (ValueError, TypeError):
51
+ continue
52
+ if version >= 14 and version > best_version:
53
+ best_version, best_dir = version, vc_dir
54
+ return best_version, best_dir
55
+
56
+ def _find_vc2017():
57
+ """Returns "15, path" based on the result of invoking vswhere.exe
58
+ If no install is found, returns "None, None"
59
+
60
+ The version is returned to avoid unnecessarily changing the function
61
+ result. It may be ignored when the path is not None.
62
+
63
+ If vswhere.exe is not available, by definition, VS 2017 is not
64
+ installed.
65
+ """
66
+ root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
67
+ if not root:
68
+ return None, None
69
+
70
+ try:
71
+ path = subprocess.check_output([
72
+ os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"),
73
+ "-latest",
74
+ "-prerelease",
75
+ "-requires", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64",
76
+ "-property", "installationPath",
77
+ "-products", "*",
78
+ ], encoding="mbcs", errors="strict").strip()
79
+ except (subprocess.CalledProcessError, OSError, UnicodeDecodeError):
80
+ return None, None
81
+
82
+ path = os.path.join(path, "VC", "Auxiliary", "Build")
83
+ if os.path.isdir(path):
84
+ return 15, path
85
+
86
+ return None, None
87
+
88
+ PLAT_SPEC_TO_RUNTIME = {
89
+ 'x86' : 'x86',
90
+ 'x86_amd64' : 'x64',
91
+ 'x86_arm' : 'arm',
92
+ 'x86_arm64' : 'arm64'
93
+ }
94
+
95
+ def _find_vcvarsall(plat_spec):
96
+ # bpo-38597: Removed vcruntime return value
97
+ _, best_dir = _find_vc2017()
98
+
99
+ if not best_dir:
100
+ best_version, best_dir = _find_vc2015()
101
+
102
+ if not best_dir:
103
+ log.debug("No suitable Visual C++ version found")
104
+ return None, None
105
+
106
+ vcvarsall = os.path.join(best_dir, "vcvarsall.bat")
107
+ if not os.path.isfile(vcvarsall):
108
+ log.debug("%s cannot be found", vcvarsall)
109
+ return None, None
110
+
111
+ return vcvarsall, None
112
+
113
+ def _get_vc_env(plat_spec):
114
+ if os.getenv("DISTUTILS_USE_SDK"):
115
+ return {
116
+ key.lower(): value
117
+ for key, value in os.environ.items()
118
+ }
119
+
120
+ vcvarsall, _ = _find_vcvarsall(plat_spec)
121
+ if not vcvarsall:
122
+ raise DistutilsPlatformError("Unable to find vcvarsall.bat")
123
+
124
+ try:
125
+ out = subprocess.check_output(
126
+ 'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec),
127
+ stderr=subprocess.STDOUT,
128
+ ).decode('utf-16le', errors='replace')
129
+ except subprocess.CalledProcessError as exc:
130
+ log.error(exc.output)
131
+ raise DistutilsPlatformError("Error executing {}"
132
+ .format(exc.cmd))
133
+
134
+ env = {
135
+ key.lower(): value
136
+ for key, _, value in
137
+ (line.partition('=') for line in out.splitlines())
138
+ if key and value
139
+ }
140
+
141
+ return env
142
+
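+ # Sketch of the "KEY=VALUE" parsing idiom used above (output string
+ # illustrative): partition('=') splits each line once, and lines
+ # without both a key and a value are dropped.
+ #
+ #     >>> out = "PATH=C:\\VC\\bin\nINCLUDE=C:\\VC\\include\nnoise"
+ #     >>> {k.lower(): v for k, _, v in
+ #     ...  (l.partition('=') for l in out.splitlines()) if k and v}
+ #     {'path': 'C:\\VC\\bin', 'include': 'C:\\VC\\include'}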
143
+ def _find_exe(exe, paths=None):
144
+ """Return path to an MSVC executable program.
145
+
146
+ Tries to find the program in several places: first, one of the
147
+ MSVC program search paths from the registry; next, the directories
148
+ in the PATH environment variable. If any of those work, return an
149
+ absolute path that is known to exist. If none of them work, just
150
+ return the original program name, 'exe'.
151
+ """
152
+ if not paths:
153
+ paths = os.getenv('path').split(os.pathsep)
154
+ for p in paths:
155
+ fn = os.path.join(os.path.abspath(p), exe)
156
+ if os.path.isfile(fn):
157
+ return fn
158
+ return exe
159
+
160
+ # A map keyed by get_platform() return values to values accepted by
161
+ # 'vcvarsall.bat'. Always cross-compile from x86 to work with the
162
+ # lighter-weight MSVC installs that do not include native 64-bit tools.
163
+ PLAT_TO_VCVARS = {
164
+ 'win32' : 'x86',
165
+ 'win-amd64' : 'x86_amd64',
166
+ 'win-arm32' : 'x86_arm',
167
+ 'win-arm64' : 'x86_arm64'
168
+ }
169
+
170
+ class MSVCCompiler(CCompiler) :
171
+ """Concrete class that implements an interface to Microsoft Visual C++,
172
+ as defined by the CCompiler abstract class."""
173
+
174
+ compiler_type = 'msvc'
175
+
176
+ # Just set this so CCompiler's constructor doesn't barf. We currently
177
+ # don't use the 'set_executables()' bureaucracy provided by CCompiler,
178
+ # as it really isn't necessary for this sort of single-compiler class.
179
+ # Would be nice to have a consistent interface with UnixCCompiler,
180
+ # though, so it's worth thinking about.
181
+ executables = {}
182
+
183
+ # Private class data (need to distinguish C from C++ source for compiler)
184
+ _c_extensions = ['.c']
185
+ _cpp_extensions = ['.cc', '.cpp', '.cxx']
186
+ _rc_extensions = ['.rc']
187
+ _mc_extensions = ['.mc']
188
+
189
+ # Needed for the filename generation methods provided by the
190
+ # base class, CCompiler.
191
+ src_extensions = (_c_extensions + _cpp_extensions +
192
+ _rc_extensions + _mc_extensions)
193
+ res_extension = '.res'
194
+ obj_extension = '.obj'
195
+ static_lib_extension = '.lib'
196
+ shared_lib_extension = '.dll'
197
+ static_lib_format = shared_lib_format = '%s%s'
198
+ exe_extension = '.exe'
199
+
200
+
201
+ def __init__(self, verbose=0, dry_run=0, force=0):
202
+ CCompiler.__init__ (self, verbose, dry_run, force)
203
+ # target platform (.plat_name is consistent with 'bdist')
204
+ self.plat_name = None
205
+ self.initialized = False
206
+
207
+ def initialize(self, plat_name=None):
208
+ # multi-init means we would need to check platform same each time...
209
+ assert not self.initialized, "don't init multiple times"
210
+ if plat_name is None:
211
+ plat_name = get_platform()
212
+ # sanity check for platforms to prevent obscure errors later.
213
+ if plat_name not in PLAT_TO_VCVARS:
214
+ raise DistutilsPlatformError("--plat-name must be one of {}"
215
+ .format(tuple(PLAT_TO_VCVARS)))
216
+
217
+ # Get the vcvarsall.bat spec for the requested platform.
218
+ plat_spec = PLAT_TO_VCVARS[plat_name]
219
+
220
+ vc_env = _get_vc_env(plat_spec)
221
+ if not vc_env:
222
+ raise DistutilsPlatformError("Unable to find a compatible "
223
+ "Visual Studio installation.")
224
+
225
+ self._paths = vc_env.get('path', '')
226
+ paths = self._paths.split(os.pathsep)
227
+ self.cc = _find_exe("cl.exe", paths)
228
+ self.linker = _find_exe("link.exe", paths)
229
+ self.lib = _find_exe("lib.exe", paths)
230
+ self.rc = _find_exe("rc.exe", paths) # resource compiler
231
+ self.mc = _find_exe("mc.exe", paths) # message compiler
232
+ self.mt = _find_exe("mt.exe", paths) # message compiler
233
+
234
+ for dir in vc_env.get('include', '').split(os.pathsep):
235
+ if dir:
236
+ self.add_include_dir(dir.rstrip(os.sep))
237
+
238
+ for dir in vc_env.get('lib', '').split(os.pathsep):
239
+ if dir:
240
+ self.add_library_dir(dir.rstrip(os.sep))
241
+
242
+ self.preprocess_options = None
243
+ # bpo-38597: Always compile with dynamic linking
244
+ # Future releases of Python 3.x will include all past
245
+ # versions of vcruntime*.dll for compatibility.
246
+ self.compile_options = [
247
+ '/nologo', '/Ox', '/W3', '/GL', '/DNDEBUG', '/MD'
248
+ ]
249
+
250
+ self.compile_options_debug = [
251
+ '/nologo', '/Od', '/MDd', '/Zi', '/W3', '/D_DEBUG'
252
+ ]
253
+
254
+ ldflags = [
255
+ '/nologo', '/INCREMENTAL:NO', '/LTCG'
256
+ ]
257
+
258
+ ldflags_debug = [
259
+ '/nologo', '/INCREMENTAL:NO', '/LTCG', '/DEBUG:FULL'
260
+ ]
261
+
262
+ self.ldflags_exe = [*ldflags, '/MANIFEST:EMBED,ID=1']
263
+ self.ldflags_exe_debug = [*ldflags_debug, '/MANIFEST:EMBED,ID=1']
264
+ self.ldflags_shared = [*ldflags, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO']
265
+ self.ldflags_shared_debug = [*ldflags_debug, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO']
266
+ self.ldflags_static = [*ldflags]
267
+ self.ldflags_static_debug = [*ldflags_debug]
268
+
269
+ self._ldflags = {
270
+ (CCompiler.EXECUTABLE, None): self.ldflags_exe,
271
+ (CCompiler.EXECUTABLE, False): self.ldflags_exe,
272
+ (CCompiler.EXECUTABLE, True): self.ldflags_exe_debug,
273
+ (CCompiler.SHARED_OBJECT, None): self.ldflags_shared,
274
+ (CCompiler.SHARED_OBJECT, False): self.ldflags_shared,
275
+ (CCompiler.SHARED_OBJECT, True): self.ldflags_shared_debug,
276
+ (CCompiler.SHARED_LIBRARY, None): self.ldflags_static,
277
+ (CCompiler.SHARED_LIBRARY, False): self.ldflags_static,
278
+ (CCompiler.SHARED_LIBRARY, True): self.ldflags_static_debug,
279
+ }
280
+
281
+ self.initialized = True
282
+
283
+ # -- Worker methods ------------------------------------------------
284
+
285
+ def object_filenames(self,
286
+ source_filenames,
287
+ strip_dir=0,
288
+ output_dir=''):
289
+ ext_map = {
290
+ **{ext: self.obj_extension for ext in self.src_extensions},
291
+ **{ext: self.res_extension for ext in self._rc_extensions + self._mc_extensions},
292
+ }
293
+
294
+ output_dir = output_dir or ''
295
+
296
+ def make_out_path(p):
297
+ base, ext = os.path.splitext(p)
298
+ if strip_dir:
299
+ base = os.path.basename(base)
300
+ else:
301
+ _, base = os.path.splitdrive(base)
302
+ if base.startswith((os.path.sep, os.path.altsep)):
303
+ base = base[1:]
304
+ try:
305
+ # XXX: This may produce absurdly long paths. We should check
306
+ # the length of the result and trim base until we fit within
307
+ # 260 characters.
308
+ return os.path.join(output_dir, base + ext_map[ext])
309
+ except LookupError:
310
+ # Better to raise an exception instead of silently continuing
311
+ # and later complain about sources and targets having
312
+ # different lengths
313
+ raise CompileError("Don't know how to compile {}".format(p))
314
+
315
+ return list(map(make_out_path, source_filenames))
316
+
317
+
318
+ def compile(self, sources,
319
+ output_dir=None, macros=None, include_dirs=None, debug=0,
320
+ extra_preargs=None, extra_postargs=None, depends=None):
321
+
322
+ if not self.initialized:
323
+ self.initialize()
324
+ compile_info = self._setup_compile(output_dir, macros, include_dirs,
325
+ sources, depends, extra_postargs)
326
+ macros, objects, extra_postargs, pp_opts, build = compile_info
327
+
328
+ compile_opts = extra_preargs or []
329
+ compile_opts.append('/c')
330
+ if debug:
331
+ compile_opts.extend(self.compile_options_debug)
332
+ else:
333
+ compile_opts.extend(self.compile_options)
334
+
335
+
336
+ add_cpp_opts = False
337
+
338
+ for obj in objects:
339
+ try:
340
+ src, ext = build[obj]
341
+ except KeyError:
342
+ continue
343
+ if debug:
344
+ # pass the full pathname to MSVC in debug mode,
345
+ # this allows the debugger to find the source file
346
+ # without asking the user to browse for it
347
+ src = os.path.abspath(src)
348
+
349
+ # Anaconda/conda-forge customisation, we want our pdbs to be
350
+ # relocatable:
351
+ # https://developercommunity.visualstudio.com/comments/623156/view.html
352
+ d1trimfile_opts = []
353
+ if 'SRC_DIR' in os.environ and os.path.basename(self.cc) == "cl.exe":
354
+ d1trimfile_opts.append("/d1trimfile:" + os.environ['SRC_DIR'])
355
+
356
+ if ext in self._c_extensions:
357
+ input_opt = "/Tc" + src
358
+ elif ext in self._cpp_extensions:
359
+ input_opt = "/Tp" + src
360
+ add_cpp_opts = True
361
+ elif ext in self._rc_extensions:
362
+ # compile .RC to .RES file
363
+ input_opt = src
364
+ output_opt = "/fo" + obj
365
+ try:
366
+ self.spawn([self.rc] + pp_opts + [output_opt, input_opt])
367
+ except DistutilsExecError as msg:
368
+ raise CompileError(msg)
369
+ continue
370
+ elif ext in self._mc_extensions:
371
+ # Compile .MC to .RC file to .RES file.
372
+ # * '-h dir' specifies the directory for the
373
+ # generated include file
374
+ # * '-r dir' specifies the target directory of the
375
+ # generated RC file and the binary message resource
376
+ # it includes
377
+ #
378
+ # For now (since there are no options to change this),
379
+ # we use the source-directory for the include file and
380
+ # the build directory for the RC file and message
381
+ # resources. This works at least for win32all.
382
+ h_dir = os.path.dirname(src)
383
+ rc_dir = os.path.dirname(obj)
384
+ try:
385
+ # first compile .MC to .RC and .H file
386
+ self.spawn([self.mc, '-h', h_dir, '-r', rc_dir, src])
387
+ base, _ = os.path.splitext(os.path.basename (src))
388
+ rc_file = os.path.join(rc_dir, base + '.rc')
389
+ # then compile .RC to .RES file
390
+ self.spawn([self.rc, "/fo" + obj, rc_file])
391
+
392
+ except DistutilsExecError as msg:
393
+ raise CompileError(msg)
394
+ continue
395
+ else:
396
+ # how to handle this file?
397
+ raise CompileError("Don't know how to compile {} to {}"
398
+ .format(src, obj))
399
+
400
+ args = [self.cc] + compile_opts + pp_opts + d1trimfile_opts
401
+ if add_cpp_opts:
402
+ args.append('/EHsc')
403
+ args.append(input_opt)
404
+ args.append("/Fo" + obj)
405
+ args.extend(extra_postargs)
406
+
407
+ try:
408
+ self.spawn(args)
409
+ except DistutilsExecError as msg:
410
+ raise CompileError(msg)
411
+
412
+ return objects
413
+
414
+
415
+ def create_static_lib(self,
416
+ objects,
417
+ output_libname,
418
+ output_dir=None,
419
+ debug=0,
420
+ target_lang=None):
421
+
422
+ if not self.initialized:
423
+ self.initialize()
424
+ objects, output_dir = self._fix_object_args(objects, output_dir)
425
+ output_filename = self.library_filename(output_libname,
426
+ output_dir=output_dir)
427
+
428
+ if self._need_link(objects, output_filename):
429
+ lib_args = objects + ['/OUT:' + output_filename]
430
+ if debug:
431
+ pass # XXX what goes here?
432
+ try:
433
+ log.debug('Executing "%s" %s', self.lib, ' '.join(lib_args))
434
+ self.spawn([self.lib] + lib_args)
435
+ except DistutilsExecError as msg:
436
+ raise LibError(msg)
437
+ else:
438
+ log.debug("skipping %s (up-to-date)", output_filename)
439
+
440
+
441
+ def link(self,
442
+ target_desc,
443
+ objects,
444
+ output_filename,
445
+ output_dir=None,
446
+ libraries=None,
447
+ library_dirs=None,
448
+ runtime_library_dirs=None,
449
+ export_symbols=None,
450
+ debug=0,
451
+ extra_preargs=None,
452
+ extra_postargs=None,
453
+ build_temp=None,
454
+ target_lang=None):
455
+
456
+ if not self.initialized:
457
+ self.initialize()
458
+ objects, output_dir = self._fix_object_args(objects, output_dir)
459
+ fixed_args = self._fix_lib_args(libraries, library_dirs,
460
+ runtime_library_dirs)
461
+ libraries, library_dirs, runtime_library_dirs = fixed_args
462
+
463
+ if runtime_library_dirs:
464
+ self.warn("I don't know what to do with 'runtime_library_dirs': "
465
+ + str(runtime_library_dirs))
466
+
467
+ lib_opts = gen_lib_options(self,
468
+ library_dirs, runtime_library_dirs,
469
+ libraries)
470
+ if output_dir is not None:
471
+ output_filename = os.path.join(output_dir, output_filename)
472
+
473
+ if self._need_link(objects, output_filename):
474
+ ldflags = self._ldflags[target_desc, debug]
475
+
476
+ export_opts = ["/EXPORT:" + sym for sym in (export_symbols or [])]
477
+
478
+ ld_args = (ldflags + lib_opts + export_opts +
479
+ objects + ['/OUT:' + output_filename])
480
+
481
+ # The MSVC linker generates .lib and .exp files, which cannot be
482
+ # suppressed by any linker switches. The .lib files may even be
483
+ # needed! Make sure they are generated in the temporary build
484
+ # directory. Since they have different names for debug and release
485
+ # builds, they can go into the same directory.
486
+ build_temp = os.path.dirname(objects[0])
487
+ if export_symbols is not None:
488
+ (dll_name, dll_ext) = os.path.splitext(
489
+ os.path.basename(output_filename))
490
+ implib_file = os.path.join(
491
+ build_temp,
492
+ self.library_filename(dll_name))
493
+ ld_args.append ('/IMPLIB:' + implib_file)
494
+
495
+ if extra_preargs:
496
+ ld_args[:0] = extra_preargs
497
+ if extra_postargs:
498
+ ld_args.extend(extra_postargs)
499
+
500
+ output_dir = os.path.dirname(os.path.abspath(output_filename))
501
+ self.mkpath(output_dir)
502
+ try:
503
+ log.debug('Executing "%s" %s', self.linker, ' '.join(ld_args))
504
+ self.spawn([self.linker] + ld_args)
505
+ except DistutilsExecError as msg:
506
+ raise LinkError(msg)
507
+ else:
508
+ log.debug("skipping %s (up-to-date)", output_filename)
509
+
510
+ def spawn(self, cmd):
511
+ old_path = os.getenv('path')
512
+ try:
513
+ os.environ['path'] = self._paths
514
+ return super().spawn(cmd)
515
+ finally:
516
+ os.environ['path'] = old_path
517
+
518
+ # -- Miscellaneous methods -----------------------------------------
519
+ # These are all used by the 'gen_lib_options() function, in
520
+ # ccompiler.py.
521
+
522
+ def library_dir_option(self, dir):
523
+ return "/LIBPATH:" + dir
524
+
525
+ def runtime_library_dir_option(self, dir):
526
+ raise DistutilsPlatformError(
527
+ "don't know how to set runtime library search path for MSVC")
528
+
529
+ def library_option(self, lib):
530
+ return self.library_filename(lib)
531
+
532
+ def find_library_file(self, dirs, lib, debug=0):
533
+ # Prefer a debugging library if found (and requested), but deal
534
+ # with it if we don't have one.
535
+ if debug:
536
+ try_names = [lib + "_d", lib]
537
+ else:
538
+ try_names = [lib]
539
+ for dir in dirs:
540
+ for name in try_names:
541
+ libfile = os.path.join(dir, self.library_filename(name))
542
+ if os.path.isfile(libfile):
543
+ return libfile
544
+ else:
545
+ # Oops, didn't find it in *any* of 'dirs'
546
+ return None
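The `_ldflags` table built in `initialize()` above lets `link()` pick its flag set with a single `(target_desc, debug)` dictionary lookup; the integer `debug` argument lands on the boolean keys because `0 == False` and `1 == True` in Python. A minimal standalone sketch of the same pattern (the constants and flag values below are illustrative stand-ins, not taken from the file):

```python
# Sketch of the (target_desc, debug) flag lookup used by link() above.
# EXECUTABLE/SHARED_OBJECT and the flag lists are illustrative placeholders.
EXECUTABLE, SHARED_OBJECT = 'executable', 'shared_object'

_ldflags = {
    (EXECUTABLE, None):  ['/nologo', '/MANIFEST:EMBED,ID=1'],
    (EXECUTABLE, False): ['/nologo', '/MANIFEST:EMBED,ID=1'],
    (EXECUTABLE, True):  ['/nologo', '/DEBUG:FULL', '/MANIFEST:EMBED,ID=1'],
    (SHARED_OBJECT, None):  ['/nologo', '/DLL'],
    (SHARED_OBJECT, False): ['/nologo', '/DLL'],
    (SHARED_OBJECT, True):  ['/nologo', '/DLL', '/DEBUG:FULL'],
}

def flags_for(target_desc, debug=0):
    # 0/False and 1/True hash and compare equal, so the integer debug
    # flag indexes the matching boolean key without any conversion.
    return _ldflags[target_desc, debug]

print(flags_for(SHARED_OBJECT, debug=1))  # ['/nologo', '/DLL', '/DEBUG:FULL']
```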
janus/lib/python3.10/distutils/cmd.py ADDED
@@ -0,0 +1,403 @@
+"""distutils.cmd
+
+Provides the Command class, the base class for the command classes
+in the distutils.command package.
+"""
+
+import sys, os, re
+from distutils.errors import DistutilsOptionError
+from distutils import util, dir_util, file_util, archive_util, dep_util
+from distutils import log
+
+class Command:
+    """Abstract base class for defining command classes, the "worker bees"
+    of the Distutils.  A useful analogy for command classes is to think of
+    them as subroutines with local variables called "options".  The options
+    are "declared" in 'initialize_options()' and "defined" (given their
+    final values, aka "finalized") in 'finalize_options()', both of which
+    must be defined by every command class.  The distinction between the
+    two is necessary because option values might come from the outside
+    world (command line, config file, ...), and any options dependent on
+    other options must be computed *after* these outside influences have
+    been processed -- hence 'finalize_options()'.  The "body" of the
+    subroutine, where it does all its work based on the values of its
+    options, is the 'run()' method, which must also be implemented by every
+    command class.
+    """
+
+    # 'sub_commands' formalizes the notion of a "family" of commands,
+    # eg. "install" as the parent with sub-commands "install_lib",
+    # "install_headers", etc.  The parent of a family of commands
+    # defines 'sub_commands' as a class attribute; it's a list of
+    #    (command_name : string, predicate : unbound_method | string | None)
+    # tuples, where 'predicate' is a method of the parent command that
+    # determines whether the corresponding command is applicable in the
+    # current situation.  (Eg. "install_headers" is only applicable if
+    # we have any C header files to install.)  If 'predicate' is None,
+    # that command is always applicable.
+    #
+    # 'sub_commands' is usually defined at the *end* of a class, because
+    # predicates can be unbound methods, so they must already have been
+    # defined.  The canonical example is the "install" command.
+    sub_commands = []
+
+
+    # -- Creation/initialization methods -------------------------------
+
+    def __init__(self, dist):
+        """Create and initialize a new Command object.  Most importantly,
+        invokes the 'initialize_options()' method, which is the real
+        initializer and depends on the actual command being
+        instantiated.
+        """
+        # late import because of mutual dependence between these classes
+        from distutils.dist import Distribution
+
+        if not isinstance(dist, Distribution):
+            raise TypeError("dist must be a Distribution instance")
+        if self.__class__ is Command:
+            raise RuntimeError("Command is an abstract class")
+
+        self.distribution = dist
+        self.initialize_options()
+
+        # Per-command versions of the global flags, so that the user can
+        # customize Distutils' behaviour command-by-command and let some
+        # commands fall back on the Distribution's behaviour.  None means
+        # "not defined, check self.distribution's copy", while 0 or 1 mean
+        # false and true (duh).  Note that this means figuring out the real
+        # value of each flag is a touch complicated -- hence "self._dry_run"
+        # will be handled by __getattr__, below.
+        # XXX This needs to be fixed.
+        self._dry_run = None
+
+        # verbose is largely ignored, but needs to be set for
+        # backwards compatibility (I think)?
+        self.verbose = dist.verbose
+
+        # Some commands define a 'self.force' option to ignore file
+        # timestamps, but methods defined *here* assume that
+        # 'self.force' exists for all commands.  So define it here
+        # just to be safe.
+        self.force = None
+
+        # The 'help' flag is just used for command-line parsing, so
+        # none of that complicated bureaucracy is needed.
+        self.help = 0
+
+        # 'finalized' records whether or not 'finalize_options()' has been
+        # called.  'finalize_options()' itself should not pay attention to
+        # this flag: it is the business of 'ensure_finalized()', which
+        # always calls 'finalize_options()', to respect/update it.
+        self.finalized = 0
+
+    # XXX A more explicit way to customize dry_run would be better.
+    def __getattr__(self, attr):
+        if attr == 'dry_run':
+            myval = getattr(self, "_" + attr)
+            if myval is None:
+                return getattr(self.distribution, attr)
+            else:
+                return myval
+        else:
+            raise AttributeError(attr)
+
+    def ensure_finalized(self):
+        if not self.finalized:
+            self.finalize_options()
+        self.finalized = 1
+
+    # Subclasses must define:
+    #   initialize_options()
+    #     provide default values for all options; may be customized by
+    #     setup script, by options from config file(s), or by command-line
+    #     options
+    #   finalize_options()
+    #     decide on the final values for all options; this is called
+    #     after all possible intervention from the outside world
+    #     (command-line, option file, etc.) has been processed
+    #   run()
+    #     run the command: do whatever it is we're here to do,
+    #     controlled by the command's various option values
+
+    def initialize_options(self):
+        """Set default values for all the options that this command
+        supports.  Note that these defaults may be overridden by other
+        commands, by the setup script, by config files, or by the
+        command-line.  Thus, this is not the place to code dependencies
+        between options; generally, 'initialize_options()' implementations
+        are just a bunch of "self.foo = None" assignments.
+
+        This method must be implemented by all command classes.
+        """
+        raise RuntimeError("abstract method -- subclass %s must override"
+                           % self.__class__)
+
+    def finalize_options(self):
+        """Set final values for all the options that this command supports.
+        This is always called as late as possible, ie.  after any option
+        assignments from the command-line or from other commands have been
+        done.  Thus, this is the place to code option dependencies: if
+        'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as
+        long as 'foo' still has the same value it was assigned in
+        'initialize_options()'.
+
+        This method must be implemented by all command classes.
+        """
+        raise RuntimeError("abstract method -- subclass %s must override"
+                           % self.__class__)
+
+
+    def dump_options(self, header=None, indent=""):
+        from distutils.fancy_getopt import longopt_xlate
+        if header is None:
+            header = "command options for '%s':" % self.get_command_name()
+        self.announce(indent + header, level=log.INFO)
+        indent = indent + "  "
+        for (option, _, _) in self.user_options:
+            option = option.translate(longopt_xlate)
+            if option[-1] == "=":
+                option = option[:-1]
+            value = getattr(self, option)
+            self.announce(indent + "%s = %s" % (option, value),
+                          level=log.INFO)
+
+    def run(self):
+        """A command's raison d'etre: carry out the action it exists to
+        perform, controlled by the options initialized in
+        'initialize_options()', customized by other commands, the setup
+        script, the command-line, and config files, and finalized in
+        'finalize_options()'.  All terminal output and filesystem
+        interaction should be done by 'run()'.
+
+        This method must be implemented by all command classes.
+        """
+        raise RuntimeError("abstract method -- subclass %s must override"
+                           % self.__class__)
+
+    def announce(self, msg, level=1):
+        """If the current verbosity level is greater than or equal to
+        'level' print 'msg' to stdout.
+        """
+        log.log(level, msg)
+
+    def debug_print(self, msg):
+        """Print 'msg' to stdout if the global DEBUG (taken from the
+        DISTUTILS_DEBUG environment variable) flag is true.
+        """
+        from distutils.debug import DEBUG
+        if DEBUG:
+            print(msg)
+            sys.stdout.flush()
+
+
+    # -- Option validation methods -------------------------------------
+    # (these are very handy in writing the 'finalize_options()' method)
+    #
+    # NB. the general philosophy here is to ensure that a particular option
+    # value meets certain type and value constraints.  If not, we try to
+    # force it into conformance (eg. if we expect a list but have a string,
+    # split the string on comma and/or whitespace).  If we can't force the
+    # option into conformance, raise DistutilsOptionError.  Thus, command
+    # classes need do nothing more than (eg.)
+    #   self.ensure_string_list('foo')
+    # and they can be guaranteed that thereafter, self.foo will be
+    # a list of strings.
+
+    def _ensure_stringlike(self, option, what, default=None):
+        val = getattr(self, option)
+        if val is None:
+            setattr(self, option, default)
+            return default
+        elif not isinstance(val, str):
+            raise DistutilsOptionError("'%s' must be a %s (got `%s`)"
+                                       % (option, what, val))
+        return val
+
+    def ensure_string(self, option, default=None):
+        """Ensure that 'option' is a string; if not defined, set it to
+        'default'.
+        """
+        self._ensure_stringlike(option, "string", default)
+
+    def ensure_string_list(self, option):
+        r"""Ensure that 'option' is a list of strings.  If 'option' is
+        currently a string, we split it either on /,\s*/ or /\s+/, so
+        "foo bar baz", "foo,bar,baz", and "foo,   bar baz" all become
+        ["foo", "bar", "baz"].
+        """
+        val = getattr(self, option)
+        if val is None:
+            return
+        elif isinstance(val, str):
+            setattr(self, option, re.split(r',\s*|\s+', val))
+        else:
+            if isinstance(val, list):
+                ok = all(isinstance(v, str) for v in val)
+            else:
+                ok = False
+            if not ok:
+                raise DistutilsOptionError(
+                    "'%s' must be a list of strings (got %r)"
+                    % (option, val))
+
+    def _ensure_tested_string(self, option, tester, what, error_fmt,
+                              default=None):
+        val = self._ensure_stringlike(option, what, default)
+        if val is not None and not tester(val):
+            raise DistutilsOptionError(("error in '%s' option: " + error_fmt)
+                                       % (option, val))
+
+    def ensure_filename(self, option):
+        """Ensure that 'option' is the name of an existing file."""
+        self._ensure_tested_string(option, os.path.isfile,
+                                   "filename",
+                                   "'%s' does not exist or is not a file")
+
+    def ensure_dirname(self, option):
+        self._ensure_tested_string(option, os.path.isdir,
+                                   "directory name",
+                                   "'%s' does not exist or is not a directory")
+
+
+    # -- Convenience methods for commands ------------------------------
+
+    def get_command_name(self):
+        if hasattr(self, 'command_name'):
+            return self.command_name
+        else:
+            return self.__class__.__name__
+
+    def set_undefined_options(self, src_cmd, *option_pairs):
+        """Set the values of any "undefined" options from corresponding
+        option values in some other command object.  "Undefined" here means
+        "is None", which is the convention used to indicate that an option
+        has not been changed between 'initialize_options()' and
+        'finalize_options()'.  Usually called from 'finalize_options()' for
+        options that depend on some other command rather than another
+        option of the same command.  'src_cmd' is the other command from
+        which option values will be taken (a command object will be created
+        for it if necessary); the remaining arguments are
+        '(src_option,dst_option)' tuples which mean "take the value of
+        'src_option' in the 'src_cmd' command object, and copy it to
+        'dst_option' in the current command object".
+        """
+        # Option_pairs: list of (src_option, dst_option) tuples
+        src_cmd_obj = self.distribution.get_command_obj(src_cmd)
+        src_cmd_obj.ensure_finalized()
+        for (src_option, dst_option) in option_pairs:
+            if getattr(self, dst_option) is None:
+                setattr(self, dst_option, getattr(src_cmd_obj, src_option))
+
+    def get_finalized_command(self, command, create=1):
+        """Wrapper around Distribution's 'get_command_obj()' method: find
+        (create if necessary and 'create' is true) the command object for
+        'command', call its 'ensure_finalized()' method, and return the
+        finalized command object.
+        """
+        cmd_obj = self.distribution.get_command_obj(command, create)
+        cmd_obj.ensure_finalized()
+        return cmd_obj
+
+    # XXX rename to 'get_reinitialized_command()'? (should do the
+    # same in dist.py, if so)
+    def reinitialize_command(self, command, reinit_subcommands=0):
+        return self.distribution.reinitialize_command(command,
+                                                      reinit_subcommands)
+
+    def run_command(self, command):
+        """Run some other command: uses the 'run_command()' method of
+        Distribution, which creates and finalizes the command object if
+        necessary and then invokes its 'run()' method.
+        """
+        self.distribution.run_command(command)
+
+    def get_sub_commands(self):
+        """Determine the sub-commands that are relevant in the current
+        distribution (ie., that need to be run).  This is based on the
+        'sub_commands' class attribute: each tuple in that list may include
+        a method that we call to determine if the subcommand needs to be
+        run for the current distribution.  Return a list of command names.
+        """
+        commands = []
+        for (cmd_name, method) in self.sub_commands:
+            if method is None or method(self):
+                commands.append(cmd_name)
+        return commands
+
+
+    # -- External world manipulation -----------------------------------
+
+    def warn(self, msg):
+        log.warn("warning: %s: %s\n", self.get_command_name(), msg)
+
+    def execute(self, func, args, msg=None, level=1):
+        util.execute(func, args, msg, dry_run=self.dry_run)
+
+    def mkpath(self, name, mode=0o777):
+        dir_util.mkpath(name, mode, dry_run=self.dry_run)
+
+    def copy_file(self, infile, outfile, preserve_mode=1, preserve_times=1,
+                  link=None, level=1):
+        """Copy a file respecting verbose, dry-run and force flags.  (The
+        former two default to whatever is in the Distribution object, and
+        the latter defaults to false for commands that don't define it.)"""
+        return file_util.copy_file(infile, outfile, preserve_mode,
+                                   preserve_times, not self.force, link,
+                                   dry_run=self.dry_run)
+
+    def copy_tree(self, infile, outfile, preserve_mode=1, preserve_times=1,
+                  preserve_symlinks=0, level=1):
+        """Copy an entire directory tree respecting verbose, dry-run,
+        and force flags.
+        """
+        return dir_util.copy_tree(infile, outfile, preserve_mode,
+                                  preserve_times, preserve_symlinks,
+                                  not self.force, dry_run=self.dry_run)
+
+    def move_file (self, src, dst, level=1):
+        """Move a file respecting dry-run flag."""
+        return file_util.move_file(src, dst, dry_run=self.dry_run)
+
+    def spawn(self, cmd, search_path=1, level=1):
+        """Spawn an external command respecting dry-run flag."""
+        from distutils.spawn import spawn
+        spawn(cmd, search_path, dry_run=self.dry_run)
+
+    def make_archive(self, base_name, format, root_dir=None, base_dir=None,
+                     owner=None, group=None):
+        return archive_util.make_archive(base_name, format, root_dir, base_dir,
+                                         dry_run=self.dry_run,
+                                         owner=owner, group=group)
+
+    def make_file(self, infiles, outfile, func, args,
+                  exec_msg=None, skip_msg=None, level=1):
+        """Special case of 'execute()' for operations that process one or
+        more input files and generate one output file.  Works just like
+        'execute()', except the operation is skipped and a different
+        message printed if 'outfile' already exists and is newer than all
+        files listed in 'infiles'.  If the command defined 'self.force',
+        and it is true, then the command is unconditionally run -- does no
+        timestamp checks.
+        """
+        if skip_msg is None:
+            skip_msg = "skipping %s (inputs unchanged)" % outfile
+
+        # Allow 'infiles' to be a single string
+        if isinstance(infiles, str):
+            infiles = (infiles,)
+        elif not isinstance(infiles, (list, tuple)):
+            raise TypeError(
+                  "'infiles' must be a string, or a list or tuple of strings")
+
+        if exec_msg is None:
+            exec_msg = "generating %s from %s" % (outfile, ', '.join(infiles))
+
+        # If 'outfile' must be regenerated (either because it doesn't
+        # exist, is out-of-date, or the 'force' flag is true) then
+        # perform the action that presumably regenerates it
+        if self.force or dep_util.newer_group(infiles, outfile):
+            self.execute(func, args, exec_msg, level)
+        # Otherwise, print the "skip" message
+        else:
+            log.debug(skip_msg)
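In practice, a setup script extends this class by filling in exactly the three hooks the docstrings above require. A minimal, hypothetical command (the `greet` name and its option are invented for illustration; it would be wired in via `setup(..., cmdclass={'greet': greet})`):

```python
from distutils.cmd import Command

class greet(Command):
    """Hypothetical command demonstrating the three required hooks."""
    description = "print a greeting"
    user_options = [('name=', 'n', "who to greet")]

    def initialize_options(self):
        # declare every option with a default; no dependencies here
        self.name = None

    def finalize_options(self):
        # compute final values after config files and the command line ran
        if self.name is None:
            self.name = "world"

    def run(self):
        # the command's actual work
        print("hello, %s" % self.name)
```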
janus/lib/python3.10/distutils/config.py ADDED
@@ -0,0 +1,130 @@
+"""distutils.pypirc
+
+Provides the PyPIRCCommand class, the base class for the command classes
+that use .pypirc in the distutils.command package.
+"""
+import os
+from configparser import RawConfigParser
+
+from distutils.cmd import Command
+
+DEFAULT_PYPIRC = """\
+[distutils]
+index-servers =
+    pypi
+
+[pypi]
+username:%s
+password:%s
+"""
+
+class PyPIRCCommand(Command):
+    """Base command that knows how to handle the .pypirc file
+    """
+    DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/'
+    DEFAULT_REALM = 'pypi'
+    repository = None
+    realm = None
+
+    user_options = [
+        ('repository=', 'r',
+         "url of repository [default: %s]" % \
+            DEFAULT_REPOSITORY),
+        ('show-response', None,
+         'display full response text from server')]
+
+    boolean_options = ['show-response']
+
+    def _get_rc_file(self):
+        """Returns rc file path."""
+        return os.path.join(os.path.expanduser('~'), '.pypirc')
+
+    def _store_pypirc(self, username, password):
+        """Creates a default .pypirc file."""
+        rc = self._get_rc_file()
+        with os.fdopen(os.open(rc, os.O_CREAT | os.O_WRONLY, 0o600), 'w') as f:
+            f.write(DEFAULT_PYPIRC % (username, password))
+
+    def _read_pypirc(self):
+        """Reads the .pypirc file."""
+        rc = self._get_rc_file()
+        if os.path.exists(rc):
+            self.announce('Using PyPI login from %s' % rc)
+            repository = self.repository or self.DEFAULT_REPOSITORY
+
+            config = RawConfigParser()
+            config.read(rc)
+            sections = config.sections()
+            if 'distutils' in sections:
+                # let's get the list of servers
+                index_servers = config.get('distutils', 'index-servers')
+                _servers = [server.strip() for server in
+                            index_servers.split('\n')
+                            if server.strip() != '']
+                if _servers == []:
+                    # nothing set, let's try to get the default pypi
+                    if 'pypi' in sections:
+                        _servers = ['pypi']
+                    else:
+                        # the file is not properly defined, returning
+                        # an empty dict
+                        return {}
+                for server in _servers:
+                    current = {'server': server}
+                    current['username'] = config.get(server, 'username')
+
+                    # optional params
+                    for key, default in (('repository',
+                                          self.DEFAULT_REPOSITORY),
+                                         ('realm', self.DEFAULT_REALM),
+                                         ('password', None)):
+                        if config.has_option(server, key):
+                            current[key] = config.get(server, key)
+                        else:
+                            current[key] = default
+
+                    # work around people having "repository" for the "pypi"
+                    # section of their config set to the HTTP (rather than
+                    # HTTPS) URL
+                    if (server == 'pypi' and
+                        repository in (self.DEFAULT_REPOSITORY, 'pypi')):
+                        current['repository'] = self.DEFAULT_REPOSITORY
+                        return current
+
+                    if (current['server'] == repository or
+                        current['repository'] == repository):
+                        return current
+            elif 'server-login' in sections:
+                # old format
+                server = 'server-login'
+                if config.has_option(server, 'repository'):
+                    repository = config.get(server, 'repository')
+                else:
+                    repository = self.DEFAULT_REPOSITORY
+                return {'username': config.get(server, 'username'),
+                        'password': config.get(server, 'password'),
+                        'repository': repository,
+                        'server': server,
+                        'realm': self.DEFAULT_REALM}
+
+        return {}
+
+    def _read_pypi_response(self, response):
+        """Read and decode a PyPI HTTP response."""
+        import cgi
+        content_type = response.getheader('content-type', 'text/plain')
+        encoding = cgi.parse_header(content_type)[1].get('charset', 'ascii')
+        return response.read().decode(encoding)
+
+    def initialize_options(self):
+        """Initialize options."""
+        self.repository = None
+        self.realm = None
+        self.show_response = 0
+
+    def finalize_options(self):
+        """Finalizes options."""
+        if self.repository is None:
+            self.repository = self.DEFAULT_REPOSITORY
+        if self.realm is None:
+            self.realm = self.DEFAULT_REALM
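The nested parsing in `_read_pypirc()` expects a `.pypirc` of the shape written by `DEFAULT_PYPIRC`. A small self-contained sketch of that shape and of how `RawConfigParser` reads it (the server name and credentials are placeholders):

```python
from configparser import RawConfigParser

SAMPLE_PYPIRC = """\
[distutils]
index-servers =
    pypi

[pypi]
username:alice
password:s3cret
"""  # placeholder credentials, same layout as DEFAULT_PYPIRC above

config = RawConfigParser()
config.read_string(SAMPLE_PYPIRC)
servers = [s.strip()
           for s in config.get('distutils', 'index-servers').split('\n')
           if s.strip()]
print(servers)                          # ['pypi']
print(config.get('pypi', 'username'))   # alice
```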
janus/lib/python3.10/distutils/core.py ADDED
@@ -0,0 +1,234 @@
+"""distutils.core
+
+The only module that needs to be imported to use the Distutils; provides
+the 'setup' function (which is to be called from the setup script).  Also
+indirectly provides the Distribution and Command classes, although they are
+really defined in distutils.dist and distutils.cmd.
+"""
+
+import os
+import sys
+
+from distutils.debug import DEBUG
+from distutils.errors import *
+
+# Mainly import these so setup scripts can "from distutils.core import" them.
+from distutils.dist import Distribution
+from distutils.cmd import Command
+from distutils.config import PyPIRCCommand
+from distutils.extension import Extension
+
+# This is a barebones help message generated and displayed when the user
+# runs the setup script with no arguments at all.  More useful help
+# is generated with various --help options: global help, list commands,
+# and per-command help.
+USAGE = """\
+usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...]
+   or: %(script)s --help [cmd1 cmd2 ...]
+   or: %(script)s --help-commands
+   or: %(script)s cmd --help
+"""
+
+def gen_usage (script_name):
+    script = os.path.basename(script_name)
+    return USAGE % vars()
+
+
+# Some mild magic to control the behaviour of 'setup()' from 'run_setup()'.
+_setup_stop_after = None
+_setup_distribution = None
+
+# Legal keyword arguments for the setup() function
+setup_keywords = ('distclass', 'script_name', 'script_args', 'options',
+                  'name', 'version', 'author', 'author_email',
+                  'maintainer', 'maintainer_email', 'url', 'license',
+                  'description', 'long_description', 'keywords',
+                  'platforms', 'classifiers', 'download_url',
+                  'requires', 'provides', 'obsoletes',
+                  )
+
+# Legal keyword arguments for the Extension constructor
+extension_keywords = ('name', 'sources', 'include_dirs',
+                      'define_macros', 'undef_macros',
+                      'library_dirs', 'libraries', 'runtime_library_dirs',
+                      'extra_objects', 'extra_compile_args', 'extra_link_args',
+                      'swig_opts', 'export_symbols', 'depends', 'language')
+
+def setup (**attrs):
+    """The gateway to the Distutils: do everything your setup script needs
+    to do, in a highly flexible and user-driven way.  Briefly: create a
+    Distribution instance; find and parse config files; parse the command
+    line; run each Distutils command found there, customized by the options
+    supplied to 'setup()' (as keyword arguments), in config files, and on
+    the command line.
+
+    The Distribution instance might be an instance of a class supplied via
+    the 'distclass' keyword argument to 'setup'; if no such class is
+    supplied, then the Distribution class (in dist.py) is instantiated.
+    All other arguments to 'setup' (except for 'cmdclass') are used to set
+    attributes of the Distribution instance.
+
+    The 'cmdclass' argument, if supplied, is a dictionary mapping command
+    names to command classes.  Each command encountered on the command line
+    will be turned into a command class, which is in turn instantiated; any
+    class found in 'cmdclass' is used in place of the default, which is
+    (for command 'foo_bar') class 'foo_bar' in module
+    'distutils.command.foo_bar'.  The command class must provide a
+    'user_options' attribute which is a list of option specifiers for
+    'distutils.fancy_getopt'.  Any command-line options between the current
+    and the next command are used to set attributes of the current command
+    object.
+
+    When the entire command-line has been successfully parsed, calls the
+    'run()' method on each command object in turn.  This method will be
+    driven entirely by the Distribution object (which each command object
+    has a reference to, thanks to its constructor), and the
+    command-specific options that became attributes of each command
+    object.
+    """
+
+    global _setup_stop_after, _setup_distribution
+
+    # Determine the distribution class -- either caller-supplied or
+    # our Distribution (see below).
+    klass = attrs.get('distclass')
+    if klass:
+        del attrs['distclass']
+    else:
+        klass = Distribution
+
+    if 'script_name' not in attrs:
+        attrs['script_name'] = os.path.basename(sys.argv[0])
+    if 'script_args' not in attrs:
+        attrs['script_args'] = sys.argv[1:]
+
+    # Create the Distribution instance, using the remaining arguments
+    # (ie. everything except distclass) to initialize it
+    try:
+        _setup_distribution = dist = klass(attrs)
+    except DistutilsSetupError as msg:
+        if 'name' not in attrs:
+            raise SystemExit("error in setup command: %s" % msg)
+        else:
+            raise SystemExit("error in %s setup command: %s" % \
+                  (attrs['name'], msg))
+
+    if _setup_stop_after == "init":
+        return dist
+
+    # Find and parse the config file(s): they will override options from
+    # the setup script, but be overridden by the command line.
+    dist.parse_config_files()
+
+    if DEBUG:
+        print("options (after parsing config files):")
+        dist.dump_option_dicts()
+
+    if _setup_stop_after == "config":
+        return dist
+
+    # Parse the command line and override config files; any
+    # command-line errors are the end user's fault, so turn them into
+    # SystemExit to suppress tracebacks.
+    try:
+        ok = dist.parse_command_line()
+    except DistutilsArgError as msg:
+        raise SystemExit(gen_usage(dist.script_name) + "\nerror: %s" % msg)
+
+    if DEBUG:
+        print("options (after parsing command line):")
+        dist.dump_option_dicts()
+
+    if _setup_stop_after == "commandline":
+        return dist
+
+    # And finally, run all the commands found on the command line.
+    if ok:
+        try:
+            dist.run_commands()
+        except KeyboardInterrupt:
+            raise SystemExit("interrupted")
+        except OSError as exc:
+            if DEBUG:
+                sys.stderr.write("error: %s\n" % (exc,))
+                raise
+            else:
+                raise SystemExit("error: %s" % (exc,))
+
+        except (DistutilsError,
+                CCompilerError) as msg:
+            if DEBUG:
+                raise
+            else:
+                raise SystemExit("error: " + str(msg))
+
+    return dist
+
+# setup ()
+
+
+def run_setup (script_name, script_args=None, stop_after="run"):
+    """Run a setup script in a somewhat controlled environment, and
+    return the Distribution instance that drives things.  This is useful
+    if you need to find out the distribution meta-data (passed as
+    keyword args from 'script' to 'setup()'), or the contents of the
+    config files or command-line.
+
+    'script_name' is a file that will be read and run with 'exec()';
+    'sys.argv[0]' will be replaced with 'script' for the duration of the
+    call.  'script_args' is a list of strings; if supplied,
+    'sys.argv[1:]' will be replaced by 'script_args' for the duration of
+    the call.
+
+    'stop_after' tells 'setup()' when to stop processing; possible
+    values:
+
+      init
+        stop after the Distribution instance has been created and
+        populated with the keyword arguments to 'setup()'
+      config
+        stop after config files have been parsed (and their data
+        stored in the Distribution instance)
+      commandline
+        stop after the command-line ('sys.argv[1:]' or 'script_args')
+        have been parsed (and the data stored in the Distribution)
+      run [default]
+        stop after all commands have been run (the same as if 'setup()'
+        had been called in the usual way)
+
+    Returns the Distribution instance, which provides all information
+    used to drive the Distutils.
+    """
+    if stop_after not in ('init', 'config', 'commandline', 'run'):
+        raise ValueError("invalid value for 'stop_after': %r" % (stop_after,))
+
+    global _setup_stop_after, _setup_distribution
+    _setup_stop_after = stop_after
+
+    save_argv = sys.argv.copy()
+    g = {'__file__': script_name}
+    try:
+        try:
+            sys.argv[0] = script_name
+            if script_args is not None:
+                sys.argv[1:] = script_args
+            with open(script_name, 'rb') as f:
+                exec(f.read(), g)
+        finally:
+            sys.argv = save_argv
+            _setup_stop_after = None
+    except SystemExit:
+        # Hmm, should we do something if exiting with a non-zero code
+        # (ie. error)?
+        pass
+
+    if _setup_distribution is None:
+        raise RuntimeError(("'distutils.core.setup()' was never called -- "
+                           "perhaps '%s' is not a Distutils setup script?") % \
+              script_name)
+
+    # I wonder if the setup script's namespace -- g and l -- would be of
+    # any interest to callers?
+    #print "_setup_distribution:", _setup_distribution
+    return _setup_distribution
+
+# run_setup ()
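A typical use of `run_setup()` is to introspect a script's metadata without executing its commands. A sketch, assuming a Distutils-style `setup.py` exists in the current directory (the path is illustrative):

```python
from distutils.core import run_setup

# stop_after='config' parses setup.py and its config files but runs nothing
dist = run_setup('setup.py', stop_after='config')
print(dist.get_name(), dist.get_version())
print(dist.script_args)   # the would-be command line, left unexecuted
```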
janus/lib/python3.10/distutils/cygwinccompiler.py ADDED
@@ -0,0 +1,406 @@
+"""distutils.cygwinccompiler
+
+Provides the CygwinCCompiler class, a subclass of UnixCCompiler that
+handles the Cygwin port of the GNU C compiler to Windows.  It also contains
+the Mingw32CCompiler class which handles the mingw32 port of GCC (same as
+cygwin in no-cygwin mode).
+"""
+
+# problems:
+#
+# * if you use a msvc compiled python version (1.5.2)
+#   1. you have to insert a __GNUC__ section in its config.h
+#   2. you have to generate an import library for its dll
+#      - create a def-file for python??.dll
+#      - create an import library using
+#        dlltool --dllname python15.dll --def python15.def \
+#                  --output-lib libpython15.a
+#
+#   see also http://starship.python.net/crew/kernr/mingw32/Notes.html
+#
+# * We put export_symbols in a def-file, and don't use
+#   --export-all-symbols because it didn't work reliably in some
+#   tested configurations. And because other windows compilers also
+#   need their symbols specified, this is no serious problem.
+#
+# tested configurations:
+#
+# * cygwin gcc 2.91.57/ld 2.9.4/dllwrap 0.2.4 works
+#   (after patching python's config.h and for C++ some other include files)
+#   see also http://starship.python.net/crew/kernr/mingw32/Notes.html
+# * mingw32 gcc 2.95.2/ld 2.9.4/dllwrap 0.2.4 works
+#   (ld doesn't support -shared, so we use dllwrap)
+# * cygwin gcc 2.95.2/ld 2.10.90/dllwrap 2.10.90 works now
+#   - its dllwrap doesn't work, there is a bug in binutils 2.10.90
+#     see also http://sources.redhat.com/ml/cygwin/2000-06/msg01274.html
+#   - using gcc -mdll instead of dllwrap doesn't work without -static because
+#     it tries to link against dlls instead of their import libraries. (If
+#     it finds the dll first.)
+#     By specifying -static we force ld to link against the import libraries,
+#     this is windows standard and there are normally not the necessary symbols
+#     in the dlls.
+#   *** only the version of June 2000 shows these problems
+# * cygwin gcc 3.2/ld 2.13.90 works
+#   (ld supports -shared)
+# * mingw gcc 3.2/ld 2.13 works
+#   (ld supports -shared)
+
+import os
+import sys
+import copy
+from subprocess import Popen, PIPE, check_output
+import re
+
+from distutils.unixccompiler import UnixCCompiler
+from distutils.file_util import write_file
+from distutils.errors import (DistutilsExecError, CCompilerError,
+        CompileError, UnknownFileError)
+from distutils.version import LooseVersion
+from distutils.spawn import find_executable
+
+def get_msvcr():
+    """Include the appropriate MSVC runtime library if Python was built
+    with MSVC 7.0 or later.
+    """
+    msc_pos = sys.version.find('MSC v.')
+    if msc_pos != -1:
+        msc_ver = sys.version[msc_pos+6:msc_pos+10]
+        if msc_ver == '1300':
+            # MSVC 7.0
+            return ['msvcr70']
+        elif msc_ver == '1310':
+            # MSVC 7.1
+            return ['msvcr71']
+        elif msc_ver == '1400':
+            # VS2005 / MSVC 8.0
+            return ['msvcr80']
+        elif msc_ver == '1500':
+            # VS2008 / MSVC 9.0
+            return ['msvcr90']
+        elif msc_ver == '1600':
+            # VS2010 / MSVC 10.0
+            return ['msvcr100']
+        elif int(msc_ver) >= 1900:
+            # VS2015 / MSVC 14.0
+            return ['msvcr140']
+        else:
+            raise ValueError("Unknown MS Compiler version %s " % msc_ver)
+
+
+class CygwinCCompiler(UnixCCompiler):
+    """ Handles the Cygwin port of the GNU C compiler to Windows.
+    """
+    compiler_type = 'cygwin'
+    obj_extension = ".o"
+    static_lib_extension = ".a"
+    shared_lib_extension = ".dll"
+    static_lib_format = "lib%s%s"
+    shared_lib_format = "%s%s"
+    exe_extension = ".exe"
+
+    def __init__(self, verbose=0, dry_run=0, force=0):
+
+        UnixCCompiler.__init__(self, verbose, dry_run, force)
+
+        status, details = check_config_h()
+        self.debug_print("Python's GCC status: %s (details: %s)" %
+                         (status, details))
+        if status is not CONFIG_H_OK:
+            self.warn(
+                "Python's pyconfig.h doesn't seem to support your compiler. "
+                "Reason: %s. "
+                "Compiling may fail because of undefined preprocessor macros."
+                % details)
+
+        self.gcc_version, self.ld_version, self.dllwrap_version = \
+            get_versions()
+        self.debug_print(self.compiler_type + ": gcc %s, ld %s, dllwrap %s\n" %
+                         (self.gcc_version,
+                          self.ld_version,
+                          self.dllwrap_version) )
+
+        # ld_version >= "2.10.90" and < "2.13" should also be able to use
+        # gcc -mdll instead of dllwrap
+        # Older dllwraps had own version numbers, newer ones use the
+        # same as the rest of binutils ( also ld )
+        # dllwrap 2.10.90 is buggy
+        if self.ld_version >= "2.10.90":
+            self.linker_dll = "gcc"
+        else:
+            self.linker_dll = "dllwrap"
+
+        # ld_version >= "2.13" support -shared so use it instead of
+        # -mdll -static
+        if self.ld_version >= "2.13":
+            shared_option = "-shared"
+        else:
+            shared_option = "-mdll -static"
+
+        # Hard-code GCC because that's what this is all about.
+        # XXX optimization, warnings etc. should be customizable.
+        self.set_executables(compiler='gcc -mcygwin -O -Wall',
+                             compiler_so='gcc -mcygwin -mdll -O -Wall',
+                             compiler_cxx='g++ -mcygwin -O -Wall',
+                             linker_exe='gcc -mcygwin',
+                             linker_so=('%s -mcygwin %s' %
+                                        (self.linker_dll, shared_option)))
+
+        # cygwin and mingw32 need different sets of libraries
+        if self.gcc_version == "2.91.57":
+            # cygwin shouldn't need msvcrt, but without the dlls will crash
+            # (gcc version 2.91.57) -- perhaps something about initialization
+            self.dll_libraries=["msvcrt"]
+            self.warn(
+                "Consider upgrading to a newer version of gcc")
+        else:
+            # Include the appropriate MSVC runtime library if Python was built
+            # with MSVC 7.0 or later.
+            self.dll_libraries = get_msvcr()
+
+    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+        """Compiles the source by spawning GCC and windres if needed."""
+        if ext == '.rc' or ext == '.res':
+            # gcc needs '.res' and '.rc' compiled to object files !!!
+            try:
+                self.spawn(["windres", "-i", src, "-o", obj])
+            except DistutilsExecError as msg:
+                raise CompileError(msg)
+        else: # for other files use the C-compiler
+            try:
+                self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
+                           extra_postargs)
+            except DistutilsExecError as msg:
+                raise CompileError(msg)
+
+    def link(self, target_desc, objects, output_filename, output_dir=None,
+             libraries=None, library_dirs=None, runtime_library_dirs=None,
+             export_symbols=None, debug=0, extra_preargs=None,
+             extra_postargs=None, build_temp=None, target_lang=None):
+        """Link the objects."""
+        # use separate copies, so we can modify the lists
+        extra_preargs = copy.copy(extra_preargs or [])
+        libraries = copy.copy(libraries or [])
+        objects = copy.copy(objects or [])
+
+        # Additional libraries
+        libraries.extend(self.dll_libraries)
+
+        # handle export symbols by creating a def-file
+        # with executables this only works with gcc/ld as linker
+        if ((export_symbols is not None) and
+            (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
+            # (The linker doesn't do anything if output is up-to-date.
+            # So it would probably be better to check if we really need this,
+            # but for this we had to insert some unchanged parts of
+            # UnixCCompiler, and this is not what we want.)
+
+            # we want to put some files in the same directory as the
+            # object files are, build_temp doesn't help much
+            # where are the object files
+            temp_dir = os.path.dirname(objects[0])
+            # name of dll to give the helper files the same base name
+            (dll_name, dll_extension) = os.path.splitext(
+                os.path.basename(output_filename))
+
+            # generate the filenames for these files
+            def_file = os.path.join(temp_dir, dll_name + ".def")
+            lib_file = os.path.join(temp_dir, 'lib' + dll_name + ".a")
+
+            # Generate .def file
+            contents = [
+                "LIBRARY %s" % os.path.basename(output_filename),
+                "EXPORTS"]
+            for sym in export_symbols:
+                contents.append(sym)
+            self.execute(write_file, (def_file, contents),
+                         "writing %s" % def_file)
+
+            # next add options for def-file and for creating import libraries
+
+            # dllwrap uses different options than gcc/ld
+            if self.linker_dll == "dllwrap":
+                extra_preargs.extend(["--output-lib", lib_file])
+                # for dllwrap we have to use a special option
+                extra_preargs.extend(["--def", def_file])
+            # we use gcc/ld here and can be sure ld is >= 2.9.10
+            else:
+                # doesn't work: bfd_close build\...\libfoo.a: Invalid operation
+                #extra_preargs.extend(["-Wl,--out-implib,%s" % lib_file])
+                # for gcc/ld the def-file is specified as any object files
+                objects.append(def_file)
+
+        #end: if ((export_symbols is not None) and
+        #        (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
+
+        # whoever wants symbols and a many times larger output file
+        # should explicitly switch the debug mode on
+        # otherwise we let dllwrap/ld strip the output file
+        # (On my machine: 10KiB < stripped_file < ??100KiB
+        #   unstripped_file = stripped_file + XXX KiB
+        #  ( XXX=254 for a typical python extension))
+        if not debug:
+            extra_preargs.append("-s")
+
+        UnixCCompiler.link(self, target_desc, objects, output_filename,
+                           output_dir, libraries, library_dirs,
+                           runtime_library_dirs,
+                           None, # export_symbols, we do this in our def-file
+                           debug, extra_preargs, extra_postargs, build_temp,
+                           target_lang)
+
+    # -- Miscellaneous methods -----------------------------------------
+
+    def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
+        """Adds support for rc and res files."""
+        if output_dir is None:
+            output_dir = ''
+        obj_names = []
+        for src_name in source_filenames:
+            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
+            base, ext = os.path.splitext(os.path.normcase(src_name))
+            if ext not in (self.src_extensions + ['.rc','.res']):
+                raise UnknownFileError("unknown file type '%s' (from '%s')" % \
+                      (ext, src_name))
+            if strip_dir:
+                base = os.path.basename (base)
+            if ext in ('.res', '.rc'):
+                # these need to be compiled to object files
+                obj_names.append (os.path.join(output_dir,
+                                            base + ext + self.obj_extension))
+            else:
+                obj_names.append (os.path.join(output_dir,
+                                               base + self.obj_extension))
+        return obj_names
+
+# the same as cygwin plus some additional parameters
+class Mingw32CCompiler(CygwinCCompiler):
+    """ Handles the Mingw32 port of the GNU C compiler to Windows.
+    """
+    compiler_type = 'mingw32'
+
+    def __init__(self, verbose=0, dry_run=0, force=0):
+
+        CygwinCCompiler.__init__ (self, verbose, dry_run, force)
+
+        # ld_version >= "2.13" support -shared so use it instead of
+        # -mdll -static
+        if self.ld_version >= "2.13":
+            shared_option = "-shared"
+        else:
+            shared_option = "-mdll -static"
+
+        # A real mingw32 doesn't need to specify a different entry point,
+        # but cygwin 2.91.57 in no-cygwin-mode needs it.
+        if self.gcc_version <= "2.91.57":
+            entry_point = '--entry _DllMain@12'
+        else:
+            entry_point = ''
+
+        if is_cygwingcc():
+            raise CCompilerError(
+                'Cygwin gcc cannot be used with --compiler=mingw32')
+
+        self.set_executables(compiler='gcc -O -Wall',
+                             compiler_so='gcc -mdll -O -Wall',
+                             compiler_cxx='g++ -O -Wall',
+                             linker_exe='gcc',
+                             linker_so='%s %s %s'
+                                        % (self.linker_dll, shared_option,
+                                           entry_point))
+        # Maybe we should also append -mthreads, but then the finished
+        # dlls need another dll (mingwm10.dll see Mingw32 docs)
+        # (-mthreads: Support thread-safe exception handling on `Mingw32')
+
+        # no additional libraries needed
+        self.dll_libraries=[]
+
+        # Include the appropriate MSVC runtime library if Python was built
+        # with MSVC 7.0 or later.
+        self.dll_libraries = get_msvcr()
+
+# Because these compilers aren't configured in Python's pyconfig.h file by
+# default, we should at least warn the user if he is using an unmodified
+# version.
+
+CONFIG_H_OK = "ok"
+CONFIG_H_NOTOK = "not ok"
+CONFIG_H_UNCERTAIN = "uncertain"
+
+def check_config_h():
+    """Check if the current Python installation appears amenable to building
+    extensions with GCC.
+
+    Returns a tuple (status, details), where 'status' is one of the following
+    constants:
+
+    - CONFIG_H_OK: all is well, go ahead and compile
+    - CONFIG_H_NOTOK: doesn't look good
+    - CONFIG_H_UNCERTAIN: not sure -- unable to read pyconfig.h
+
+    'details' is a human-readable string explaining the situation.
+
+    Note there are two ways to conclude "OK": either 'sys.version' contains
+    the string "GCC" (implying that this Python was built with GCC), or the
+    installed "pyconfig.h" contains the string "__GNUC__".
+    """
+
+    # XXX since this function also checks sys.version, it's not strictly a
+    # "pyconfig.h" check -- should probably be renamed...
+
+    from distutils import sysconfig
+
+    # if sys.version contains GCC then python was compiled with GCC, and the
+    # pyconfig.h file should be OK
+    if "GCC" in sys.version:
+        return CONFIG_H_OK, "sys.version mentions 'GCC'"
+
+    # let's see if __GNUC__ is mentioned in python.h
+    fn = sysconfig.get_config_h_filename()
+    try:
+        config_h = open(fn)
+        try:
+            if "__GNUC__" in config_h.read():
+                return CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn
+            else:
+                return CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn
+        finally:
+            config_h.close()
+    except OSError as exc:
+        return (CONFIG_H_UNCERTAIN,
+                "couldn't read '%s': %s" % (fn, exc.strerror))
+
+RE_VERSION = re.compile(br'(\d+\.\d+(\.\d+)*)')
+
+def _find_exe_version(cmd):
+    """Find the version of an executable by running `cmd` in the shell.
+
+    If the command is not found, or the output does not match
+    `RE_VERSION`, returns None.
+    """
+    executable = cmd.split()[0]
+    if find_executable(executable) is None:
+        return None
+    out = Popen(cmd, shell=True, stdout=PIPE).stdout
+    try:
+        out_string = out.read()
+    finally:
+        out.close()
+    result = RE_VERSION.search(out_string)
+    if result is None:
+        return None
+    # LooseVersion works with strings
+    # so we need to decode our bytes
+    return LooseVersion(result.group(1).decode())
+
+def get_versions():
+    """ Try to find out the versions of gcc, ld and dllwrap.
+
+    If a version cannot be determined, None is returned for it.
+    """
+    commands = ['gcc -dumpversion', 'ld -v', 'dllwrap --version']
+    return tuple([_find_exe_version(cmd) for cmd in commands])
+
+def is_cygwingcc():
+    '''Try to determine if the gcc that would be used is from cygwin.'''
+    out_string = check_output(['gcc', '-dumpmachine'])
+    return out_string.strip().endswith(b'cygwin')
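The version sniffing in `get_msvcr()` relies on CPython embedding its build compiler in `sys.version`. A sketch of the same slice-and-compare on a sample string (the string below is an example, not read from a live interpreter):

```python
version = "3.10.4 (tags/v3.10.4) [MSC v.1929 64 bit (AMD64)]"  # sample only

msc_pos = version.find('MSC v.')
if msc_pos != -1:
    msc_ver = version[msc_pos + 6:msc_pos + 10]   # four digits after "MSC v."
    print(msc_ver)                # 1929
    print(int(msc_ver) >= 1900)   # True -> get_msvcr() would return ['msvcr140']
```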
janus/lib/python3.10/distutils/dep_util.py ADDED
@@ -0,0 +1,92 @@
+"""distutils.dep_util
+
+Utility functions for simple, timestamp-based dependency of files
+and groups of files; also, function based entirely on such
+timestamp dependency analysis."""
+
+import os
+from distutils.errors import DistutilsFileError
+
+
+def newer (source, target):
+    """Return true if 'source' exists and is more recently modified than
+    'target', or if 'source' exists and 'target' doesn't.  Return false if
+    both exist and 'target' is the same age or younger than 'source'.
+    Raise DistutilsFileError if 'source' does not exist.
+    """
+    if not os.path.exists(source):
+        raise DistutilsFileError("file '%s' does not exist" %
+                                 os.path.abspath(source))
+    if not os.path.exists(target):
+        return 1
+
+    from stat import ST_MTIME
+    mtime1 = os.stat(source)[ST_MTIME]
+    mtime2 = os.stat(target)[ST_MTIME]
+
+    return mtime1 > mtime2
+
+# newer ()
+
+
+def newer_pairwise (sources, targets):
+    """Walk two filename lists in parallel, testing if each source is newer
+    than its corresponding target.  Return a pair of lists (sources,
+    targets) where source is newer than target, according to the semantics
+    of 'newer()'.
+    """
+    if len(sources) != len(targets):
+        raise ValueError("'sources' and 'targets' must be same length")
+
+    # build a pair of lists (sources, targets) where source is newer
+    n_sources = []
+    n_targets = []
+    for i in range(len(sources)):
+        if newer(sources[i], targets[i]):
+            n_sources.append(sources[i])
+            n_targets.append(targets[i])
+
+    return (n_sources, n_targets)
+
+# newer_pairwise ()
+
+
+def newer_group (sources, target, missing='error'):
+    """Return true if 'target' is out-of-date with respect to any file
+    listed in 'sources'.  In other words, if 'target' exists and is newer
+    than every file in 'sources', return false; otherwise return true.
+    'missing' controls what we do when a source file is missing; the
+    default ("error") is to blow up with an OSError from inside 'stat()';
+    if it is "ignore", we silently drop any missing source files; if it is
+    "newer", any missing source files make us assume that 'target' is
+    out-of-date (this is handy in "dry-run" mode: it'll make you pretend to
+    carry out commands that wouldn't work because inputs are missing, but
+    that doesn't matter because you're not actually going to run the
+    commands).
+    """
+    # If the target doesn't even exist, then it's definitely out-of-date.
+    if not os.path.exists(target):
+        return 1
+
+    # Otherwise we have to find out the hard way: if *any* source file
+    # is more recent than 'target', then 'target' is out-of-date and
+    # we can immediately return true.  If we fall through to the end
+    # of the loop, then 'target' is up-to-date and we return false.
+    from stat import ST_MTIME
+    target_mtime = os.stat(target)[ST_MTIME]
+    for source in sources:
+        if not os.path.exists(source):
+            if missing == 'error':      # blow up when we stat() the file
+                pass
+            elif missing == 'ignore':   # missing source dropped from
+                continue                #  target's dependency list
+            elif missing == 'newer':    # missing source means target is
+                return 1                #  out-of-date
+
+        source_mtime = os.stat(source)[ST_MTIME]
+        if source_mtime > target_mtime:
+            return 1
+    else:
+        return 0
+
+# newer_group ()
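
The three helpers above answer the usual rebuild questions. A small sketch with hypothetical file names (newer() raises DistutilsFileError if the source is missing):

    from distutils.dep_util import newer, newer_pairwise, newer_group

    if newer('spam.c', 'spam.o'):                 # one source, one target
        print('spam.o is out of date')

    srcs, objs = newer_pairwise(['a.c', 'b.c'],   # element-wise comparison
                                ['a.o', 'b.o'])

    if newer_group(['a.o', 'b.o'], 'prog',        # any input newer than prog?
                   missing='newer'):              # a missing .o forces a relink
        print('prog needs relinking')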
janus/lib/python3.10/distutils/dir_util.py ADDED
@@ -0,0 +1,210 @@
+"""distutils.dir_util
+
+Utility functions for manipulating directories and directory trees."""
+
+import os
+import errno
+from distutils.errors import DistutilsFileError, DistutilsInternalError
+from distutils import log
+
+# cache for by mkpath() -- in addition to cheapening redundant calls,
+# eliminates redundant "creating /foo/bar/baz" messages in dry-run mode
+_path_created = {}
+
+# I don't use os.makedirs because a) it's new to Python 1.5.2, and
+# b) it blows up if the directory already exists (I want to silently
+# succeed in that case).
+def mkpath(name, mode=0o777, verbose=1, dry_run=0):
+    """Create a directory and any missing ancestor directories.
+
+    If the directory already exists (or if 'name' is the empty string, which
+    means the current directory, which of course exists), then do nothing.
+    Raise DistutilsFileError if unable to create some directory along the way
+    (eg. some sub-path exists, but is a file rather than a directory).
+    If 'verbose' is true, print a one-line summary of each mkdir to stdout.
+    Return the list of directories actually created.
+    """
+
+    global _path_created
+
+    # Detect a common bug -- name is None
+    if not isinstance(name, str):
+        raise DistutilsInternalError(
+              "mkpath: 'name' must be a string (got %r)" % (name,))
+
+    # XXX what's the better way to handle verbosity? print as we create
+    # each directory in the path (the current behaviour), or only announce
+    # the creation of the whole path? (quite easy to do the latter since
+    # we're not using a recursive algorithm)
+
+    name = os.path.normpath(name)
+    created_dirs = []
+    if os.path.isdir(name) or name == '':
+        return created_dirs
+    if _path_created.get(os.path.abspath(name)):
+        return created_dirs
+
+    (head, tail) = os.path.split(name)
+    tails = [tail]                      # stack of lone dirs to create
+
+    while head and tail and not os.path.isdir(head):
+        (head, tail) = os.path.split(head)
+        tails.insert(0, tail)           # push next higher dir onto stack
+
+    # now 'head' contains the deepest directory that already exists
+    # (that is, the child of 'head' in 'name' is the highest directory
+    # that does *not* exist)
+    for d in tails:
+        #print "head = %s, d = %s: " % (head, d),
+        head = os.path.join(head, d)
+        abs_head = os.path.abspath(head)
+
+        if _path_created.get(abs_head):
+            continue
+
+        if verbose >= 1:
+            log.info("creating %s", head)
+
+        if not dry_run:
+            try:
+                os.mkdir(head, mode)
+            except OSError as exc:
+                if not (exc.errno == errno.EEXIST and os.path.isdir(head)):
+                    raise DistutilsFileError(
+                          "could not create '%s': %s" % (head, exc.args[-1]))
+            created_dirs.append(head)
+
+        _path_created[abs_head] = 1
+    return created_dirs
+
+def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0):
+    """Create all the empty directories under 'base_dir' needed to put 'files'
+    there.
+
+    'base_dir' is just the name of a directory which doesn't necessarily
+    exist yet; 'files' is a list of filenames to be interpreted relative to
+    'base_dir'.  'base_dir' + the directory portion of every file in 'files'
+    will be created if it doesn't already exist.  'mode', 'verbose' and
+    'dry_run' flags are as for 'mkpath()'.
+    """
+    # First get the list of directories to create
+    need_dir = set()
+    for file in files:
+        need_dir.add(os.path.join(base_dir, os.path.dirname(file)))
+
+    # Now create them
+    for dir in sorted(need_dir):
+        mkpath(dir, mode, verbose=verbose, dry_run=dry_run)
+
+def copy_tree(src, dst, preserve_mode=1, preserve_times=1,
+              preserve_symlinks=0, update=0, verbose=1, dry_run=0):
+    """Copy an entire directory tree 'src' to a new location 'dst'.
+
+    Both 'src' and 'dst' must be directory names.  If 'src' is not a
+    directory, raise DistutilsFileError.  If 'dst' does not exist, it is
+    created with 'mkpath()'.  The end result of the copy is that every
+    file in 'src' is copied to 'dst', and directories under 'src' are
+    recursively copied to 'dst'.  Return the list of files that were
+    copied or might have been copied, using their output name.  The
+    return value is unaffected by 'update' or 'dry_run': it is simply
+    the list of all files under 'src', with the names changed to be
+    under 'dst'.
+
+    'preserve_mode' and 'preserve_times' are the same as for
+    'copy_file'; note that they only apply to regular files, not to
+    directories.  If 'preserve_symlinks' is true, symlinks will be
+    copied as symlinks (on platforms that support them!); otherwise
+    (the default), the destination of the symlink will be copied.
+    'update' and 'verbose' are the same as for 'copy_file'.
+    """
+    from distutils.file_util import copy_file
+
+    if not dry_run and not os.path.isdir(src):
+        raise DistutilsFileError(
+              "cannot copy tree '%s': not a directory" % src)
+    try:
+        names = os.listdir(src)
+    except OSError as e:
+        if dry_run:
+            names = []
+        else:
+            raise DistutilsFileError(
+                  "error listing files in '%s': %s" % (src, e.strerror))
+
+    if not dry_run:
+        mkpath(dst, verbose=verbose)
+
+    outputs = []
+
+    for n in names:
+        src_name = os.path.join(src, n)
+        dst_name = os.path.join(dst, n)
+
+        if n.startswith('.nfs'):
+            # skip NFS rename files
+            continue
+
+        if preserve_symlinks and os.path.islink(src_name):
+            link_dest = os.readlink(src_name)
+            if verbose >= 1:
+                log.info("linking %s -> %s", dst_name, link_dest)
+            if not dry_run:
+                os.symlink(link_dest, dst_name)
+            outputs.append(dst_name)
+
+        elif os.path.isdir(src_name):
+            outputs.extend(
+                copy_tree(src_name, dst_name, preserve_mode,
+                          preserve_times, preserve_symlinks, update,
+                          verbose=verbose, dry_run=dry_run))
+        else:
+            copy_file(src_name, dst_name, preserve_mode,
+                      preserve_times, update, verbose=verbose,
+                      dry_run=dry_run)
+            outputs.append(dst_name)
+
+    return outputs
+
+def _build_cmdtuple(path, cmdtuples):
+    """Helper for remove_tree()."""
+    for f in os.listdir(path):
+        real_f = os.path.join(path,f)
+        if os.path.isdir(real_f) and not os.path.islink(real_f):
+            _build_cmdtuple(real_f, cmdtuples)
+        else:
+            cmdtuples.append((os.remove, real_f))
+    cmdtuples.append((os.rmdir, path))
+
+def remove_tree(directory, verbose=1, dry_run=0):
+    """Recursively remove an entire directory tree.
+
+    Any errors are ignored (apart from being reported to stdout if 'verbose'
+    is true).
+    """
+    global _path_created
+
+    if verbose >= 1:
+        log.info("removing '%s' (and everything under it)", directory)
+    if dry_run:
+        return
+    cmdtuples = []
+    _build_cmdtuple(directory, cmdtuples)
+    for cmd in cmdtuples:
+        try:
+            cmd[0](cmd[1])
+            # remove dir from cache if it's already there
+            abspath = os.path.abspath(cmd[1])
+            if abspath in _path_created:
+                del _path_created[abspath]
+        except OSError as exc:
+            log.warn("error removing %s: %s", directory, exc)
+
+def ensure_relative(path):
+    """Take the full path 'path', and make it a relative path.
+
+    This is useful to make 'path' the second argument to os.path.join().
+    """
+    drive, path = os.path.splitdrive(path)
+    if path[0:1] == os.sep:
+        path = drive + path[1:]
+    return path
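
A short sketch of the directory helpers above, with hypothetical paths (note that mkpath() memoizes what it creates in the module-level _path_created cache):

    from distutils.dir_util import copy_tree, mkpath, remove_tree

    created = mkpath('build/lib/pkg')     # creates every missing ancestor
    outputs = copy_tree('src', 'build/lib/pkg',
                        update=1)         # skip files that are up to date
    print(outputs)                        # all destination names
    remove_tree('build', verbose=0)       # best-effort recursive delete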
janus/lib/python3.10/distutils/dist.py ADDED
@@ -0,0 +1,1256 @@
+"""distutils.dist
+
+Provides the Distribution class, which represents the module distribution
+being built/installed/distributed.
+"""
+
+import sys
+import os
+import re
+from email import message_from_file
+
+try:
+    import warnings
+except ImportError:
+    warnings = None
+
+from distutils.errors import *
+from distutils.fancy_getopt import FancyGetopt, translate_longopt
+from distutils.util import check_environ, strtobool, rfc822_escape
+from distutils import log
+from distutils.debug import DEBUG
+
+# Regex to define acceptable Distutils command names.  This is not *quite*
+# the same as a Python NAME -- I don't allow leading underscores.  The fact
+# that they're very similar is no coincidence; the default naming scheme is
+# to look for a Python module named after the command.
+command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$')
+
+
+def _ensure_list(value, fieldname):
+    if isinstance(value, str):
+        # a string containing comma separated values is okay.  It will
+        # be converted to a list by Distribution.finalize_options().
+        pass
+    elif not isinstance(value, list):
+        # passing a tuple or an iterator perhaps, warn and convert
+        typename = type(value).__name__
+        msg = f"Warning: '{fieldname}' should be a list, got type '{typename}'"
+        log.log(log.WARN, msg)
+        value = list(value)
+    return value
+
+
+class Distribution:
+    """The core of the Distutils.  Most of the work hiding behind 'setup'
+    is really done within a Distribution instance, which farms the work out
+    to the Distutils commands specified on the command line.
+
+    Setup scripts will almost never instantiate Distribution directly,
+    unless the 'setup()' function is totally inadequate to their needs.
+    However, it is conceivable that a setup script might wish to subclass
+    Distribution for some specialized purpose, and then pass the subclass
+    to 'setup()' as the 'distclass' keyword argument.  If so, it is
+    necessary to respect the expectations that 'setup' has of Distribution.
+    See the code for 'setup()', in core.py, for details.
+    """
+
+    # 'global_options' describes the command-line options that may be
+    # supplied to the setup script prior to any actual commands.
+    # Eg. "./setup.py -n" or "./setup.py --quiet" both take advantage of
+    # these global options.  This list should be kept to a bare minimum,
+    # since every global option is also valid as a command option -- and we
+    # don't want to pollute the commands with too many options that they
+    # have minimal control over.
+    # The fourth entry for verbose means that it can be repeated.
+    global_options = [
+        ('verbose', 'v', "run verbosely (default)", 1),
+        ('quiet', 'q', "run quietly (turns verbosity off)"),
+        ('dry-run', 'n', "don't actually do anything"),
+        ('help', 'h', "show detailed help message"),
+        ('no-user-cfg', None,
+            'ignore pydistutils.cfg in your home directory'),
+    ]
+
+    # 'common_usage' is a short (2-3 line) string describing the common
+    # usage of the setup script.
+    common_usage = """\
+Common commands: (see '--help-commands' for more)
+
+  setup.py build      will build the package underneath 'build/'
+  setup.py install    will install the package
+"""
+
+    # options that are not propagated to the commands
+    display_options = [
+        ('help-commands', None,
+         "list all available commands"),
+        ('name', None,
+         "print package name"),
+        ('version', 'V',
+         "print package version"),
+        ('fullname', None,
+         "print <package name>-<version>"),
+        ('author', None,
+         "print the author's name"),
+        ('author-email', None,
+         "print the author's email address"),
+        ('maintainer', None,
+         "print the maintainer's name"),
+        ('maintainer-email', None,
+         "print the maintainer's email address"),
+        ('contact', None,
+         "print the maintainer's name if known, else the author's"),
+        ('contact-email', None,
+         "print the maintainer's email address if known, else the author's"),
+        ('url', None,
+         "print the URL for this package"),
+        ('license', None,
+         "print the license of the package"),
+        ('licence', None,
+         "alias for --license"),
+        ('description', None,
+         "print the package description"),
+        ('long-description', None,
+         "print the long package description"),
+        ('platforms', None,
+         "print the list of platforms"),
+        ('classifiers', None,
+         "print the list of classifiers"),
+        ('keywords', None,
+         "print the list of keywords"),
+        ('provides', None,
+         "print the list of packages/modules provided"),
+        ('requires', None,
+         "print the list of packages/modules required"),
+        ('obsoletes', None,
+         "print the list of packages/modules made obsolete")
+        ]
+    display_option_names = [translate_longopt(x[0]) for x in display_options]
+
+    # negative options are options that exclude other options
+    negative_opt = {'quiet': 'verbose'}
+
+    # -- Creation/initialization methods -------------------------------
+
+    def __init__(self, attrs=None):
+        """Construct a new Distribution instance: initialize all the
+        attributes of a Distribution, and then use 'attrs' (a dictionary
+        mapping attribute names to values) to assign some of those
+        attributes their "real" values.  (Any attributes not mentioned in
+        'attrs' will be assigned to some null value: 0, None, an empty list
+        or dictionary, etc.)  Most importantly, initialize the
+        'command_obj' attribute to the empty dictionary; this will be
+        filled in with real command objects by 'parse_command_line()'.
+        """
+
+        # Default values for our command-line options
+        self.verbose = 1
+        self.dry_run = 0
+        self.help = 0
+        for attr in self.display_option_names:
+            setattr(self, attr, 0)
+
+        # Store the distribution meta-data (name, version, author, and so
+        # forth) in a separate object -- we're getting to have enough
+        # information here (and enough command-line options) that it's
+        # worth it.  Also delegate 'get_XXX()' methods to the 'metadata'
+        # object in a sneaky and underhanded (but efficient!) way.
+        self.metadata = DistributionMetadata()
+        for basename in self.metadata._METHOD_BASENAMES:
+            method_name = "get_" + basename
+            setattr(self, method_name, getattr(self.metadata, method_name))
+
+        # 'cmdclass' maps command names to class objects, so we
+        # can 1) quickly figure out which class to instantiate when
+        # we need to create a new command object, and 2) have a way
+        # for the setup script to override command classes
+        self.cmdclass = {}
+
+        # 'command_packages' is a list of packages in which commands
+        # are searched for.  The factory for command 'foo' is expected
+        # to be named 'foo' in the module 'foo' in one of the packages
+        # named here.  This list is searched from the left; an error
+        # is raised if no named package provides the command being
+        # searched for.  (Always access using get_command_packages().)
+        self.command_packages = None
+
+        # 'script_name' and 'script_args' are usually set to sys.argv[0]
+        # and sys.argv[1:], but they can be overridden when the caller is
+        # not necessarily a setup script run from the command-line.
+        self.script_name = None
+        self.script_args = None
+
+        # 'command_options' is where we store command options between
+        # parsing them (from config files, the command-line, etc.) and when
+        # they are actually needed -- ie. when the command in question is
+        # instantiated.  It is a dictionary of dictionaries of 2-tuples:
+        #   command_options = { command_name : { option : (source, value) } }
+        self.command_options = {}
+
+        # 'dist_files' is the list of (command, pyversion, file) that
+        # have been created by any dist commands run so far. This is
+        # filled regardless of whether the run is dry or not. pyversion
+        # gives sysconfig.get_python_version() if the dist file is
+        # specific to a Python version, 'any' if it is good for all
+        # Python versions on the target platform, and '' for a source
+        # file. pyversion should not be used to specify minimum or
+        # maximum required Python versions; use the metainfo for that
+        # instead.
+        self.dist_files = []
+
+        # These options are really the business of various commands, rather
+        # than of the Distribution itself.  We provide aliases for them in
+        # Distribution as a convenience to the developer.
+        self.packages = None
+        self.package_data = {}
+        self.package_dir = None
+        self.py_modules = None
+        self.libraries = None
+        self.headers = None
+        self.ext_modules = None
+        self.ext_package = None
+        self.include_dirs = None
+        self.extra_path = None
+        self.scripts = None
+        self.data_files = None
+        self.password = ''
+
+        # And now initialize bookkeeping stuff that can't be supplied by
+        # the caller at all.  'command_obj' maps command names to
+        # Command instances -- that's how we enforce that every command
+        # class is a singleton.
+        self.command_obj = {}
+
+        # 'have_run' maps command names to boolean values; it keeps track
+        # of whether we have actually run a particular command, to make it
+        # cheap to "run" a command whenever we think we might need to -- if
+        # it's already been done, no need for expensive filesystem
+        # operations, we just check the 'have_run' dictionary and carry on.
+        # It's only safe to query 'have_run' for a command class that has
+        # been instantiated -- a false value will be inserted when the
+        # command object is created, and replaced with a true value when
+        # the command is successfully run.  Thus it's probably best to use
+        # '.get()' rather than a straight lookup.
+        self.have_run = {}
+
+        # Now we'll use the attrs dictionary (ultimately, keyword args from
+        # the setup script) to possibly override any or all of these
+        # distribution options.
+
+        if attrs:
+            # Pull out the set of command options and work on them
+            # specifically.  Note that this order guarantees that aliased
+            # command options will override any supplied redundantly
+            # through the general options dictionary.
+            options = attrs.get('options')
+            if options is not None:
+                del attrs['options']
+                for (command, cmd_options) in options.items():
+                    opt_dict = self.get_option_dict(command)
+                    for (opt, val) in cmd_options.items():
+                        opt_dict[opt] = ("setup script", val)
+
+            if 'licence' in attrs:
+                attrs['license'] = attrs['licence']
+                del attrs['licence']
+                msg = "'licence' distribution option is deprecated; use 'license'"
+                if warnings is not None:
+                    warnings.warn(msg)
+                else:
+                    sys.stderr.write(msg + "\n")
+
+            # Now work on the rest of the attributes.  Any attribute that's
+            # not already defined is invalid!
+            for (key, val) in attrs.items():
+                if hasattr(self.metadata, "set_" + key):
+                    getattr(self.metadata, "set_" + key)(val)
+                elif hasattr(self.metadata, key):
+                    setattr(self.metadata, key, val)
+                elif hasattr(self, key):
+                    setattr(self, key, val)
+                else:
+                    msg = "Unknown distribution option: %s" % repr(key)
+                    warnings.warn(msg)
+
+        # no-user-cfg is handled before other command line args
+        # because other args override the config files, and this
+        # one is needed before we can load the config files.
+        # If attrs['script_args'] wasn't passed, assume false.
+        #
+        # This also make sure we just look at the global options
+        self.want_user_cfg = True
+
+        if self.script_args is not None:
+            for arg in self.script_args:
+                if not arg.startswith('-'):
+                    break
+                if arg == '--no-user-cfg':
+                    self.want_user_cfg = False
+                    break
+
+        self.finalize_options()
+
+    def get_option_dict(self, command):
+        """Get the option dictionary for a given command.  If that
+        command's option dictionary hasn't been created yet, then create it
+        and return the new dictionary; otherwise, return the existing
+        option dictionary.
+        """
+        dict = self.command_options.get(command)
+        if dict is None:
+            dict = self.command_options[command] = {}
+        return dict
+
+    def dump_option_dicts(self, header=None, commands=None, indent=""):
+        from pprint import pformat
+
+        if commands is None:             # dump all command option dicts
+            commands = sorted(self.command_options.keys())
+
+        if header is not None:
+            self.announce(indent + header)
+            indent = indent + "  "
+
+        if not commands:
+            self.announce(indent + "no commands known yet")
+            return
+
+        for cmd_name in commands:
+            opt_dict = self.command_options.get(cmd_name)
+            if opt_dict is None:
+                self.announce(indent +
+                              "no option dict for '%s' command" % cmd_name)
+            else:
+                self.announce(indent +
+                              "option dict for '%s' command:" % cmd_name)
+                out = pformat(opt_dict)
+                for line in out.split('\n'):
+                    self.announce(indent + "  " + line)
+
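
The '{ command_name : { option : (source, value) } }' layout documented in __init__() above is the hub that config files, the command line, and the setup script all write into. A small sketch (command and option names illustrative; set_verbosity makes announce() visible):

    from distutils import log
    from distutils.dist import Distribution

    log.set_verbosity(log.INFO)                       # let announce() print
    dist = Distribution()
    opt_dict = dist.get_option_dict('build')          # created on first access
    opt_dict['build_base'] = ('setup script', 'out')  # (source, value) pair
    dist.dump_option_dicts(header='current option dicts:')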
331
+ # -- Config file finding/parsing methods ---------------------------
332
+
333
+ def find_config_files(self):
334
+ """Find as many configuration files as should be processed for this
335
+ platform, and return a list of filenames in the order in which they
336
+ should be parsed. The filenames returned are guaranteed to exist
337
+ (modulo nasty race conditions).
338
+
339
+ There are three possible config files: distutils.cfg in the
340
+ Distutils installation directory (ie. where the top-level
341
+ Distutils __inst__.py file lives), a file in the user's home
342
+ directory named .pydistutils.cfg on Unix and pydistutils.cfg
343
+ on Windows/Mac; and setup.cfg in the current directory.
344
+
345
+ The file in the user's home directory can be disabled with the
346
+ --no-user-cfg option.
347
+ """
348
+ files = []
349
+ check_environ()
350
+
351
+ # Where to look for the system-wide Distutils config file
352
+ sys_dir = os.path.dirname(sys.modules['distutils'].__file__)
353
+
354
+ # Look for the system config file
355
+ sys_file = os.path.join(sys_dir, "distutils.cfg")
356
+ if os.path.isfile(sys_file):
357
+ files.append(sys_file)
358
+
359
+ # What to call the per-user config file
360
+ if os.name == 'posix':
361
+ user_filename = ".pydistutils.cfg"
362
+ else:
363
+ user_filename = "pydistutils.cfg"
364
+
365
+ # And look for the user config file
366
+ if self.want_user_cfg:
367
+ user_file = os.path.join(os.path.expanduser('~'), user_filename)
368
+ if os.path.isfile(user_file):
369
+ files.append(user_file)
370
+
371
+ # All platforms support local setup.cfg
372
+ local_file = "setup.cfg"
373
+ if os.path.isfile(local_file):
374
+ files.append(local_file)
375
+
376
+ if DEBUG:
377
+ self.announce("using config files: %s" % ', '.join(files))
378
+
379
+ return files
380
+
381
+ def parse_config_files(self, filenames=None):
382
+ from configparser import ConfigParser
383
+
384
+ # Ignore install directory options if we have a venv
385
+ if sys.prefix != sys.base_prefix:
386
+ ignore_options = [
387
+ 'install-base', 'install-platbase', 'install-lib',
388
+ 'install-platlib', 'install-purelib', 'install-headers',
389
+ 'install-scripts', 'install-data', 'prefix', 'exec-prefix',
390
+ 'home', 'user', 'root']
391
+ else:
392
+ ignore_options = []
393
+
394
+ ignore_options = frozenset(ignore_options)
395
+
396
+ if filenames is None:
397
+ filenames = self.find_config_files()
398
+
399
+ if DEBUG:
400
+ self.announce("Distribution.parse_config_files():")
401
+
402
+ parser = ConfigParser()
403
+ for filename in filenames:
404
+ if DEBUG:
405
+ self.announce(" reading %s" % filename)
406
+ parser.read(filename)
407
+ for section in parser.sections():
408
+ options = parser.options(section)
409
+ opt_dict = self.get_option_dict(section)
410
+
411
+ for opt in options:
412
+ if opt != '__name__' and opt not in ignore_options:
413
+ val = parser.get(section,opt)
414
+ opt = opt.replace('-', '_')
415
+ opt_dict[opt] = (filename, val)
416
+
417
+ # Make the ConfigParser forget everything (so we retain
418
+ # the original filenames that options come from)
419
+ parser.__init__()
420
+
421
+ # If there was a "global" section in the config file, use it
422
+ # to set Distribution options.
423
+
424
+ if 'global' in self.command_options:
425
+ for (opt, (src, val)) in self.command_options['global'].items():
426
+ alias = self.negative_opt.get(opt)
427
+ try:
428
+ if alias:
429
+ setattr(self, alias, not strtobool(val))
430
+ elif opt in ('verbose', 'dry_run'): # ugh!
431
+ setattr(self, opt, strtobool(val))
432
+ else:
433
+ setattr(self, opt, val)
434
+ except ValueError as msg:
435
+ raise DistutilsOptionError(msg)
436
+
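
A sketch of the config-file path above, assuming a hypothetical local setup.cfg whose '[build]' section contains 'build-base = custom_build' (note the '-' to '_' translation performed while reading):

    from distutils.dist import Distribution

    dist = Distribution()
    dist.parse_config_files(['setup.cfg'])  # or None to use find_config_files()
    print(dist.get_option_dict('build'))
    # -> {'build_base': ('setup.cfg', 'custom_build')}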
+    # -- Command-line parsing methods ----------------------------------
+
+    def parse_command_line(self):
+        """Parse the setup script's command line, taken from the
+        'script_args' instance attribute (which defaults to 'sys.argv[1:]'
+        -- see 'setup()' in core.py).  This list is first processed for
+        "global options" -- options that set attributes of the Distribution
+        instance.  Then, it is alternately scanned for Distutils commands
+        and options for that command.  Each new command terminates the
+        options for the previous command.  The allowed options for a
+        command are determined by the 'user_options' attribute of the
+        command class -- thus, we have to be able to load command classes
+        in order to parse the command line.  Any error in that 'options'
+        attribute raises DistutilsGetoptError; any error on the
+        command-line raises DistutilsArgError.  If no Distutils commands
+        were found on the command line, raises DistutilsArgError.  Return
+        true if command-line was successfully parsed and we should carry
+        on with executing commands; false if no errors but we shouldn't
+        execute commands (currently, this only happens if user asks for
+        help).
+        """
+        #
+        # We now have enough information to show the Macintosh dialog
+        # that allows the user to interactively specify the "command line".
+        #
+        toplevel_options = self._get_toplevel_options()
+
+        # We have to parse the command line a bit at a time -- global
+        # options, then the first command, then its options, and so on --
+        # because each command will be handled by a different class, and
+        # the options that are valid for a particular class aren't known
+        # until we have loaded the command class, which doesn't happen
+        # until we know what the command is.
+
+        self.commands = []
+        parser = FancyGetopt(toplevel_options + self.display_options)
+        parser.set_negative_aliases(self.negative_opt)
+        parser.set_aliases({'licence': 'license'})
+        args = parser.getopt(args=self.script_args, object=self)
+        option_order = parser.get_option_order()
+        log.set_verbosity(self.verbose)
+
+        # for display options we return immediately
+        if self.handle_display_options(option_order):
+            return
+        while args:
+            args = self._parse_command_opts(parser, args)
+            if args is None:            # user asked for help (and got it)
+                return
+
+        # Handle the cases of --help as a "global" option, ie.
+        # "setup.py --help" and "setup.py --help command ...".  For the
+        # former, we show global options (--verbose, --dry-run, etc.)
+        # and display-only options (--name, --version, etc.); for the
+        # latter, we omit the display-only options and show help for
+        # each command listed on the command line.
+        if self.help:
+            self._show_help(parser,
+                            display_options=len(self.commands) == 0,
+                            commands=self.commands)
+            return
+
+        # Oops, no commands found -- an end-user error
+        if not self.commands:
+            raise DistutilsArgError("no commands supplied")
+
+        # All is well: return true
+        return True
+
+    def _get_toplevel_options(self):
+        """Return the non-display options recognized at the top level.
+
+        This includes options that are recognized *only* at the top
+        level as well as options recognized for commands.
+        """
+        return self.global_options + [
+            ("command-packages=", None,
+             "list of packages that provide distutils commands"),
+            ]
+
+    def _parse_command_opts(self, parser, args):
+        """Parse the command-line options for a single command.
+        'parser' must be a FancyGetopt instance; 'args' must be the list
+        of arguments, starting with the current command (whose options
+        we are about to parse).  Returns a new version of 'args' with
+        the next command at the front of the list; will be the empty
+        list if there are no more commands on the command line.  Returns
+        None if the user asked for help on this command.
+        """
+        # late import because of mutual dependence between these modules
+        from distutils.cmd import Command
+
+        # Pull the current command from the head of the command line
+        command = args[0]
+        if not command_re.match(command):
+            raise SystemExit("invalid command name '%s'" % command)
+        self.commands.append(command)
+
+        # Dig up the command class that implements this command, so we
+        # 1) know that it's a valid command, and 2) know which options
+        # it takes.
+        try:
+            cmd_class = self.get_command_class(command)
+        except DistutilsModuleError as msg:
+            raise DistutilsArgError(msg)
+
+        # Require that the command class be derived from Command -- want
+        # to be sure that the basic "command" interface is implemented.
+        if not issubclass(cmd_class, Command):
+            raise DistutilsClassError(
+                "command class %s must subclass Command" % cmd_class)
+
+        # Also make sure that the command object provides a list of its
+        # known options.
+        if not (hasattr(cmd_class, 'user_options') and
+                isinstance(cmd_class.user_options, list)):
+            msg = ("command class %s must provide "
+                   "'user_options' attribute (a list of tuples)")
+            raise DistutilsClassError(msg % cmd_class)
+
+        # If the command class has a list of negative alias options,
+        # merge it in with the global negative aliases.
+        negative_opt = self.negative_opt
+        if hasattr(cmd_class, 'negative_opt'):
+            negative_opt = negative_opt.copy()
+            negative_opt.update(cmd_class.negative_opt)
+
+        # Check for help_options in command class.  They have a different
+        # format (tuple of four) so we need to preprocess them here.
+        if (hasattr(cmd_class, 'help_options') and
+                isinstance(cmd_class.help_options, list)):
+            help_options = fix_help_options(cmd_class.help_options)
+        else:
+            help_options = []
+
+        # All commands support the global options too, just by adding
+        # in 'global_options'.
+        parser.set_option_table(self.global_options +
+                                cmd_class.user_options +
+                                help_options)
+        parser.set_negative_aliases(negative_opt)
+        (args, opts) = parser.getopt(args[1:])
+        if hasattr(opts, 'help') and opts.help:
+            self._show_help(parser, display_options=0, commands=[cmd_class])
+            return
+
+        if (hasattr(cmd_class, 'help_options') and
+                isinstance(cmd_class.help_options, list)):
+            help_option_found=0
+            for (help_option, short, desc, func) in cmd_class.help_options:
+                if hasattr(opts, parser.get_attr_name(help_option)):
+                    help_option_found=1
+                    if callable(func):
+                        func()
+                    else:
+                        raise DistutilsClassError(
+                            "invalid help function %r for help option '%s': "
+                            "must be a callable object (function, etc.)"
+                            % (func, help_option))
+
+            if help_option_found:
+                return
+
+        # Put the options from the command-line into their official
+        # holding pen, the 'command_options' dictionary.
+        opt_dict = self.get_option_dict(command)
+        for (name, value) in vars(opts).items():
+            opt_dict[name] = ("command line", value)
+
+        return args
+
+    def finalize_options(self):
+        """Set final values for all the options on the Distribution
+        instance, analogous to the .finalize_options() method of Command
+        objects.
+        """
+        for attr in ('keywords', 'platforms'):
+            value = getattr(self.metadata, attr)
+            if value is None:
+                continue
+            if isinstance(value, str):
+                value = [elm.strip() for elm in value.split(',')]
+                setattr(self.metadata, attr, value)
+
+    def _show_help(self, parser, global_options=1, display_options=1,
+                   commands=[]):
+        """Show help for the setup script command-line in the form of
+        several lists of command-line options.  'parser' should be a
+        FancyGetopt instance; do not expect it to be returned in the
+        same state, as its option table will be reset to make it
+        generate the correct help text.
+
+        If 'global_options' is true, lists the global options:
+        --verbose, --dry-run, etc.  If 'display_options' is true, lists
+        the "display-only" options: --name, --version, etc.  Finally,
+        lists per-command help for every command name or command class
+        in 'commands'.
+        """
+        # late import because of mutual dependence between these modules
+        from distutils.core import gen_usage
+        from distutils.cmd import Command
+
+        if global_options:
+            if display_options:
+                options = self._get_toplevel_options()
+            else:
+                options = self.global_options
+            parser.set_option_table(options)
+            parser.print_help(self.common_usage + "\nGlobal options:")
+            print('')
+
+        if display_options:
+            parser.set_option_table(self.display_options)
+            parser.print_help(
+                "Information display options (just display " +
+                "information, ignore any commands)")
+            print('')
+
+        for command in self.commands:
+            if isinstance(command, type) and issubclass(command, Command):
+                klass = command
+            else:
+                klass = self.get_command_class(command)
+            if (hasattr(klass, 'help_options') and
+                    isinstance(klass.help_options, list)):
+                parser.set_option_table(klass.user_options +
+                                        fix_help_options(klass.help_options))
+            else:
+                parser.set_option_table(klass.user_options)
+            parser.print_help("Options for '%s' command:" % klass.__name__)
+            print('')
+
+        print(gen_usage(self.script_name))
+
+    def handle_display_options(self, option_order):
+        """If there were any non-global "display-only" options
+        (--help-commands or the metadata display options) on the command
+        line, display the requested info and return true; else return
+        false.
+        """
+        from distutils.core import gen_usage
+
+        # User just wants a list of commands -- we'll print it out and stop
+        # processing now (ie. if they ran "setup --help-commands foo bar",
+        # we ignore "foo bar").
+        if self.help_commands:
+            self.print_commands()
+            print('')
+            print(gen_usage(self.script_name))
+            return 1
+
+        # If user supplied any of the "display metadata" options, then
+        # display that metadata in the order in which the user supplied the
+        # metadata options.
+        any_display_options = 0
+        is_display_option = {}
+        for option in self.display_options:
+            is_display_option[option[0]] = 1
+
+        for (opt, val) in option_order:
+            if val and is_display_option.get(opt):
+                opt = translate_longopt(opt)
+                value = getattr(self.metadata, "get_"+opt)()
+                if opt in ['keywords', 'platforms']:
+                    print(','.join(value))
+                elif opt in ('classifiers', 'provides', 'requires',
+                             'obsoletes'):
+                    print('\n'.join(value))
+                else:
+                    print(value)
+                any_display_options = 1
+
+        return any_display_options
+
+    def print_command_list(self, commands, header, max_length):
+        """Print a subset of the list of all commands -- used by
+        'print_commands()'.
+        """
+        print(header + ":")
+
+        for cmd in commands:
+            klass = self.cmdclass.get(cmd)
+            if not klass:
+                klass = self.get_command_class(cmd)
+            try:
+                description = klass.description
+            except AttributeError:
+                description = "(no description available)"
+
+            print("  %-*s  %s" % (max_length, cmd, description))
+
+    def print_commands(self):
+        """Print out a help message listing all available commands with a
+        description of each.  The list is divided into "standard commands"
+        (listed in distutils.command.__all__) and "extra commands"
+        (mentioned in self.cmdclass, but not a standard command).  The
+        descriptions come from the command class attribute
+        'description'.
+        """
+        import distutils.command
+        std_commands = distutils.command.__all__
+        is_std = {}
+        for cmd in std_commands:
+            is_std[cmd] = 1
+
+        extra_commands = []
+        for cmd in self.cmdclass.keys():
+            if not is_std.get(cmd):
+                extra_commands.append(cmd)
+
+        max_length = 0
+        for cmd in (std_commands + extra_commands):
+            if len(cmd) > max_length:
+                max_length = len(cmd)
+
+        self.print_command_list(std_commands,
+                                "Standard commands",
+                                max_length)
+        if extra_commands:
+            print()
+            self.print_command_list(extra_commands,
+                                    "Extra commands",
+                                    max_length)
+
+    def get_command_list(self):
+        """Get a list of (command, description) tuples.
+        The list is divided into "standard commands" (listed in
+        distutils.command.__all__) and "extra commands" (mentioned in
+        self.cmdclass, but not a standard command).  The descriptions come
+        from the command class attribute 'description'.
+        """
+        # Currently this is only used on Mac OS, for the Mac-only GUI
+        # Distutils interface (by Jack Jansen)
+        import distutils.command
+        std_commands = distutils.command.__all__
+        is_std = {}
+        for cmd in std_commands:
+            is_std[cmd] = 1
+
+        extra_commands = []
+        for cmd in self.cmdclass.keys():
+            if not is_std.get(cmd):
+                extra_commands.append(cmd)
+
+        rv = []
+        for cmd in (std_commands + extra_commands):
+            klass = self.cmdclass.get(cmd)
+            if not klass:
+                klass = self.get_command_class(cmd)
+            try:
+                description = klass.description
+            except AttributeError:
+                description = "(no description available)"
+            rv.append((cmd, description))
+        return rv
+
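
parse_command_line() is normally driven by setup() in core.py, but it can be exercised directly; a sketch with illustrative arguments:

    from distutils.dist import Distribution

    dist = Distribution()
    dist.script_name = 'setup.py'
    dist.script_args = ['--dry-run', 'build', '--build-base=out']
    if dist.parse_command_line():             # false only on the --help paths
        print(dist.commands)                  # -> ['build']
        print(dist.get_option_dict('build'))  # option tagged "command line"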
+    # -- Command class/object methods ----------------------------------
+
+    def get_command_packages(self):
+        """Return a list of packages from which commands are loaded."""
+        pkgs = self.command_packages
+        if not isinstance(pkgs, list):
+            if pkgs is None:
+                pkgs = ''
+            pkgs = [pkg.strip() for pkg in pkgs.split(',') if pkg != '']
+            if "distutils.command" not in pkgs:
+                pkgs.insert(0, "distutils.command")
+            self.command_packages = pkgs
+        return pkgs
+
+    def get_command_class(self, command):
+        """Return the class that implements the Distutils command named by
+        'command'.  First we check the 'cmdclass' dictionary; if the
+        command is mentioned there, we fetch the class object from the
+        dictionary and return it.  Otherwise we load the command module
+        ("distutils.command." + command) and fetch the command class from
+        the module.  The loaded class is also stored in 'cmdclass'
+        to speed future calls to 'get_command_class()'.
+
+        Raises DistutilsModuleError if the expected module could not be
+        found, or if that module does not define the expected class.
+        """
+        klass = self.cmdclass.get(command)
+        if klass:
+            return klass
+
+        for pkgname in self.get_command_packages():
+            module_name = "%s.%s" % (pkgname, command)
+            klass_name = command
+
+            try:
+                __import__(module_name)
+                module = sys.modules[module_name]
+            except ImportError:
+                continue
+
+            try:
+                klass = getattr(module, klass_name)
+            except AttributeError:
+                raise DistutilsModuleError(
+                    "invalid command '%s' (no class '%s' in module '%s')"
+                    % (command, klass_name, module_name))
+
+            self.cmdclass[command] = klass
+            return klass
+
+        raise DistutilsModuleError("invalid command '%s'" % command)
+
+    def get_command_obj(self, command, create=1):
+        """Return the command object for 'command'.  Normally this object
+        is cached on a previous call to 'get_command_obj()'; if no command
+        object for 'command' is in the cache, then we either create and
+        return it (if 'create' is true) or return None.
+        """
+        cmd_obj = self.command_obj.get(command)
+        if not cmd_obj and create:
+            if DEBUG:
+                self.announce("Distribution.get_command_obj(): "
+                              "creating '%s' command object" % command)
+
+            klass = self.get_command_class(command)
+            cmd_obj = self.command_obj[command] = klass(self)
+            self.have_run[command] = 0
+
+            # Set any options that were supplied in config files
+            # or on the command line.  (NB. support for error
+            # reporting is lame here: any errors aren't reported
+            # until 'finalize_options()' is called, which means
+            # we won't report the source of the error.)
+            options = self.command_options.get(command)
+            if options:
+                self._set_command_options(cmd_obj, options)
+
+        return cmd_obj
+
+    def _set_command_options(self, command_obj, option_dict=None):
+        """Set the options for 'command_obj' from 'option_dict'.  Basically
+        this means copying elements of a dictionary ('option_dict') to
+        attributes of an instance ('command').
+
+        'command_obj' must be a Command instance.  If 'option_dict' is not
+        supplied, uses the standard option dictionary for this command
+        (from 'self.command_options').
+        """
+        command_name = command_obj.get_command_name()
+        if option_dict is None:
+            option_dict = self.get_option_dict(command_name)
+
+        if DEBUG:
+            self.announce("  setting options for '%s' command:" % command_name)
+        for (option, (source, value)) in option_dict.items():
+            if DEBUG:
+                self.announce("    %s = %s (from %s)" % (option, value,
+                                                         source))
+            try:
+                bool_opts = [translate_longopt(o)
+                             for o in command_obj.boolean_options]
+            except AttributeError:
+                bool_opts = []
+            try:
+                neg_opt = command_obj.negative_opt
+            except AttributeError:
+                neg_opt = {}
+
+            try:
+                is_string = isinstance(value, str)
+                if option in neg_opt and is_string:
+                    setattr(command_obj, neg_opt[option], not strtobool(value))
+                elif option in bool_opts and is_string:
+                    setattr(command_obj, option, strtobool(value))
+                elif hasattr(command_obj, option):
+                    setattr(command_obj, option, value)
+                else:
+                    raise DistutilsOptionError(
+                        "error in %s: command '%s' has no such option '%s'"
+                        % (source, command_name, option))
+            except ValueError as msg:
+                raise DistutilsOptionError(msg)
+
+    def reinitialize_command(self, command, reinit_subcommands=0):
+        """Reinitializes a command to the state it was in when first
+        returned by 'get_command_obj()': ie., initialized but not yet
+        finalized.  This provides the opportunity to sneak option
+        values in programmatically, overriding or supplementing
+        user-supplied values from the config files and command line.
+        You'll have to re-finalize the command object (by calling
+        'finalize_options()' or 'ensure_finalized()') before using it for
+        real.
+
+        'command' should be a command name (string) or command object.  If
+        'reinit_subcommands' is true, also reinitializes the command's
+        sub-commands, as declared by the 'sub_commands' class attribute (if
+        it has one).  See the "install" command for an example.  Only
+        reinitializes the sub-commands that actually matter, ie. those
+        whose test predicates return true.
+
+        Returns the reinitialized command object.
+        """
+        from distutils.cmd import Command
+        if not isinstance(command, Command):
+            command_name = command
+            command = self.get_command_obj(command_name)
+        else:
+            command_name = command.get_command_name()
+
+        if not command.finalized:
+            return command
+        command.initialize_options()
+        command.finalized = 0
+        self.have_run[command_name] = 0
+        self._set_command_options(command)
+
+        if reinit_subcommands:
+            for sub in command.get_sub_commands():
+                self.reinitialize_command(sub, reinit_subcommands)
+
+        return command
+
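
The resolve/cache/reset cycle above can be seen end to end with the stock 'build' command; a sketch:

    from distutils.dist import Distribution

    dist = Distribution()
    cmd = dist.get_command_obj('build')       # imports distutils.command.build
    cmd.ensure_finalized()                    # compute derived option values
    cmd = dist.reinitialize_command('build')  # back to pre-finalized state
    cmd.build_base = 'alt_build'              # sneak in an option value...
    cmd.ensure_finalized()                    # ...then re-finalize before use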
+    # -- Methods that operate on the Distribution ----------------------
+
+    def announce(self, msg, level=log.INFO):
+        log.log(level, msg)
+
+    def run_commands(self):
+        """Run each command that was seen on the setup script command line.
+        Uses the list of commands found and cache of command objects
+        created by 'get_command_obj()'.
+        """
+        for cmd in self.commands:
+            self.run_command(cmd)
+
+    # -- Methods that operate on its Commands --------------------------
+
+    def run_command(self, command):
+        """Do whatever it takes to run a command (including nothing at all,
+        if the command has already been run).  Specifically: if we have
+        already created and run the command named by 'command', return
+        silently without doing anything.  If the command named by 'command'
+        doesn't even have a command object yet, create one.  Then invoke
+        'run()' on that command object (or an existing one).
+        """
+        # Already been here, done that? then return silently.
+        if self.have_run.get(command):
+            return
+
+        log.info("running %s", command)
+        cmd_obj = self.get_command_obj(command)
+        cmd_obj.ensure_finalized()
+        cmd_obj.run()
+        self.have_run[command] = 1
+
+    # -- Distribution query methods ------------------------------------
+
+    def has_pure_modules(self):
+        return len(self.packages or self.py_modules or []) > 0
+
+    def has_ext_modules(self):
+        return self.ext_modules and len(self.ext_modules) > 0
+
+    def has_c_libraries(self):
+        return self.libraries and len(self.libraries) > 0
+
+    def has_modules(self):
+        return self.has_pure_modules() or self.has_ext_modules()
+
+    def has_headers(self):
+        return self.headers and len(self.headers) > 0
+
+    def has_scripts(self):
+        return self.scripts and len(self.scripts) > 0
+
+    def has_data_files(self):
+        return self.data_files and len(self.data_files) > 0
+
+    def is_pure(self):
+        return (self.has_pure_modules() and
+                not self.has_ext_modules() and
+                not self.has_c_libraries())
+
+    # -- Metadata query methods ----------------------------------------
+
+    # If you're looking for 'get_name()', 'get_version()', and so forth,
+    # they are defined in a sneaky way: the constructor binds self.get_XXX
+    # to self.metadata.get_XXX.  The actual code is in the
+    # DistributionMetadata class, below.
+
+class DistributionMetadata:
+    """Dummy class to hold the distribution meta-data: name, version,
+    author, and so forth.
+    """
+
+    _METHOD_BASENAMES = ("name", "version", "author", "author_email",
+                         "maintainer", "maintainer_email", "url",
+                         "license", "description", "long_description",
+                         "keywords", "platforms", "fullname", "contact",
+                         "contact_email", "classifiers", "download_url",
+                         # PEP 314
+                         "provides", "requires", "obsoletes",
+                         )
+
+    def __init__(self, path=None):
+        if path is not None:
+            self.read_pkg_file(open(path))
+        else:
+            self.name = None
+            self.version = None
+            self.author = None
+            self.author_email = None
+            self.maintainer = None
+            self.maintainer_email = None
+            self.url = None
+            self.license = None
+            self.description = None
+            self.long_description = None
+            self.keywords = None
+            self.platforms = None
+            self.classifiers = None
+            self.download_url = None
+            # PEP 314
+            self.provides = None
+            self.requires = None
+            self.obsoletes = None
+
+    def read_pkg_file(self, file):
+        """Reads the metadata values from a file object."""
+        msg = message_from_file(file)
+
+        def _read_field(name):
+            value = msg[name]
+            if value == 'UNKNOWN':
+                return None
+            return value
+
+        def _read_list(name):
+            values = msg.get_all(name, None)
+            if values == []:
+                return None
+            return values
+
+        metadata_version = msg['metadata-version']
+        self.name = _read_field('name')
+        self.version = _read_field('version')
+        self.description = _read_field('summary')
+        # we are filling author only.
+        self.author = _read_field('author')
+        self.maintainer = None
+        self.author_email = _read_field('author-email')
+        self.maintainer_email = None
+        self.url = _read_field('home-page')
+        self.license = _read_field('license')
+
+        if 'download-url' in msg:
+            self.download_url = _read_field('download-url')
+        else:
+            self.download_url = None
+
+        self.long_description = _read_field('description')
+        self.description = _read_field('summary')
+
+        if 'keywords' in msg:
+            self.keywords = _read_field('keywords').split(',')
+
+        self.platforms = _read_list('platform')
+        self.classifiers = _read_list('classifier')
+
+        # PEP 314 - these fields only exist in 1.1
+        if metadata_version == '1.1':
+            self.requires = _read_list('requires')
+            self.provides = _read_list('provides')
+            self.obsoletes = _read_list('obsoletes')
+        else:
+            self.requires = None
+            self.provides = None
+            self.obsoletes = None
+
+    def write_pkg_info(self, base_dir):
+        """Write the PKG-INFO file into the release tree.
+        """
+        with open(os.path.join(base_dir, 'PKG-INFO'), 'w',
+                  encoding='UTF-8') as pkg_info:
+            self.write_pkg_file(pkg_info)
+
+    def write_pkg_file(self, file):
+        """Write the PKG-INFO format data to a file object.
+        """
+        version = '1.0'
+        if (self.provides or self.requires or self.obsoletes or
+                self.classifiers or self.download_url):
+            version = '1.1'
+
+        file.write('Metadata-Version: %s\n' % version)
+        file.write('Name: %s\n' % self.get_name())
+        file.write('Version: %s\n' % self.get_version())
+        file.write('Summary: %s\n' % self.get_description())
+        file.write('Home-page: %s\n' % self.get_url())
+        file.write('Author: %s\n' % self.get_contact())
+        file.write('Author-email: %s\n' % self.get_contact_email())
+        file.write('License: %s\n' % self.get_license())
+        if self.download_url:
+            file.write('Download-URL: %s\n' % self.download_url)
+
+        long_desc = rfc822_escape(self.get_long_description())
+        file.write('Description: %s\n' % long_desc)
+
+        keywords = ','.join(self.get_keywords())
+        if keywords:
+            file.write('Keywords: %s\n' % keywords)
+
+        self._write_list(file, 'Platform', self.get_platforms())
+        self._write_list(file, 'Classifier', self.get_classifiers())
+
+        # PEP 314
+        self._write_list(file, 'Requires', self.get_requires())
+        self._write_list(file, 'Provides', self.get_provides())
+        self._write_list(file, 'Obsoletes', self.get_obsoletes())
+
+    def _write_list(self, file, name, values):
+        for value in values:
+            file.write('%s: %s\n' % (name, value))
+
1157
+ # -- Metadata query methods ----------------------------------------
1158
+
1159
+ def get_name(self):
1160
+ return self.name or "UNKNOWN"
1161
+
1162
+ def get_version(self):
1163
+ return self.version or "0.0.0"
1164
+
1165
+ def get_fullname(self):
1166
+ return "%s-%s" % (self.get_name(), self.get_version())
1167
+
1168
+ def get_author(self):
1169
+ return self.author or "UNKNOWN"
1170
+
1171
+ def get_author_email(self):
1172
+ return self.author_email or "UNKNOWN"
1173
+
1174
+ def get_maintainer(self):
1175
+ return self.maintainer or "UNKNOWN"
1176
+
1177
+ def get_maintainer_email(self):
1178
+ return self.maintainer_email or "UNKNOWN"
1179
+
1180
+ def get_contact(self):
1181
+ return self.maintainer or self.author or "UNKNOWN"
1182
+
1183
+ def get_contact_email(self):
1184
+ return self.maintainer_email or self.author_email or "UNKNOWN"
1185
+
1186
+ def get_url(self):
1187
+ return self.url or "UNKNOWN"
1188
+
1189
+ def get_license(self):
1190
+ return self.license or "UNKNOWN"
1191
+ get_licence = get_license
1192
+
1193
+ def get_description(self):
1194
+ return self.description or "UNKNOWN"
1195
+
1196
+ def get_long_description(self):
1197
+ return self.long_description or "UNKNOWN"
1198
+
1199
+ def get_keywords(self):
1200
+ return self.keywords or []
1201
+
1202
+ def set_keywords(self, value):
1203
+ self.keywords = _ensure_list(value, 'keywords')
1204
+
1205
+ def get_platforms(self):
1206
+ return self.platforms or ["UNKNOWN"]
1207
+
1208
+ def set_platforms(self, value):
1209
+ self.platforms = _ensure_list(value, 'platforms')
1210
+
1211
+ def get_classifiers(self):
1212
+ return self.classifiers or []
1213
+
1214
+ def set_classifiers(self, value):
1215
+ self.classifiers = _ensure_list(value, 'classifiers')
1216
+
1217
+ def get_download_url(self):
1218
+ return self.download_url or "UNKNOWN"
1219
+
1220
+ # PEP 314
1221
+ def get_requires(self):
1222
+ return self.requires or []
1223
+
1224
+ def set_requires(self, value):
1225
+ import distutils.versionpredicate
1226
+ for v in value:
1227
+ distutils.versionpredicate.VersionPredicate(v)
1228
+ self.requires = list(value)
1229
+
1230
+ def get_provides(self):
1231
+ return self.provides or []
1232
+
1233
+ def set_provides(self, value):
1234
+ value = [v.strip() for v in value]
1235
+ for v in value:
1236
+ import distutils.versionpredicate
1237
+ distutils.versionpredicate.split_provision(v)
1238
+ self.provides = value
1239
+
1240
+ def get_obsoletes(self):
1241
+ return self.obsoletes or []
1242
+
1243
+ def set_obsoletes(self, value):
1244
+ import distutils.versionpredicate
1245
+ for v in value:
1246
+ distutils.versionpredicate.VersionPredicate(v)
1247
+ self.obsoletes = list(value)
1248
+
1249
+ def fix_help_options(options):
1250
+ """Convert a 4-tuple 'help_options' list as found in various command
1251
+ classes to the 3-tuple form required by FancyGetopt.
1252
+ """
1253
+ new_options = []
1254
+ for help_tuple in options:
1255
+ new_options.append(help_tuple[0:3])
1256
+ return new_options
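For orientation, a minimal sketch of how the DistributionMetadata class above round-trips PKG-INFO data; the package name and version are hypothetical, and it assumes a CPython 3.10 interpreter where distutils.dist is importable:

    import io
    from distutils.dist import DistributionMetadata

    meta = DistributionMetadata()
    meta.name = 'example'            # hypothetical values
    meta.version = '1.0'
    buf = io.StringIO()
    meta.write_pkg_file(buf)         # emits Metadata-Version 1.0 fields
    buf.seek(0)
    meta2 = DistributionMetadata()
    meta2.read_pkg_file(buf)         # parses the fields back
    assert meta2.get_fullname() == 'example-1.0'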
janus/lib/python3.10/distutils/errors.py ADDED
@@ -0,0 +1,97 @@
+"""distutils.errors
+
+Provides exceptions used by the Distutils modules.  Note that Distutils
+modules may raise standard exceptions; in particular, SystemExit is
+usually raised for errors that are obviously the end-user's fault
+(eg. bad command-line arguments).
+
+This module is safe to use in "from ... import *" mode; it only exports
+symbols whose names start with "Distutils" and end with "Error"."""
+
+class DistutilsError(Exception):
+    """The root of all Distutils evil."""
+    pass
+
+class DistutilsModuleError(DistutilsError):
+    """Unable to load an expected module, or to find an expected class
+    within some module (in particular, command modules and classes)."""
+    pass
+
+class DistutilsClassError(DistutilsError):
+    """Some command class (or possibly distribution class, if anyone
+    feels a need to subclass Distribution) is found not to be holding
+    up its end of the bargain, ie. implementing some part of the
+    "command" interface."""
+    pass
+
+class DistutilsGetoptError(DistutilsError):
+    """The option table provided to 'fancy_getopt()' is bogus."""
+    pass
+
+class DistutilsArgError(DistutilsError):
+    """Raised by fancy_getopt in response to getopt.error -- ie. an
+    error in the command line usage."""
+    pass
+
+class DistutilsFileError(DistutilsError):
+    """Any problems in the filesystem: expected file not found, etc.
+    Typically this is for problems that we detect before OSError
+    could be raised."""
+    pass
+
+class DistutilsOptionError(DistutilsError):
+    """Syntactic/semantic errors in command options, such as use of
+    mutually conflicting options, or inconsistent options,
+    badly-spelled values, etc.  No distinction is made between option
+    values originating in the setup script, the command line, config
+    files, or what-have-you -- but if we *know* something originated in
+    the setup script, we'll raise DistutilsSetupError instead."""
+    pass
+
+class DistutilsSetupError(DistutilsError):
+    """For errors that can be definitely blamed on the setup script,
+    such as invalid keyword arguments to 'setup()'."""
+    pass
+
+class DistutilsPlatformError(DistutilsError):
+    """We don't know how to do something on the current platform (but
+    we do know how to do it on some platform) -- eg. trying to compile
+    C files on a platform not supported by a CCompiler subclass."""
+    pass
+
+class DistutilsExecError(DistutilsError):
+    """Any problems executing an external program (such as the C
+    compiler, when compiling C files)."""
+    pass
+
+class DistutilsInternalError(DistutilsError):
+    """Internal inconsistencies or impossibilities (obviously, this
+    should never be seen if the code is working!)."""
+    pass
+
+class DistutilsTemplateError(DistutilsError):
+    """Syntax error in a file list template."""
+
+class DistutilsByteCompileError(DistutilsError):
+    """Byte compile error."""
+
+# Exception classes used by the CCompiler implementation classes
+class CCompilerError(Exception):
+    """Some compile/link operation failed."""
+
+class PreprocessError(CCompilerError):
+    """Failure to preprocess one or more C/C++ files."""
+
+class CompileError(CCompilerError):
+    """Failure to compile one or more C/C++ source files."""
+
+class LibError(CCompilerError):
+    """Failure to create a static library from one or more C/C++ object
+    files."""
+
+class LinkError(CCompilerError):
+    """Failure to link one or more C/C++ object files into an executable
+    or shared library file."""
+
+class UnknownFileError(CCompilerError):
+    """Attempt to process an unknown file type."""
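A short usage sketch for the exception hierarchy above, assuming the distutils.spawn module from the same tree; the command name is deliberately bogus:

    from distutils.errors import DistutilsError, DistutilsExecError
    from distutils.spawn import spawn

    try:
        spawn(['no-such-command'])        # external program fails to start
    except DistutilsExecError as exc:     # subclass of DistutilsError
        print('spawn failed:', exc)

Because every Distutils exception derives from DistutilsError (and every compiler exception from CCompilerError), callers can catch either root class to handle the whole family.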
janus/lib/python3.10/distutils/tests/__pycache__/test_build_py.cpython-310.pyc ADDED
Binary file (5.02 kB)
janus/lib/python3.10/distutils/tests/includetest.rst ADDED
@@ -0,0 +1 @@
+This should be included.
janus/lib/python3.10/distutils/tests/test_build_ext.py ADDED
@@ -0,0 +1,553 @@
+import sys
+import os
+from io import StringIO
+import textwrap
+
+from distutils.core import Distribution
+from distutils.command.build_ext import build_ext
+from distutils import sysconfig
+from distutils.tests.support import (TempdirManager, LoggingSilencer,
+                                     copy_xxmodule_c, fixup_build_ext)
+from distutils.extension import Extension
+from distutils.errors import (
+    CompileError, DistutilsPlatformError, DistutilsSetupError,
+    UnknownFileError)
+
+import unittest
+from test import support
+from test.support import os_helper
+from test.support.script_helper import assert_python_ok
+
+# http://bugs.python.org/issue4373
+# Don't load the xx module more than once.
+ALREADY_TESTED = False
+
+
+class BuildExtTestCase(TempdirManager,
+                       LoggingSilencer,
+                       unittest.TestCase):
+    def setUp(self):
+        # Create a simple test environment
+        super(BuildExtTestCase, self).setUp()
+        self.tmp_dir = self.mkdtemp()
+        import site
+        self.old_user_base = site.USER_BASE
+        site.USER_BASE = self.mkdtemp()
+        from distutils.command import build_ext
+        build_ext.USER_BASE = site.USER_BASE
+        self.old_config_vars = dict(sysconfig._config_vars)
+
+        # bpo-30132: On Windows, a .pdb file may be created in the current
+        # working directory. Create a temporary working directory to cleanup
+        # everything at the end of the test.
+        change_cwd = os_helper.change_cwd(self.tmp_dir)
+        change_cwd.__enter__()
+        self.addCleanup(change_cwd.__exit__, None, None, None)
+
+    def tearDown(self):
+        import site
+        site.USER_BASE = self.old_user_base
+        from distutils.command import build_ext
+        build_ext.USER_BASE = self.old_user_base
+        sysconfig._config_vars.clear()
+        sysconfig._config_vars.update(self.old_config_vars)
+        super(BuildExtTestCase, self).tearDown()
+
+    def build_ext(self, *args, **kwargs):
+        return build_ext(*args, **kwargs)
+
+    def test_build_ext(self):
+        cmd = support.missing_compiler_executable()
+        if cmd is not None:
+            self.skipTest('The %r command is not found' % cmd)
+        global ALREADY_TESTED
+        copy_xxmodule_c(self.tmp_dir)
+        xx_c = os.path.join(self.tmp_dir, 'xxmodule.c')
+        xx_ext = Extension('xx', [xx_c])
+        dist = Distribution({'name': 'xx', 'ext_modules': [xx_ext]})
+        dist.package_dir = self.tmp_dir
+        cmd = self.build_ext(dist)
+        fixup_build_ext(cmd)
+        cmd.build_lib = self.tmp_dir
+        cmd.build_temp = self.tmp_dir
+
+        old_stdout = sys.stdout
+        if not support.verbose:
+            # silence compiler output
+            sys.stdout = StringIO()
+        try:
+            cmd.ensure_finalized()
+            cmd.run()
+        finally:
+            sys.stdout = old_stdout
+
+        if ALREADY_TESTED:
+            self.skipTest('Already tested in %s' % ALREADY_TESTED)
+        else:
+            ALREADY_TESTED = type(self).__name__
+
+        code = textwrap.dedent(f"""
+            tmp_dir = {self.tmp_dir!r}
+
+            import sys
+            import unittest
+            from test import support
+
+            sys.path.insert(0, tmp_dir)
+            import xx
+
+            class Tests(unittest.TestCase):
+                def test_xx(self):
+                    for attr in ('error', 'foo', 'new', 'roj'):
+                        self.assertTrue(hasattr(xx, attr))
+
+                    self.assertEqual(xx.foo(2, 5), 7)
+                    self.assertEqual(xx.foo(13, 15), 28)
+                    self.assertEqual(xx.new().demo(), None)
+                    if support.HAVE_DOCSTRINGS:
+                        doc = 'This is a template module just for instruction.'
+                        self.assertEqual(xx.__doc__, doc)
+                    self.assertIsInstance(xx.Null(), xx.Null)
+                    self.assertIsInstance(xx.Str(), xx.Str)
+
+
+            unittest.main()
+        """)
+        assert_python_ok('-c', code)
+
+    def test_solaris_enable_shared(self):
+        dist = Distribution({'name': 'xx'})
+        cmd = self.build_ext(dist)
+        old = sys.platform
+
+        sys.platform = 'sunos'  # fooling finalize_options
+        from distutils.sysconfig import _config_vars
+        old_var = _config_vars.get('Py_ENABLE_SHARED')
+        _config_vars['Py_ENABLE_SHARED'] = 1
+        try:
+            cmd.ensure_finalized()
+        finally:
+            sys.platform = old
+            if old_var is None:
+                del _config_vars['Py_ENABLE_SHARED']
+            else:
+                _config_vars['Py_ENABLE_SHARED'] = old_var
+
+        # make sure we get some library dirs under solaris
+        self.assertGreater(len(cmd.library_dirs), 0)
+
+    def test_user_site(self):
+        import site
+        dist = Distribution({'name': 'xx'})
+        cmd = self.build_ext(dist)
+
+        # making sure the user option is there
+        options = [name for name, short, label in
+                   cmd.user_options]
+        self.assertIn('user', options)
+
+        # setting a value
+        cmd.user = 1
+
+        # setting user based lib and include
+        lib = os.path.join(site.USER_BASE, 'lib')
+        incl = os.path.join(site.USER_BASE, 'include')
+        os.mkdir(lib)
+        os.mkdir(incl)
+
+        # let's run finalize
+        cmd.ensure_finalized()
+
+        # see if include_dirs and library_dirs
+        # were set
+        self.assertIn(lib, cmd.library_dirs)
+        self.assertIn(lib, cmd.rpath)
+        self.assertIn(incl, cmd.include_dirs)
+
+    def test_optional_extension(self):
+
+        # this extension will fail, but let's ignore this failure
+        # with the optional argument.
+        modules = [Extension('foo', ['xxx'], optional=False)]
+        dist = Distribution({'name': 'xx', 'ext_modules': modules})
+        cmd = self.build_ext(dist)
+        cmd.ensure_finalized()
+        self.assertRaises((UnknownFileError, CompileError),
+                          cmd.run)  # should raise an error
+
+        modules = [Extension('foo', ['xxx'], optional=True)]
+        dist = Distribution({'name': 'xx', 'ext_modules': modules})
+        cmd = self.build_ext(dist)
+        cmd.ensure_finalized()
+        cmd.run()  # should pass
+
+    def test_finalize_options(self):
+        # Make sure Python's include directories (for Python.h, pyconfig.h,
+        # etc.) are in the include search path.
+        modules = [Extension('foo', ['xxx'], optional=False)]
+        dist = Distribution({'name': 'xx', 'ext_modules': modules})
+        cmd = self.build_ext(dist)
+        cmd.finalize_options()
+
+        py_include = sysconfig.get_python_inc()
+        for p in py_include.split(os.path.pathsep):
+            self.assertIn(p, cmd.include_dirs)
+
+        plat_py_include = sysconfig.get_python_inc(plat_specific=1)
+        for p in plat_py_include.split(os.path.pathsep):
+            self.assertIn(p, cmd.include_dirs)
+
+        # make sure cmd.libraries is turned into a list
+        # if it's a string
+        cmd = self.build_ext(dist)
+        cmd.libraries = 'my_lib, other_lib lastlib'
+        cmd.finalize_options()
+        self.assertEqual(cmd.libraries, ['my_lib', 'other_lib', 'lastlib'])
+
+        # make sure cmd.library_dirs is turned into a list
+        # if it's a string
+        cmd = self.build_ext(dist)
+        cmd.library_dirs = 'my_lib_dir%sother_lib_dir' % os.pathsep
+        cmd.finalize_options()
+        self.assertIn('my_lib_dir', cmd.library_dirs)
+        self.assertIn('other_lib_dir', cmd.library_dirs)
+
+        # make sure rpath is turned into a list
+        # if it's a string
+        cmd = self.build_ext(dist)
+        cmd.rpath = 'one%stwo' % os.pathsep
+        cmd.finalize_options()
+        self.assertEqual(cmd.rpath, ['one', 'two'])
+
+        # make sure cmd.link_objects is turned into a list
+        # if it's a string
+        cmd = build_ext(dist)
+        cmd.link_objects = 'one two,three'
+        cmd.finalize_options()
+        self.assertEqual(cmd.link_objects, ['one', 'two', 'three'])
+
+        # XXX more tests to perform for win32
+
+        # make sure define is turned into 2-tuples
+        # strings if they are ','-separated strings
+        cmd = self.build_ext(dist)
+        cmd.define = 'one,two'
+        cmd.finalize_options()
+        self.assertEqual(cmd.define, [('one', '1'), ('two', '1')])
+
+        # make sure undef is turned into a list of
+        # strings if they are ','-separated strings
+        cmd = self.build_ext(dist)
+        cmd.undef = 'one,two'
+        cmd.finalize_options()
+        self.assertEqual(cmd.undef, ['one', 'two'])
+
+        # make sure swig_opts is turned into a list
+        cmd = self.build_ext(dist)
+        cmd.swig_opts = None
+        cmd.finalize_options()
+        self.assertEqual(cmd.swig_opts, [])
+
+        cmd = self.build_ext(dist)
+        cmd.swig_opts = '1 2'
+        cmd.finalize_options()
+        self.assertEqual(cmd.swig_opts, ['1', '2'])
+
+    def test_check_extensions_list(self):
+        dist = Distribution()
+        cmd = self.build_ext(dist)
+        cmd.finalize_options()
+
+        #'extensions' option must be a list of Extension instances
+        self.assertRaises(DistutilsSetupError,
+                          cmd.check_extensions_list, 'foo')
+
+        # each element of 'ext_modules' option must be an
+        # Extension instance or 2-tuple
+        exts = [('bar', 'foo', 'bar'), 'foo']
+        self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts)
+
+        # first element of each tuple in 'ext_modules'
+        # must be the extension name (a string) and match
+        # a python dotted-separated name
+        exts = [('foo-bar', '')]
+        self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts)
+
+        # second element of each tuple in 'ext_modules'
+        # must be a dictionary (build info)
+        exts = [('foo.bar', '')]
+        self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts)
+
+        # ok this one should pass
+        exts = [('foo.bar', {'sources': [''], 'libraries': 'foo',
+                             'some': 'bar'})]
+        cmd.check_extensions_list(exts)
+        ext = exts[0]
+        self.assertIsInstance(ext, Extension)
+
+        # check_extensions_list adds in ext the values passed
+        # when they are in ('include_dirs', 'library_dirs', 'libraries'
+        # 'extra_objects', 'extra_compile_args', 'extra_link_args')
+        self.assertEqual(ext.libraries, 'foo')
+        self.assertFalse(hasattr(ext, 'some'))
+
+        # 'macros' element of build info dict must be 1- or 2-tuple
+        exts = [('foo.bar', {'sources': [''], 'libraries': 'foo',
+                             'some': 'bar', 'macros': [('1', '2', '3'), 'foo']})]
+        self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts)
+
+        exts[0][1]['macros'] = [('1', '2'), ('3',)]
+        cmd.check_extensions_list(exts)
+        self.assertEqual(exts[0].undef_macros, ['3'])
+        self.assertEqual(exts[0].define_macros, [('1', '2')])
+
+    def test_get_source_files(self):
+        modules = [Extension('foo', ['xxx'], optional=False)]
+        dist = Distribution({'name': 'xx', 'ext_modules': modules})
+        cmd = self.build_ext(dist)
+        cmd.ensure_finalized()
+        self.assertEqual(cmd.get_source_files(), ['xxx'])
+
+    def test_unicode_module_names(self):
+        modules = [
+            Extension('foo', ['aaa'], optional=False),
+            Extension('föö', ['uuu'], optional=False),
+        ]
+        dist = Distribution({'name': 'xx', 'ext_modules': modules})
+        cmd = self.build_ext(dist)
+        cmd.ensure_finalized()
+        self.assertRegex(cmd.get_ext_filename(modules[0].name), r'foo(_d)?\..*')
+        self.assertRegex(cmd.get_ext_filename(modules[1].name), r'föö(_d)?\..*')
+        self.assertEqual(cmd.get_export_symbols(modules[0]), ['PyInit_foo'])
+        self.assertEqual(cmd.get_export_symbols(modules[1]), ['PyInitU_f_gkaa'])
+
+    def test_compiler_option(self):
+        # cmd.compiler is an option and
+        # should not be overridden by a compiler instance
+        # when the command is run
+        dist = Distribution()
+        cmd = self.build_ext(dist)
+        cmd.compiler = 'unix'
+        cmd.ensure_finalized()
+        cmd.run()
+        self.assertEqual(cmd.compiler, 'unix')
+
+    def test_get_outputs(self):
+        cmd = support.missing_compiler_executable()
+        if cmd is not None:
+            self.skipTest('The %r command is not found' % cmd)
+        tmp_dir = self.mkdtemp()
+        c_file = os.path.join(tmp_dir, 'foo.c')
+        self.write_file(c_file, 'void PyInit_foo(void) {}\n')
+        ext = Extension('foo', [c_file], optional=False)
+        dist = Distribution({'name': 'xx',
+                             'ext_modules': [ext]})
+        cmd = self.build_ext(dist)
+        fixup_build_ext(cmd)
+        cmd.ensure_finalized()
+        self.assertEqual(len(cmd.get_outputs()), 1)
+
+        cmd.build_lib = os.path.join(self.tmp_dir, 'build')
+        cmd.build_temp = os.path.join(self.tmp_dir, 'tempt')
+
+        # issue #5977 : distutils build_ext.get_outputs
+        # returns wrong result with --inplace
+        other_tmp_dir = os.path.realpath(self.mkdtemp())
+        old_wd = os.getcwd()
+        os.chdir(other_tmp_dir)
+        try:
+            cmd.inplace = 1
+            cmd.run()
+            so_file = cmd.get_outputs()[0]
+        finally:
+            os.chdir(old_wd)
+        self.assertTrue(os.path.exists(so_file))
+        ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
+        self.assertTrue(so_file.endswith(ext_suffix))
+        so_dir = os.path.dirname(so_file)
+        self.assertEqual(so_dir, other_tmp_dir)
+
+        cmd.inplace = 0
+        cmd.compiler = None
+        cmd.run()
+        so_file = cmd.get_outputs()[0]
+        self.assertTrue(os.path.exists(so_file))
+        self.assertTrue(so_file.endswith(ext_suffix))
+        so_dir = os.path.dirname(so_file)
+        self.assertEqual(so_dir, cmd.build_lib)
+
+        # inplace = 0, cmd.package = 'bar'
+        build_py = cmd.get_finalized_command('build_py')
+        build_py.package_dir = {'': 'bar'}
+        path = cmd.get_ext_fullpath('foo')
+        # checking that the last directory is the build_dir
+        path = os.path.split(path)[0]
+        self.assertEqual(path, cmd.build_lib)
+
+        # inplace = 1, cmd.package = 'bar'
+        cmd.inplace = 1
+        other_tmp_dir = os.path.realpath(self.mkdtemp())
+        old_wd = os.getcwd()
+        os.chdir(other_tmp_dir)
+        try:
+            path = cmd.get_ext_fullpath('foo')
+        finally:
+            os.chdir(old_wd)
+        # checking that the last directory is bar
+        path = os.path.split(path)[0]
+        lastdir = os.path.split(path)[-1]
+        self.assertEqual(lastdir, 'bar')
+
+    def test_ext_fullpath(self):
+        ext = sysconfig.get_config_var('EXT_SUFFIX')
+        # building lxml.etree inplace
+        #etree_c = os.path.join(self.tmp_dir, 'lxml.etree.c')
+        #etree_ext = Extension('lxml.etree', [etree_c])
+        #dist = Distribution({'name': 'lxml', 'ext_modules': [etree_ext]})
+        dist = Distribution()
+        cmd = self.build_ext(dist)
+        cmd.inplace = 1
+        cmd.distribution.package_dir = {'': 'src'}
+        cmd.distribution.packages = ['lxml', 'lxml.html']
+        curdir = os.getcwd()
+        wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext)
+        path = cmd.get_ext_fullpath('lxml.etree')
+        self.assertEqual(wanted, path)
+
+        # building lxml.etree not inplace
+        cmd.inplace = 0
+        cmd.build_lib = os.path.join(curdir, 'tmpdir')
+        wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + ext)
+        path = cmd.get_ext_fullpath('lxml.etree')
+        self.assertEqual(wanted, path)
+
+        # building twisted.runner.portmap not inplace
+        build_py = cmd.get_finalized_command('build_py')
+        build_py.package_dir = {}
+        cmd.distribution.packages = ['twisted', 'twisted.runner.portmap']
+        path = cmd.get_ext_fullpath('twisted.runner.portmap')
+        wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner',
+                              'portmap' + ext)
+        self.assertEqual(wanted, path)
+
+        # building twisted.runner.portmap inplace
+        cmd.inplace = 1
+        path = cmd.get_ext_fullpath('twisted.runner.portmap')
+        wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext)
+        self.assertEqual(wanted, path)
+
+
+    @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX')
+    def test_deployment_target_default(self):
+        # Issue 9516: Test that, in the absence of the environment variable,
+        # an extension module is compiled with the same deployment target as
+        # the interpreter.
+        self._try_compile_deployment_target('==', None)
+
+    @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX')
+    def test_deployment_target_too_low(self):
+        # Issue 9516: Test that an extension module is not allowed to be
+        # compiled with a deployment target less than that of the interpreter.
+        self.assertRaises(DistutilsPlatformError,
+                          self._try_compile_deployment_target, '>', '10.1')
+
+    @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX')
+    def test_deployment_target_higher_ok(self):
+        # Issue 9516: Test that an extension module can be compiled with a
+        # deployment target higher than that of the interpreter: the ext
+        # module may depend on some newer OS feature.
+        deptarget = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
+        if deptarget:
+            # increment the minor version number (i.e. 10.6 -> 10.7)
+            deptarget = [int(x) for x in deptarget.split('.')]
+            deptarget[-1] += 1
+            deptarget = '.'.join(str(i) for i in deptarget)
+            self._try_compile_deployment_target('<', deptarget)
+
+    def _try_compile_deployment_target(self, operator, target):
+        orig_environ = os.environ
+        os.environ = orig_environ.copy()
+        self.addCleanup(setattr, os, 'environ', orig_environ)
+
+        if target is None:
+            if os.environ.get('MACOSX_DEPLOYMENT_TARGET'):
+                del os.environ['MACOSX_DEPLOYMENT_TARGET']
+        else:
+            os.environ['MACOSX_DEPLOYMENT_TARGET'] = target
+
+        deptarget_c = os.path.join(self.tmp_dir, 'deptargetmodule.c')
+
+        with open(deptarget_c, 'w') as fp:
+            fp.write(textwrap.dedent('''\
+                #include <AvailabilityMacros.h>
+
+                int dummy;
+
+                #if TARGET %s MAC_OS_X_VERSION_MIN_REQUIRED
+                #else
+                #error "Unexpected target"
+                #endif
+
+            ''' % operator))
+
+        # get the deployment target that the interpreter was built with
+        target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
+        target = tuple(map(int, target.split('.')[0:2]))
+        # format the target value as defined in the Apple
+        # Availability Macros.  We can't use the macro names since
+        # at least one value we test with will not exist yet.
+        if target[:2] < (10, 10):
+            # for 10.1 through 10.9.x -> "10n0"
+            target = '%02d%01d0' % target
+        else:
+            # for 10.10 and beyond -> "10nn00"
+            if len(target) >= 2:
+                target = '%02d%02d00' % target
+            else:
+                # 11 and later can have no minor version (11 instead of 11.0)
+                target = '%02d0000' % target
+        deptarget_ext = Extension(
+            'deptarget',
+            [deptarget_c],
+            extra_compile_args=['-DTARGET=%s' % (target,)],
+        )
+        dist = Distribution({
+            'name': 'deptarget',
+            'ext_modules': [deptarget_ext]
+        })
+        dist.package_dir = self.tmp_dir
+        cmd = self.build_ext(dist)
+        cmd.build_lib = self.tmp_dir
+        cmd.build_temp = self.tmp_dir
+
+        try:
+            old_stdout = sys.stdout
+            if not support.verbose:
+                # silence compiler output
+                sys.stdout = StringIO()
+            try:
+                cmd.ensure_finalized()
+                cmd.run()
+            finally:
+                sys.stdout = old_stdout
+
+        except CompileError:
+            self.fail("Wrong deployment target during compilation")
+
+
+class ParallelBuildExtTestCase(BuildExtTestCase):
+
+    def build_ext(self, *args, **kwargs):
+        build_ext = super().build_ext(*args, **kwargs)
+        build_ext.parallel = True
+        return build_ext
+
+
+def test_suite():
+    suite = unittest.TestSuite()
+    suite.addTest(unittest.makeSuite(BuildExtTestCase))
+    suite.addTest(unittest.makeSuite(ParallelBuildExtTestCase))
+    return suite
+
+if __name__ == '__main__':
+    support.run_unittest(__name__)
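Each of these test modules exposes a test_suite() helper, so a suite can be run programmatically as well as through the module's __main__ block; a minimal sketch, assuming the distutils.tests package is importable:

    import unittest
    from distutils.tests import test_build_ext

    # build the suite via the module's own factory and run it verbosely
    unittest.TextTestRunner(verbosity=2).run(test_build_ext.test_suite())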
janus/lib/python3.10/distutils/tests/test_build_scripts.py ADDED
@@ -0,0 +1,112 @@
+"""Tests for distutils.command.build_scripts."""
+
+import os
+import unittest
+
+from distutils.command.build_scripts import build_scripts
+from distutils.core import Distribution
+from distutils import sysconfig
+
+from distutils.tests import support
+from test.support import run_unittest
+
+
+class BuildScriptsTestCase(support.TempdirManager,
+                           support.LoggingSilencer,
+                           unittest.TestCase):
+
+    def test_default_settings(self):
+        cmd = self.get_build_scripts_cmd("/foo/bar", [])
+        self.assertFalse(cmd.force)
+        self.assertIsNone(cmd.build_dir)
+
+        cmd.finalize_options()
+
+        self.assertTrue(cmd.force)
+        self.assertEqual(cmd.build_dir, "/foo/bar")
+
+    def test_build(self):
+        source = self.mkdtemp()
+        target = self.mkdtemp()
+        expected = self.write_sample_scripts(source)
+
+        cmd = self.get_build_scripts_cmd(target,
+                                         [os.path.join(source, fn)
+                                          for fn in expected])
+        cmd.finalize_options()
+        cmd.run()
+
+        built = os.listdir(target)
+        for name in expected:
+            self.assertIn(name, built)
+
+    def get_build_scripts_cmd(self, target, scripts):
+        import sys
+        dist = Distribution()
+        dist.scripts = scripts
+        dist.command_obj["build"] = support.DummyCommand(
+            build_scripts=target,
+            force=1,
+            executable=sys.executable
+            )
+        return build_scripts(dist)
+
+    def write_sample_scripts(self, dir):
+        expected = []
+        expected.append("script1.py")
+        self.write_script(dir, "script1.py",
+                          ("#! /usr/bin/env python2.3\n"
+                           "# bogus script w/ Python sh-bang\n"
+                           "pass\n"))
+        expected.append("script2.py")
+        self.write_script(dir, "script2.py",
+                          ("#!/usr/bin/python\n"
+                           "# bogus script w/ Python sh-bang\n"
+                           "pass\n"))
+        expected.append("shell.sh")
+        self.write_script(dir, "shell.sh",
+                          ("#!/bin/sh\n"
+                           "# bogus shell script w/ sh-bang\n"
+                           "exit 0\n"))
+        return expected
+
+    def write_script(self, dir, name, text):
+        f = open(os.path.join(dir, name), "w")
+        try:
+            f.write(text)
+        finally:
+            f.close()
+
+    def test_version_int(self):
+        source = self.mkdtemp()
+        target = self.mkdtemp()
+        expected = self.write_sample_scripts(source)
+
+        cmd = self.get_build_scripts_cmd(target,
+                                         [os.path.join(source, fn)
+                                          for fn in expected])
+        cmd.finalize_options()
+
+        # http://bugs.python.org/issue4524
+        #
+        # On linux-g++-32 with command line `./configure --enable-ipv6
+        # --with-suffix=3`, python is compiled okay but the build scripts
+        # failed when writing the name of the executable
+        old = sysconfig.get_config_vars().get('VERSION')
+        sysconfig._config_vars['VERSION'] = 4
+        try:
+            cmd.run()
+        finally:
+            if old is not None:
+                sysconfig._config_vars['VERSION'] = old
+
+        built = os.listdir(target)
+        for name in expected:
+            self.assertIn(name, built)
+
+def test_suite():
+    return unittest.makeSuite(BuildScriptsTestCase)
+
+if __name__ == "__main__":
+    run_unittest(test_suite())
janus/lib/python3.10/distutils/tests/test_config_cmd.py ADDED
@@ -0,0 +1,100 @@
+"""Tests for distutils.command.config."""
+import unittest
+import os
+import sys
+import sysconfig
+from test.support import run_unittest, missing_compiler_executable
+
+from distutils.command.config import dump_file, config
+from distutils.tests import support
+from distutils import log
+
+class ConfigTestCase(support.LoggingSilencer,
+                     support.TempdirManager,
+                     unittest.TestCase):
+
+    def _info(self, msg, *args):
+        for line in msg.splitlines():
+            self._logs.append(line)
+
+    def setUp(self):
+        super(ConfigTestCase, self).setUp()
+        self._logs = []
+        self.old_log = log.info
+        log.info = self._info
+        self.old_config_vars = dict(sysconfig._CONFIG_VARS)
+
+    def tearDown(self):
+        log.info = self.old_log
+        sysconfig._CONFIG_VARS.clear()
+        sysconfig._CONFIG_VARS.update(self.old_config_vars)
+        super(ConfigTestCase, self).tearDown()
+
+    def test_dump_file(self):
+        this_file = os.path.splitext(__file__)[0] + '.py'
+        f = open(this_file)
+        try:
+            numlines = len(f.readlines())
+        finally:
+            f.close()
+
+        dump_file(this_file, 'I am the header')
+        self.assertEqual(len(self._logs), numlines+1)
+
+    @unittest.skipIf(sys.platform == 'win32', "can't test on Windows")
+    def test_search_cpp(self):
+        cmd = missing_compiler_executable(['preprocessor'])
+        if cmd is not None:
+            self.skipTest('The %r command is not found' % cmd)
+        pkg_dir, dist = self.create_dist()
+        cmd = config(dist)
+        cmd._check_compiler()
+        compiler = cmd.compiler
+        if sys.platform[:3] == "aix" and "xlc" in compiler.preprocessor[0].lower():
+            self.skipTest('xlc: The -E option overrides the -P, -o, and -qsyntaxonly options')
+
+        # simple pattern searches
+        match = cmd.search_cpp(pattern='xxx', body='/* xxx */')
+        self.assertEqual(match, 0)
+
+        match = cmd.search_cpp(pattern='_configtest', body='/* xxx */')
+        self.assertEqual(match, 1)
+
+    def test_finalize_options(self):
+        # finalize_options does a bit of transformation
+        # on options
+        pkg_dir, dist = self.create_dist()
+        cmd = config(dist)
+        cmd.include_dirs = 'one%stwo' % os.pathsep
+        cmd.libraries = 'one'
+        cmd.library_dirs = 'three%sfour' % os.pathsep
+        cmd.ensure_finalized()
+
+        self.assertEqual(cmd.include_dirs, ['one', 'two'])
+        self.assertEqual(cmd.libraries, ['one'])
+        self.assertEqual(cmd.library_dirs, ['three', 'four'])
+
+    def test_clean(self):
+        # _clean removes files
+        tmp_dir = self.mkdtemp()
+        f1 = os.path.join(tmp_dir, 'one')
+        f2 = os.path.join(tmp_dir, 'two')
+
+        self.write_file(f1, 'xxx')
+        self.write_file(f2, 'xxx')
+
+        for f in (f1, f2):
+            self.assertTrue(os.path.exists(f))
+
+        pkg_dir, dist = self.create_dist()
+        cmd = config(dist)
+        cmd._clean(f1, f2)
+
+        for f in (f1, f2):
+            self.assertFalse(os.path.exists(f))
+
+def test_suite():
+    return unittest.makeSuite(ConfigTestCase)
+
+if __name__ == "__main__":
+    run_unittest(test_suite())
janus/lib/python3.10/distutils/tests/test_dir_util.py ADDED
@@ -0,0 +1,139 @@
+"""Tests for distutils.dir_util."""
+import unittest
+import os
+import stat
+import sys
+from unittest.mock import patch
+
+from distutils import dir_util, errors
+from distutils.dir_util import (mkpath, remove_tree, create_tree, copy_tree,
+                                ensure_relative)
+
+from distutils import log
+from distutils.tests import support
+from test.support import run_unittest
+
+
+class DirUtilTestCase(support.TempdirManager, unittest.TestCase):
+
+    def _log(self, msg, *args):
+        if len(args) > 0:
+            self._logs.append(msg % args)
+        else:
+            self._logs.append(msg)
+
+    def setUp(self):
+        super(DirUtilTestCase, self).setUp()
+        self._logs = []
+        tmp_dir = self.mkdtemp()
+        self.root_target = os.path.join(tmp_dir, 'deep')
+        self.target = os.path.join(self.root_target, 'here')
+        self.target2 = os.path.join(tmp_dir, 'deep2')
+        self.old_log = log.info
+        log.info = self._log
+
+    def tearDown(self):
+        log.info = self.old_log
+        super(DirUtilTestCase, self).tearDown()
+
+    def test_mkpath_remove_tree_verbosity(self):
+
+        mkpath(self.target, verbose=0)
+        wanted = []
+        self.assertEqual(self._logs, wanted)
+        remove_tree(self.root_target, verbose=0)
+
+        mkpath(self.target, verbose=1)
+        wanted = ['creating %s' % self.root_target,
+                  'creating %s' % self.target]
+        self.assertEqual(self._logs, wanted)
+        self._logs = []
+
+        remove_tree(self.root_target, verbose=1)
+        wanted = ["removing '%s' (and everything under it)" % self.root_target]
+        self.assertEqual(self._logs, wanted)
+
+    @unittest.skipIf(sys.platform.startswith('win'),
+                     "This test is only appropriate for POSIX-like systems.")
+    def test_mkpath_with_custom_mode(self):
+        # Get and set the current umask value for testing mode bits.
+        umask = os.umask(0o002)
+        os.umask(umask)
+        mkpath(self.target, 0o700)
+        self.assertEqual(
+            stat.S_IMODE(os.stat(self.target).st_mode), 0o700 & ~umask)
+        mkpath(self.target2, 0o555)
+        self.assertEqual(
+            stat.S_IMODE(os.stat(self.target2).st_mode), 0o555 & ~umask)
+
+    def test_create_tree_verbosity(self):
+
+        create_tree(self.root_target, ['one', 'two', 'three'], verbose=0)
+        self.assertEqual(self._logs, [])
+        remove_tree(self.root_target, verbose=0)
+
+        wanted = ['creating %s' % self.root_target]
+        create_tree(self.root_target, ['one', 'two', 'three'], verbose=1)
+        self.assertEqual(self._logs, wanted)
+
+        remove_tree(self.root_target, verbose=0)
+
+    def test_copy_tree_verbosity(self):
+
+        mkpath(self.target, verbose=0)
+
+        copy_tree(self.target, self.target2, verbose=0)
+        self.assertEqual(self._logs, [])
+
+        remove_tree(self.root_target, verbose=0)
+
+        mkpath(self.target, verbose=0)
+        a_file = os.path.join(self.target, 'ok.txt')
+        with open(a_file, 'w') as f:
+            f.write('some content')
+
+        wanted = ['copying %s -> %s' % (a_file, self.target2)]
+        copy_tree(self.target, self.target2, verbose=1)
+        self.assertEqual(self._logs, wanted)
+
+        remove_tree(self.root_target, verbose=0)
+        remove_tree(self.target2, verbose=0)
+
+    def test_copy_tree_skips_nfs_temp_files(self):
+        mkpath(self.target, verbose=0)
+
+        a_file = os.path.join(self.target, 'ok.txt')
+        nfs_file = os.path.join(self.target, '.nfs123abc')
+        for f in a_file, nfs_file:
+            with open(f, 'w') as fh:
+                fh.write('some content')
+
+        copy_tree(self.target, self.target2)
+        self.assertEqual(os.listdir(self.target2), ['ok.txt'])
+
+        remove_tree(self.root_target, verbose=0)
+        remove_tree(self.target2, verbose=0)
+
+    def test_ensure_relative(self):
+        if os.sep == '/':
+            self.assertEqual(ensure_relative('/home/foo'), 'home/foo')
+            self.assertEqual(ensure_relative('some/path'), 'some/path')
+        else:   # \\
+            self.assertEqual(ensure_relative('c:\\home\\foo'), 'c:home\\foo')
+            self.assertEqual(ensure_relative('home\\foo'), 'home\\foo')
+
+    def test_copy_tree_exception_in_listdir(self):
+        """
+        An exception in listdir should raise a DistutilsFileError
+        """
+        with patch("os.listdir", side_effect=OSError()), \
+             self.assertRaises(errors.DistutilsFileError):
+            src = self.tempdirs[-1]
+            dir_util.copy_tree(src, None)
+
+
+def test_suite():
+    return unittest.makeSuite(DirUtilTestCase)
+
+if __name__ == "__main__":
+    run_unittest(test_suite())
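The dir_util helpers exercised above compose naturally; a minimal sketch using hypothetical temporary paths:

    import os
    import tempfile
    from distutils.dir_util import mkpath, copy_tree, remove_tree

    base = tempfile.mkdtemp()
    src = os.path.join(base, 'src', 'deep')
    mkpath(src, verbose=0)                      # creates intermediate dirs too
    copy_tree(os.path.join(base, 'src'),
              os.path.join(base, 'dst'), verbose=0)
    remove_tree(base, verbose=0)                # removes everything under base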
janus/lib/python3.10/distutils/tests/test_register.py ADDED
@@ -0,0 +1,324 @@
+"""Tests for distutils.command.register."""
+import os
+import unittest
+import getpass
+import urllib
+import warnings
+
+from test.support import run_unittest
+from test.support.warnings_helper import check_warnings
+
+from distutils.command import register as register_module
+from distutils.command.register import register
+from distutils.errors import DistutilsSetupError
+from distutils.log import INFO
+
+from distutils.tests.test_config import BasePyPIRCCommandTestCase
+
+try:
+    import docutils
+except ImportError:
+    docutils = None
+
+PYPIRC_NOPASSWORD = """\
+[distutils]
+
+index-servers =
+    server1
+
+[server1]
+username:me
+"""
+
+WANTED_PYPIRC = """\
+[distutils]
+index-servers =
+    pypi
+
+[pypi]
+username:tarek
+password:password
+"""
+
+class Inputs(object):
+    """Fakes user inputs."""
+    def __init__(self, *answers):
+        self.answers = answers
+        self.index = 0
+
+    def __call__(self, prompt=''):
+        try:
+            return self.answers[self.index]
+        finally:
+            self.index += 1
+
+class FakeOpener(object):
+    """Fakes a PyPI server"""
+    def __init__(self):
+        self.reqs = []
+
+    def __call__(self, *args):
+        return self
+
+    def open(self, req, data=None, timeout=None):
+        self.reqs.append(req)
+        return self
+
+    def read(self):
+        return b'xxx'
+
+    def getheader(self, name, default=None):
+        return {
+            'content-type': 'text/plain; charset=utf-8',
+            }.get(name.lower(), default)
+
+
+class RegisterTestCase(BasePyPIRCCommandTestCase):
+
+    def setUp(self):
+        super(RegisterTestCase, self).setUp()
+        # patching the password prompt
+        self._old_getpass = getpass.getpass
+        def _getpass(prompt):
+            return 'password'
+        getpass.getpass = _getpass
+        urllib.request._opener = None
+        self.old_opener = urllib.request.build_opener
+        self.conn = urllib.request.build_opener = FakeOpener()
+
+    def tearDown(self):
+        getpass.getpass = self._old_getpass
+        urllib.request._opener = None
+        urllib.request.build_opener = self.old_opener
+        super(RegisterTestCase, self).tearDown()
+
+    def _get_cmd(self, metadata=None):
+        if metadata is None:
+            metadata = {'url': 'xxx', 'author': 'xxx',
+                        'author_email': 'xxx',
+                        'name': 'xxx', 'version': 'xxx'}
+        pkg_info, dist = self.create_dist(**metadata)
+        return register(dist)
+
+    def test_create_pypirc(self):
+        # this test makes sure a .pypirc file
+        # is created when requested.
+
+        # let's create a register instance
+        cmd = self._get_cmd()
+
+        # we shouldn't have a .pypirc file yet
+        self.assertFalse(os.path.exists(self.rc))
+
+        # patching input and getpass.getpass
+        # so register gets happy
+        #
+        # Here's what we are faking :
+        # use your existing login (choice 1.)
+        # Username : 'tarek'
+        # Password : 'password'
+        # Save your login (y/N)? : 'y'
+        inputs = Inputs('1', 'tarek', 'y')
+        register_module.input = inputs.__call__
+        # let's run the command
+        try:
+            cmd.run()
+        finally:
+            del register_module.input
+
+        # we should have a brand new .pypirc file
+        self.assertTrue(os.path.exists(self.rc))
+
+        # with the content similar to WANTED_PYPIRC
+        f = open(self.rc)
+        try:
+            content = f.read()
+            self.assertEqual(content, WANTED_PYPIRC)
+        finally:
+            f.close()
+
+        # now let's make sure the .pypirc file generated
+        # really works : we shouldn't be asked anything
+        # if we run the command again
+        def _no_way(prompt=''):
+            raise AssertionError(prompt)
+        register_module.input = _no_way
+
+        cmd.show_response = 1
+        cmd.run()
+
+        # let's see what the server received : we should
+        # have 2 similar requests
+        self.assertEqual(len(self.conn.reqs), 2)
+        req1 = dict(self.conn.reqs[0].headers)
+        req2 = dict(self.conn.reqs[1].headers)
+
+        self.assertEqual(req1['Content-length'], '1374')
+        self.assertEqual(req2['Content-length'], '1374')
+        self.assertIn(b'xxx', self.conn.reqs[1].data)
+
+    def test_password_not_in_file(self):
+
+        self.write_file(self.rc, PYPIRC_NOPASSWORD)
+        cmd = self._get_cmd()
+        cmd._set_config()
+        cmd.finalize_options()
+        cmd.send_metadata()
+
+        # dist.password should be set
+        # therefore used afterwards by other commands
+        self.assertEqual(cmd.distribution.password, 'password')
+
+    def test_registering(self):
+        # this test runs choice 2
+        cmd = self._get_cmd()
+        inputs = Inputs('2', 'tarek', '[email protected]')
+        register_module.input = inputs.__call__
+        try:
+            # let's run the command
+            cmd.run()
+        finally:
+            del register_module.input
+
+        # we should have sent a request
+        self.assertEqual(len(self.conn.reqs), 1)
+        req = self.conn.reqs[0]
+        headers = dict(req.headers)
+        self.assertEqual(headers['Content-length'], '608')
+        self.assertIn(b'tarek', req.data)
+
+    def test_password_reset(self):
+        # this test runs choice 3
+        cmd = self._get_cmd()
+        inputs = Inputs('3', '[email protected]')
+        register_module.input = inputs.__call__
+        try:
+            # let's run the command
+            cmd.run()
+        finally:
+            del register_module.input
+
+        # we should have sent a request
+        self.assertEqual(len(self.conn.reqs), 1)
+        req = self.conn.reqs[0]
+        headers = dict(req.headers)
+        self.assertEqual(headers['Content-length'], '290')
+        self.assertIn(b'tarek', req.data)
+
+    @unittest.skipUnless(docutils is not None, 'needs docutils')
+    def test_strict(self):
+        # testing the strict option:
+        # when on, the register command stops if
+        # the metadata is incomplete or if
+        # long_description is not reSt compliant
+
+        # empty metadata
+        cmd = self._get_cmd({})
+        cmd.ensure_finalized()
+        cmd.strict = 1
+        self.assertRaises(DistutilsSetupError, cmd.run)
+
+        # metadata are OK but long_description is broken
+        metadata = {'url': 'xxx', 'author': 'xxx',
+                    'author_email': 'éxéxé',
+                    'name': 'xxx', 'version': 'xxx',
+                    'long_description': 'title\n==\n\ntext'}
+
+        cmd = self._get_cmd(metadata)
+        cmd.ensure_finalized()
+        cmd.strict = 1
+        self.assertRaises(DistutilsSetupError, cmd.run)
+
+        # now something that works
+        metadata['long_description'] = 'title\n=====\n\ntext'
+        cmd = self._get_cmd(metadata)
+        cmd.ensure_finalized()
+        cmd.strict = 1
+        inputs = Inputs('1', 'tarek', 'y')
+        register_module.input = inputs.__call__
+        # let's run the command
+        try:
+            cmd.run()
+        finally:
+            del register_module.input
+
+        # strict is off by default
+        cmd = self._get_cmd()
+        cmd.ensure_finalized()
+        inputs = Inputs('1', 'tarek', 'y')
+        register_module.input = inputs.__call__
+        # let's run the command
+        try:
+            cmd.run()
+        finally:
+            del register_module.input
+
+        # and finally a Unicode test (bug #12114)
+        metadata = {'url': 'xxx', 'author': '\u00c9ric',
+                    'author_email': 'xxx', 'name': 'xxx',
+                    'version': 'xxx',
+                    'description': 'Something about esszet \u00df',
+                    'long_description': 'More things about esszet \u00df'}
+
+        cmd = self._get_cmd(metadata)
+        cmd.ensure_finalized()
+        cmd.strict = 1
+        inputs = Inputs('1', 'tarek', 'y')
+        register_module.input = inputs.__call__
+        # let's run the command
+        try:
+            cmd.run()
+        finally:
+            del register_module.input
+
+    @unittest.skipUnless(docutils is not None, 'needs docutils')
+    def test_register_invalid_long_description(self):
+        description = ':funkie:`str`'  # mimic Sphinx-specific markup
+        metadata = {'url': 'xxx', 'author': 'xxx',
+                    'author_email': 'xxx',
+                    'name': 'xxx', 'version': 'xxx',
+                    'long_description': description}
+        cmd = self._get_cmd(metadata)
+        cmd.ensure_finalized()
+        cmd.strict = True
+        inputs = Inputs('2', 'tarek', '[email protected]')
+        register_module.input = inputs
+        self.addCleanup(delattr, register_module, 'input')
+
+        self.assertRaises(DistutilsSetupError, cmd.run)
+
+    def test_check_metadata_deprecated(self):
+        # makes sure check_metadata is deprecated
+        cmd = self._get_cmd()
+        with check_warnings() as w:
+            warnings.simplefilter("always")
+            cmd.check_metadata()
+            self.assertEqual(len(w.warnings), 1)
+
+    def test_list_classifiers(self):
+        cmd = self._get_cmd()
+        cmd.list_classifiers = 1
+        cmd.run()
+        results = self.get_logs(INFO)
+        self.assertEqual(results, ['running check', 'xxx'])
+
+    def test_show_response(self):
+        # test that the --show-response option returns a well formatted response
+        cmd = self._get_cmd()
+        inputs = Inputs('1', 'tarek', 'y')
+        register_module.input = inputs.__call__
+        cmd.show_response = 1
+        try:
+            cmd.run()
+        finally:
+            del register_module.input
+
+        results = self.get_logs(INFO)
+        self.assertEqual(results[3], 75 * '-' + '\nxxx\n' + 75 * '-')
+
+
+def test_suite():
+    return unittest.makeSuite(RegisterTestCase)
+
+if __name__ == "__main__":
+    run_unittest(test_suite())
janus/lib/python3.10/distutils/tests/test_spawn.py ADDED
@@ -0,0 +1,137 @@
+"""Tests for distutils.spawn."""
+import os
+import stat
+import sys
+import unittest.mock
+from test.support import run_unittest, unix_shell
+from test.support import os_helper
+
+from distutils.spawn import find_executable
+from distutils.spawn import spawn
+from distutils.errors import DistutilsExecError
+from distutils.tests import support
+
+class SpawnTestCase(support.TempdirManager,
+                    support.LoggingSilencer,
+                    unittest.TestCase):
+
+    @unittest.skipUnless(os.name in ('nt', 'posix'),
+                         'Runs only under posix or nt')
+    def test_spawn(self):
+        tmpdir = self.mkdtemp()
+
+        # creating something executable
+        # through the shell that returns 1
+        if sys.platform != 'win32':
+            exe = os.path.join(tmpdir, 'foo.sh')
+            self.write_file(exe, '#!%s\nexit 1' % unix_shell)
+        else:
+            exe = os.path.join(tmpdir, 'foo.bat')
+            self.write_file(exe, 'exit 1')
+
+        os.chmod(exe, 0o777)
+        self.assertRaises(DistutilsExecError, spawn, [exe])
+
+        # now something that works
+        if sys.platform != 'win32':
+            exe = os.path.join(tmpdir, 'foo.sh')
+            self.write_file(exe, '#!%s\nexit 0' % unix_shell)
+        else:
+            exe = os.path.join(tmpdir, 'foo.bat')
+            self.write_file(exe, 'exit 0')
+
+        os.chmod(exe, 0o777)
+        spawn([exe])  # should work without any error
+
+    def test_find_executable(self):
+        with os_helper.temp_dir() as tmp_dir:
+            # use TESTFN to get a pseudo-unique filename
+            program_noeext = os_helper.TESTFN
+            # Give the temporary program an ".exe" suffix for all.
+            # It's needed on Windows and not harmful on other platforms.
+            program = program_noeext + ".exe"
+
+            filename = os.path.join(tmp_dir, program)
+            with open(filename, "wb"):
+                pass
+            os.chmod(filename, stat.S_IXUSR)
+
+            # test path parameter
+            rv = find_executable(program, path=tmp_dir)
+            self.assertEqual(rv, filename)
+
+            if sys.platform == 'win32':
+                # test without ".exe" extension
+                rv = find_executable(program_noeext, path=tmp_dir)
+                self.assertEqual(rv, filename)
+
+            # test find in the current directory
+            with os_helper.change_cwd(tmp_dir):
+                rv = find_executable(program)
+                self.assertEqual(rv, program)
+
+            # test non-existent program
+            dont_exist_program = "dontexist_" + program
+            rv = find_executable(dont_exist_program, path=tmp_dir)
+            self.assertIsNone(rv)
+
+            # PATH='': no match, except in the current directory
+            with os_helper.EnvironmentVarGuard() as env:
+                env['PATH'] = ''
+                with unittest.mock.patch('distutils.spawn.os.confstr',
+                                         return_value=tmp_dir, create=True), \
+                     unittest.mock.patch('distutils.spawn.os.defpath',
+                                         tmp_dir):
+                    rv = find_executable(program)
+                    self.assertIsNone(rv)
+
+                # look in current directory
+                with os_helper.change_cwd(tmp_dir):
+                    rv = find_executable(program)
+                    self.assertEqual(rv, program)
+
+            # PATH=':': explicitly looks in the current directory
+            with os_helper.EnvironmentVarGuard() as env:
+                env['PATH'] = os.pathsep
+                with unittest.mock.patch('distutils.spawn.os.confstr',
+                                         return_value='', create=True), \
+                     unittest.mock.patch('distutils.spawn.os.defpath', ''):
+                    rv = find_executable(program)
+                    self.assertIsNone(rv)
+
+                # look in current directory
+                with os_helper.change_cwd(tmp_dir):
+                    rv = find_executable(program)
+                    self.assertEqual(rv, program)
+
+            # missing PATH: test os.confstr("CS_PATH") and os.defpath
+            with os_helper.EnvironmentVarGuard() as env:
+                env.pop('PATH', None)
+
+                # without confstr
+                with unittest.mock.patch('distutils.spawn.os.confstr',
+                                         side_effect=ValueError,
+                                         create=True), \
+                     unittest.mock.patch('distutils.spawn.os.defpath',
+                                         tmp_dir):
+                    rv = find_executable(program)
+                    self.assertEqual(rv, filename)
+
+                # with confstr
+                with unittest.mock.patch('distutils.spawn.os.confstr',
+                                         return_value=tmp_dir, create=True), \
+                     unittest.mock.patch('distutils.spawn.os.defpath', ''):
+                    rv = find_executable(program)
+                    self.assertEqual(rv, filename)
+
+    def test_spawn_missing_exe(self):
+        with self.assertRaises(DistutilsExecError) as ctx:
+            spawn(['does-not-exist'])
+        self.assertIn("command 'does-not-exist' failed", str(ctx.exception))
+
+
+def test_suite():
+    return unittest.makeSuite(SpawnTestCase)
+
+if __name__ == "__main__":
+    run_unittest(test_suite())
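find_executable and spawn, as tested above, are the usual pairing; a minimal sketch (the 'gcc' lookup is illustrative and depends on what is installed):

    from distutils.spawn import find_executable, spawn

    exe = find_executable('gcc')     # full path, or None if not on PATH
    if exe is not None:
        spawn([exe, '--version'])    # raises DistutilsExecError on failure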
janus/lib/python3.10/distutils/tests/test_versionpredicate.py ADDED
@@ -0,0 +1,13 @@
+"""Test harness for distutils.versionpredicate.
+
+"""
+
+import distutils.versionpredicate
+import doctest
+from test.support import run_unittest
+
+def test_suite():
+    return doctest.DocTestSuite(distutils.versionpredicate)
+
+if __name__ == '__main__':
+    run_unittest(test_suite())
janus/lib/python3.10/distutils/versionpredicate.py ADDED
@@ -0,0 +1,166 @@
1
+ """Module for parsing and testing package version predicate strings.
2
+ """
3
+ import re
4
+ import distutils.version
5
+ import operator
6
+
7
+
8
+ re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)",
9
+ re.ASCII)
10
+ # (package) (rest)
11
+
12
+ re_paren = re.compile(r"^\s*\((.*)\)\s*$") # (list) inside of parentheses
13
+ re_splitComparison = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$")
14
+ # (comp) (version)
15
+
16
+
17
+ def splitUp(pred):
18
+ """Parse a single version comparison.
19
+
20
+ Return (comparison string, StrictVersion)
21
+ """
22
+ res = re_splitComparison.match(pred)
23
+ if not res:
24
+ raise ValueError("bad package restriction syntax: %r" % pred)
25
+ comp, verStr = res.groups()
26
+ return (comp, distutils.version.StrictVersion(verStr))
27
+
28
+ compmap = {"<": operator.lt, "<=": operator.le, "==": operator.eq,
29
+ ">": operator.gt, ">=": operator.ge, "!=": operator.ne}
30
+
31
+ class VersionPredicate:
32
+ """Parse and test package version predicates.
33
+
34
+ >>> v = VersionPredicate('pyepat.abc (>1.0, <3333.3a1, !=1555.1b3)')
35
+
36
+ The `name` attribute provides the full dotted name that is given::
37
+
38
+ >>> v.name
39
+ 'pyepat.abc'
40
+
41
+ The str() of a `VersionPredicate` provides a normalized
42
+ human-readable version of the expression::
43
+
44
+ >>> print(v)
45
+ pyepat.abc (> 1.0, < 3333.3a1, != 1555.1b3)
46
+
47
+ The `satisfied_by()` method can be used to determine with a given
48
+ version number is included in the set described by the version
49
+ restrictions::
50
+
51
+ >>> v.satisfied_by('1.1')
52
+ True
53
+ >>> v.satisfied_by('1.4')
54
+ True
55
+ >>> v.satisfied_by('1.0')
56
+ False
57
+ >>> v.satisfied_by('4444.4')
58
+ False
59
+ >>> v.satisfied_by('1555.1b3')
60
+ False
61
+
62
+ `VersionPredicate` is flexible in accepting extra whitespace::
63
+
64
+ >>> v = VersionPredicate(' pat( == 0.1 ) ')
65
+ >>> v.name
66
+ 'pat'
67
+ >>> v.satisfied_by('0.1')
68
+ True
69
+ >>> v.satisfied_by('0.2')
70
+ False
71
+
72
+ If any version numbers passed in do not conform to the
73
+ restrictions of `StrictVersion`, a `ValueError` is raised::
74
+
75
+ >>> v = VersionPredicate('p1.p2.p3.p4(>=1.0, <=1.3a1, !=1.2zb3)')
76
+ Traceback (most recent call last):
77
+ ...
78
+ ValueError: invalid version number '1.2zb3'
79
+
80
+ It the module or package name given does not conform to what's
81
+ allowed as a legal module or package name, `ValueError` is
82
+ raised::
83
+
84
+ >>> v = VersionPredicate('foo-bar')
85
+ Traceback (most recent call last):
86
+ ...
87
+ ValueError: expected parenthesized list: '-bar'
88
+
89
+ >>> v = VersionPredicate('foo bar (12.21)')
90
+ Traceback (most recent call last):
91
+ ...
92
+ ValueError: expected parenthesized list: 'bar (12.21)'
93
+
94
+ """
95
+
96
+ def __init__(self, versionPredicateStr):
97
+ """Parse a version predicate string.
98
+ """
99
+ # Fields:
100
+ # name: package name
101
+ # pred: list of (comparison string, StrictVersion)
102
+
103
+ versionPredicateStr = versionPredicateStr.strip()
104
+ if not versionPredicateStr:
105
+ raise ValueError("empty package restriction")
106
+ match = re_validPackage.match(versionPredicateStr)
107
+ if not match:
108
+ raise ValueError("bad package name in %r" % versionPredicateStr)
109
+ self.name, paren = match.groups()
110
+ paren = paren.strip()
111
+ if paren:
112
+ match = re_paren.match(paren)
113
+ if not match:
114
+ raise ValueError("expected parenthesized list: %r" % paren)
115
+ predicates = match.groups()[0]
116
+ self.pred = [splitUp(aPred) for aPred in predicates.split(",")]
117
+ if not self.pred:
118
+ raise ValueError("empty parenthesized list in %r"
119
+ % versionPredicateStr)
120
+ else:
121
+ self.pred = []
122
+
123
+ def __str__(self):
124
+ if self.pred:
125
+ seq = [cond + " " + str(ver) for cond, ver in self.pred]
126
+ return self.name + " (" + ", ".join(seq) + ")"
127
+ else:
128
+ return self.name
129
+
130
+ def satisfied_by(self, version):
131
+ """True if version is compatible with all the predicates in self.
132
+ The parameter version must be acceptable to the StrictVersion
133
+ constructor. It may be either a string or StrictVersion.
134
+ """
135
+ for cond, ver in self.pred:
136
+ if not compmap[cond](version, ver):
137
+ return False
138
+ return True
139
+
140
+
141
+ _provision_rx = None
142
+
143
+ def split_provision(value):
144
+ """Return the name and optional version number of a provision.
145
+
146
+ The version number, if given, will be returned as a `StrictVersion`
147
+ instance, otherwise it will be `None`.
148
+
149
+ >>> split_provision('mypkg')
150
+ ('mypkg', None)
151
+ >>> split_provision(' mypkg( 1.2 ) ')
152
+ ('mypkg', StrictVersion ('1.2'))
153
+ """
154
+ global _provision_rx
155
+ if _provision_rx is None:
156
+ _provision_rx = re.compile(
157
+ r"([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)(?:\s*\(\s*([^)\s]+)\s*\))?$",
158
+ re.ASCII)
159
+ value = value.strip()
160
+ m = _provision_rx.match(value)
161
+ if not m:
162
+ raise ValueError("illegal provides specification: %r" % value)
163
+ ver = m.group(2) or None
164
+ if ver:
165
+ ver = distutils.version.StrictVersion(ver)
166
+ return m.group(1), ver
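A brief usage sketch of the API defined above (the package name and version numbers are illustrative):

from distutils.versionpredicate import VersionPredicate, split_provision

pred = VersionPredicate("mypkg (>=1.0, !=1.2, <2.0)")
pred.name                 # 'mypkg'
pred.satisfied_by("1.1")  # True
pred.satisfied_by("1.2")  # False: excluded by the != clause

split_provision("mypkg (1.5)")  # ('mypkg', StrictVersion ('1.5'))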
janus/lib/python3.10/glob.py ADDED
@@ -0,0 +1,237 @@
1
+ """Filename globbing utility."""
2
+
3
+ import contextlib
4
+ import os
5
+ import re
6
+ import fnmatch
7
+ import itertools
8
+ import stat
9
+ import sys
10
+
11
+ __all__ = ["glob", "iglob", "escape"]
12
+
13
+ def glob(pathname, *, root_dir=None, dir_fd=None, recursive=False):
14
+ """Return a list of paths matching a pathname pattern.
15
+
16
+ The pattern may contain simple shell-style wildcards a la
17
+ fnmatch. However, unlike fnmatch, filenames starting with a
18
+ dot are special cases that are not matched by '*' and '?'
19
+ patterns.
20
+
21
+ If recursive is true, the pattern '**' will match any files and
22
+ zero or more directories and subdirectories.
23
+ """
24
+ return list(iglob(pathname, root_dir=root_dir, dir_fd=dir_fd, recursive=recursive))
25
+
26
+ def iglob(pathname, *, root_dir=None, dir_fd=None, recursive=False):
27
+ """Return an iterator which yields the paths matching a pathname pattern.
28
+
29
+ The pattern may contain simple shell-style wildcards a la
30
+ fnmatch. However, unlike fnmatch, filenames starting with a
31
+ dot are special cases that are not matched by '*' and '?'
32
+ patterns.
33
+
34
+ If recursive is true, the pattern '**' will match any files and
35
+ zero or more directories and subdirectories.
36
+ """
37
+ sys.audit("glob.glob", pathname, recursive)
38
+ sys.audit("glob.glob/2", pathname, recursive, root_dir, dir_fd)
39
+ if root_dir is not None:
40
+ root_dir = os.fspath(root_dir)
41
+ else:
42
+ root_dir = pathname[:0]
43
+ it = _iglob(pathname, root_dir, dir_fd, recursive, False)
44
+ if not pathname or recursive and _isrecursive(pathname[:2]):
45
+ try:
46
+ s = next(it) # skip empty string
47
+ if s:
48
+ it = itertools.chain((s,), it)
49
+ except StopIteration:
50
+ pass
51
+ return it
52
+
53
+ def _iglob(pathname, root_dir, dir_fd, recursive, dironly):
54
+ dirname, basename = os.path.split(pathname)
55
+ if not has_magic(pathname):
56
+ assert not dironly
57
+ if basename:
58
+ if _lexists(_join(root_dir, pathname), dir_fd):
59
+ yield pathname
60
+ else:
61
+ # Patterns ending with a slash should match only directories
62
+ if _isdir(_join(root_dir, dirname), dir_fd):
63
+ yield pathname
64
+ return
65
+ if not dirname:
66
+ if recursive and _isrecursive(basename):
67
+ yield from _glob2(root_dir, basename, dir_fd, dironly)
68
+ else:
69
+ yield from _glob1(root_dir, basename, dir_fd, dironly)
70
+ return
71
+ # `os.path.split()` returns the argument itself as a dirname if it is a
72
+ # drive or UNC path. Prevent an infinite recursion if a drive or UNC path
73
+ # contains magic characters (i.e. r'\\?\C:').
74
+ if dirname != pathname and has_magic(dirname):
75
+ dirs = _iglob(dirname, root_dir, dir_fd, recursive, True)
76
+ else:
77
+ dirs = [dirname]
78
+ if has_magic(basename):
79
+ if recursive and _isrecursive(basename):
80
+ glob_in_dir = _glob2
81
+ else:
82
+ glob_in_dir = _glob1
83
+ else:
84
+ glob_in_dir = _glob0
85
+ for dirname in dirs:
86
+ for name in glob_in_dir(_join(root_dir, dirname), basename, dir_fd, dironly):
87
+ yield os.path.join(dirname, name)
88
+
89
+ # These 2 helper functions non-recursively glob inside a literal directory.
90
+ # They return a list of basenames. _glob1 accepts a pattern while _glob0
91
+ # takes a literal basename (so it only has to check for its existence).
92
+
93
+ def _glob1(dirname, pattern, dir_fd, dironly):
94
+ names = _listdir(dirname, dir_fd, dironly)
95
+ if not _ishidden(pattern):
96
+ names = (x for x in names if not _ishidden(x))
97
+ return fnmatch.filter(names, pattern)
98
+
99
+ def _glob0(dirname, basename, dir_fd, dironly):
100
+ if basename:
101
+ if _lexists(_join(dirname, basename), dir_fd):
102
+ return [basename]
103
+ else:
104
+ # `os.path.split()` returns an empty basename for paths ending with a
105
+ # directory separator. 'q*x/' should match only directories.
106
+ if _isdir(dirname, dir_fd):
107
+ return [basename]
108
+ return []
109
+
110
+ # Following functions are not public but can be used by third-party code.
111
+
112
+ def glob0(dirname, pattern):
113
+ return _glob0(dirname, pattern, None, False)
114
+
115
+ def glob1(dirname, pattern):
116
+ return _glob1(dirname, pattern, None, False)
117
+
118
+ # This helper function recursively yields relative pathnames inside a literal
119
+ # directory.
120
+
121
+ def _glob2(dirname, pattern, dir_fd, dironly):
122
+ assert _isrecursive(pattern)
123
+ yield pattern[:0]
124
+ yield from _rlistdir(dirname, dir_fd, dironly)
125
+
126
+ # If dironly is false, yields all file names inside a directory.
127
+ # If dironly is true, yields only directory names.
128
+ def _iterdir(dirname, dir_fd, dironly):
129
+ try:
130
+ fd = None
131
+ fsencode = None
132
+ if dir_fd is not None:
133
+ if dirname:
134
+ fd = arg = os.open(dirname, _dir_open_flags, dir_fd=dir_fd)
135
+ else:
136
+ arg = dir_fd
137
+ if isinstance(dirname, bytes):
138
+ fsencode = os.fsencode
139
+ elif dirname:
140
+ arg = dirname
141
+ elif isinstance(dirname, bytes):
142
+ arg = bytes(os.curdir, 'ASCII')
143
+ else:
144
+ arg = os.curdir
145
+ try:
146
+ with os.scandir(arg) as it:
147
+ for entry in it:
148
+ try:
149
+ if not dironly or entry.is_dir():
150
+ if fsencode is not None:
151
+ yield fsencode(entry.name)
152
+ else:
153
+ yield entry.name
154
+ except OSError:
155
+ pass
156
+ finally:
157
+ if fd is not None:
158
+ os.close(fd)
159
+ except OSError:
160
+ return
161
+
162
+ def _listdir(dirname, dir_fd, dironly):
163
+ with contextlib.closing(_iterdir(dirname, dir_fd, dironly)) as it:
164
+ return list(it)
165
+
166
+ # Recursively yields relative pathnames inside a literal directory.
167
+ def _rlistdir(dirname, dir_fd, dironly):
168
+ names = _listdir(dirname, dir_fd, dironly)
169
+ for x in names:
170
+ if not _ishidden(x):
171
+ yield x
172
+ path = _join(dirname, x) if dirname else x
173
+ for y in _rlistdir(path, dir_fd, dironly):
174
+ yield _join(x, y)
175
+
176
+
177
+ def _lexists(pathname, dir_fd):
178
+ # Same as os.path.lexists(), but with dir_fd
179
+ if dir_fd is None:
180
+ return os.path.lexists(pathname)
181
+ try:
182
+ os.lstat(pathname, dir_fd=dir_fd)
183
+ except (OSError, ValueError):
184
+ return False
185
+ else:
186
+ return True
187
+
188
+ def _isdir(pathname, dir_fd):
189
+ # Same as os.path.isdir(), but with dir_fd
190
+ if dir_fd is None:
191
+ return os.path.isdir(pathname)
192
+ try:
193
+ st = os.stat(pathname, dir_fd=dir_fd)
194
+ except (OSError, ValueError):
195
+ return False
196
+ else:
197
+ return stat.S_ISDIR(st.st_mode)
198
+
199
+ def _join(dirname, basename):
200
+ # It is common if dirname or basename is empty
201
+ if not dirname or not basename:
202
+ return dirname or basename
203
+ return os.path.join(dirname, basename)
204
+
205
+ magic_check = re.compile('([*?[])')
206
+ magic_check_bytes = re.compile(b'([*?[])')
207
+
208
+ def has_magic(s):
209
+ if isinstance(s, bytes):
210
+ match = magic_check_bytes.search(s)
211
+ else:
212
+ match = magic_check.search(s)
213
+ return match is not None
214
+
215
+ def _ishidden(path):
216
+ return path[0] in ('.', b'.'[0])
217
+
218
+ def _isrecursive(pattern):
219
+ if isinstance(pattern, bytes):
220
+ return pattern == b'**'
221
+ else:
222
+ return pattern == '**'
223
+
224
+ def escape(pathname):
225
+ """Escape all special characters.
226
+ """
227
+ # Escaping is done by wrapping any of "*?[" between square brackets.
228
+ # Metacharacters do not work in the drive part and shouldn't be escaped.
229
+ drive, pathname = os.path.splitdrive(pathname)
230
+ if isinstance(pathname, bytes):
231
+ pathname = magic_check_bytes.sub(br'[\1]', pathname)
232
+ else:
233
+ pathname = magic_check.sub(r'[\1]', pathname)
234
+ return drive + pathname
235
+
236
+
237
+ _dir_open_flags = os.O_RDONLY | getattr(os, 'O_DIRECTORY', 0)
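A short sketch of the public API above; the file names are hypothetical:

import glob

glob.glob("*.py")                           # non-recursive match in the cwd
glob.glob("src/**/*.py", recursive=True)    # '**' matches across directories
list(glob.iglob("*.txt", root_dir="/tmp"))  # paths relative to root_dir
glob.escape("file[1].txt")                  # 'file[[]1].txt' (literal match)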
janus/lib/python3.10/graphlib.py ADDED
@@ -0,0 +1,246 @@
1
+ __all__ = ["TopologicalSorter", "CycleError"]
2
+
3
+ _NODE_OUT = -1
4
+ _NODE_DONE = -2
5
+
6
+
7
+ class _NodeInfo:
8
+ __slots__ = "node", "npredecessors", "successors"
9
+
10
+ def __init__(self, node):
11
+ # The node this class is augmenting.
12
+ self.node = node
13
+
14
+ # Number of predecessors, generally >= 0. When this value falls to 0,
15
+ # and is returned by get_ready(), this is set to _NODE_OUT and when the
16
+ # node is marked done by a call to done(), set to _NODE_DONE.
17
+ self.npredecessors = 0
18
+
19
+ # List of successor nodes. The list can contain duplicated elements as
20
+ # long as they're all reflected in the successor's npredecessors attribute.
21
+ self.successors = []
22
+
23
+
24
+ class CycleError(ValueError):
25
+ """Subclass of ValueError raised by TopologicalSorter.prepare if cycles
26
+ exist in the working graph.
27
+
28
+ If multiple cycles exist, only one undefined choice among them will be reported
29
+ and included in the exception. The detected cycle can be accessed via the second
30
+ element in the *args* attribute of the exception instance and consists in a list
31
+ of nodes, such that each node is, in the graph, an immediate predecessor of the
32
+ next node in the list. In the reported list, the first and the last node will be
33
+ the same, to make it clear that it is cyclic.
34
+ """
35
+
36
+ pass
37
+
38
+
39
+ class TopologicalSorter:
40
+ """Provides functionality to topologically sort a graph of hashable nodes"""
41
+
42
+ def __init__(self, graph=None):
43
+ self._node2info = {}
44
+ self._ready_nodes = None
45
+ self._npassedout = 0
46
+ self._nfinished = 0
47
+
48
+ if graph is not None:
49
+ for node, predecessors in graph.items():
50
+ self.add(node, *predecessors)
51
+
52
+ def _get_nodeinfo(self, node):
53
+ if (result := self._node2info.get(node)) is None:
54
+ self._node2info[node] = result = _NodeInfo(node)
55
+ return result
56
+
57
+ def add(self, node, *predecessors):
58
+ """Add a new node and its predecessors to the graph.
59
+
60
+ Both the *node* and all elements in *predecessors* must be hashable.
61
+
62
+ If called multiple times with the same node argument, the set of dependencies
63
+ will be the union of all dependencies passed in.
64
+
65
+ It is possible to add a node with no dependencies (*predecessors* is not provided)
66
+ as well as provide a dependency twice. If a node that has not been provided before
67
+ is included among *predecessors* it will be automatically added to the graph with
68
+ no predecessors of its own.
69
+
70
+ Raises ValueError if called after "prepare".
71
+ """
72
+ if self._ready_nodes is not None:
73
+ raise ValueError("Nodes cannot be added after a call to prepare()")
74
+
75
+ # Create the node -> predecessor edges
76
+ nodeinfo = self._get_nodeinfo(node)
77
+ nodeinfo.npredecessors += len(predecessors)
78
+
79
+ # Create the predecessor -> node edges
80
+ for pred in predecessors:
81
+ pred_info = self._get_nodeinfo(pred)
82
+ pred_info.successors.append(node)
83
+
84
+ def prepare(self):
85
+ """Mark the graph as finished and check for cycles in the graph.
86
+
87
+ If any cycle is detected, "CycleError" will be raised, but "get_ready" can
88
+ still be used to obtain as many nodes as possible until cycles block more
89
+ progress. After a call to this function, the graph cannot be modified and
90
+ therefore no more nodes can be added using "add".
91
+ """
92
+ if self._ready_nodes is not None:
93
+ raise ValueError("cannot prepare() more than once")
94
+
95
+ self._ready_nodes = [
96
+ i.node for i in self._node2info.values() if i.npredecessors == 0
97
+ ]
98
+ # ready_nodes is set before we look for cycles on purpose:
99
+ # if the user wants to catch the CycleError, that's fine,
100
+ # they can continue using the instance to grab as many
101
+ # nodes as possible before cycles block more progress
102
+ cycle = self._find_cycle()
103
+ if cycle:
104
+ raise CycleError(f"nodes are in a cycle", cycle)
105
+
106
+ def get_ready(self):
107
+ """Return a tuple of all the nodes that are ready.
108
+
109
+ Initially it returns all nodes with no predecessors; once those are marked
110
+ as processed by calling "done", further calls will return all new nodes that
111
+ have all their predecessors already processed. Once no more progress can be made,
112
+ empty tuples are returned.
113
+
114
+ Raises ValueError if called without calling "prepare" previously.
115
+ """
116
+ if self._ready_nodes is None:
117
+ raise ValueError("prepare() must be called first")
118
+
119
+ # Get the nodes that are ready and mark them
120
+ result = tuple(self._ready_nodes)
121
+ n2i = self._node2info
122
+ for node in result:
123
+ n2i[node].npredecessors = _NODE_OUT
124
+
125
+ # Clean the list of nodes that are ready and update
126
+ # the counter of nodes that we have returned.
127
+ self._ready_nodes.clear()
128
+ self._npassedout += len(result)
129
+
130
+ return result
131
+
132
+ def is_active(self):
133
+ """Return ``True`` if more progress can be made and ``False`` otherwise.
134
+
135
+ Progress can be made if cycles do not block the resolution and either there
136
+ are still nodes ready that haven't yet been returned by "get_ready" or the
137
+ number of nodes marked "done" is less than the number that have been returned
138
+ by "get_ready".
139
+
140
+ Raises ValueError if called without calling "prepare" previously.
141
+ """
142
+ if self._ready_nodes is None:
143
+ raise ValueError("prepare() must be called first")
144
+ return self._nfinished < self._npassedout or bool(self._ready_nodes)
145
+
146
+ def __bool__(self):
147
+ return self.is_active()
148
+
149
+ def done(self, *nodes):
150
+ """Marks a set of nodes returned by "get_ready" as processed.
151
+
152
+ This method unblocks any successor of each node in *nodes* for being returned
153
+ in the future by a call to "get_ready".
154
+
155
+ Raises :exec:`ValueError` if any node in *nodes* has already been marked as
156
+ processed by a previous call to this method, if a node was not added to the
157
+ graph by using "add" or if called without calling "prepare" previously or if
158
+ node has not yet been returned by "get_ready".
159
+ """
160
+
161
+ if self._ready_nodes is None:
162
+ raise ValueError("prepare() must be called first")
163
+
164
+ n2i = self._node2info
165
+
166
+ for node in nodes:
167
+
168
+ # Check if we know about this node (it was added previously using add()
169
+ if (nodeinfo := n2i.get(node)) is None:
170
+ raise ValueError(f"node {node!r} was not added using add()")
171
+
172
+ # If the node has not being returned (marked as ready) previously, inform the user.
173
+ stat = nodeinfo.npredecessors
174
+ if stat != _NODE_OUT:
175
+ if stat >= 0:
176
+ raise ValueError(
177
+ f"node {node!r} was not passed out (still not ready)"
178
+ )
179
+ elif stat == _NODE_DONE:
180
+ raise ValueError(f"node {node!r} was already marked done")
181
+ else:
182
+ assert False, f"node {node!r}: unknown status {stat}"
183
+
184
+ # Mark the node as processed
185
+ nodeinfo.npredecessors = _NODE_DONE
186
+
187
+ # Go to all the successors and reduce the number of predecessors, collecting all the ones
188
+ # that are ready to be returned in the next get_ready() call.
189
+ for successor in nodeinfo.successors:
190
+ successor_info = n2i[successor]
191
+ successor_info.npredecessors -= 1
192
+ if successor_info.npredecessors == 0:
193
+ self._ready_nodes.append(successor)
194
+ self._nfinished += 1
195
+
196
+ def _find_cycle(self):
197
+ n2i = self._node2info
198
+ stack = []
199
+ itstack = []
200
+ seen = set()
201
+ node2stacki = {}
202
+
203
+ for node in n2i:
204
+ if node in seen:
205
+ continue
206
+
207
+ while True:
208
+ if node in seen:
209
+ # If we have seen already the node and is in the
210
+ # current stack we have found a cycle.
211
+ if node in node2stacki:
212
+ return stack[node2stacki[node] :] + [node]
213
+ # else go on to get next successor
214
+ else:
215
+ seen.add(node)
216
+ itstack.append(iter(n2i[node].successors).__next__)
217
+ node2stacki[node] = len(stack)
218
+ stack.append(node)
219
+
220
+ # Backtrack to the topmost stack entry with
221
+ # at least another successor.
222
+ while stack:
223
+ try:
224
+ node = itstack[-1]()
225
+ break
226
+ except StopIteration:
227
+ del node2stacki[stack.pop()]
228
+ itstack.pop()
229
+ else:
230
+ break
231
+ return None
232
+
233
+ def static_order(self):
234
+ """Returns an iterable of nodes in a topological order.
235
+
236
+ The particular order that is returned may depend on the specific
237
+ order in which the items were inserted in the graph.
238
+
239
+ Using this method does not require to call "prepare" or "done". If any
240
+ cycle is detected, :exc:`CycleError` will be raised.
241
+ """
242
+ self.prepare()
243
+ while self.is_active():
244
+ node_group = self.get_ready()
245
+ yield from node_group
246
+ self.done(*node_group)
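A minimal sketch of the two ways to drive TopologicalSorter: the one-shot static_order(), and the prepare()/get_ready()/done() loop intended for parallel processing. The graph and the process() worker are illustrative:

from graphlib import TopologicalSorter

graph = {"d": {"b", "c"}, "c": {"a"}, "b": {"a"}}
print(list(TopologicalSorter(graph).static_order()))
# e.g. ['a', 'c', 'b', 'd'] -- 'a' always precedes 'b'/'c', which precede 'd'

ts = TopologicalSorter(graph)
ts.prepare()
while ts.is_active():
    ready = ts.get_ready()   # all nodes whose predecessors are done
    for node in ready:
        process(node)        # hypothetical worker
    ts.done(*ready)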
janus/lib/python3.10/keyword.py ADDED
@@ -0,0 +1,63 @@
1
+ """Keywords (from "Grammar/python.gram")
2
+
3
+ This file is automatically generated; please don't muck it up!
4
+
5
+ To update the symbols in this file, 'cd' to the top directory of
6
+ the python source tree and run:
7
+
8
+ PYTHONPATH=Tools/peg_generator python3 -m pegen.keywordgen \
9
+ Grammar/python.gram \
10
+ Grammar/Tokens \
11
+ Lib/keyword.py
12
+
13
+ Alternatively, you can run 'make regen-keyword'.
14
+ """
15
+
16
+ __all__ = ["iskeyword", "issoftkeyword", "kwlist", "softkwlist"]
17
+
18
+ kwlist = [
19
+ 'False',
20
+ 'None',
21
+ 'True',
22
+ 'and',
23
+ 'as',
24
+ 'assert',
25
+ 'async',
26
+ 'await',
27
+ 'break',
28
+ 'class',
29
+ 'continue',
30
+ 'def',
31
+ 'del',
32
+ 'elif',
33
+ 'else',
34
+ 'except',
35
+ 'finally',
36
+ 'for',
37
+ 'from',
38
+ 'global',
39
+ 'if',
40
+ 'import',
41
+ 'in',
42
+ 'is',
43
+ 'lambda',
44
+ 'nonlocal',
45
+ 'not',
46
+ 'or',
47
+ 'pass',
48
+ 'raise',
49
+ 'return',
50
+ 'try',
51
+ 'while',
52
+ 'with',
53
+ 'yield'
54
+ ]
55
+
56
+ softkwlist = [
57
+ '_',
58
+ 'case',
59
+ 'match'
60
+ ]
61
+
62
+ iskeyword = frozenset(kwlist).__contains__
63
+ issoftkeyword = frozenset(softkwlist).__contains__
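A tiny usage sketch of the two predicates exported above:

import keyword

keyword.iskeyword("lambda")     # True
keyword.iskeyword("match")      # False: 'match' is only a soft keyword
keyword.issoftkeyword("match")  # True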
janus/lib/python3.10/nntplib.py ADDED
@@ -0,0 +1,1090 @@
1
+ """An NNTP client class based on:
2
+ - RFC 977: Network News Transfer Protocol
3
+ - RFC 2980: Common NNTP Extensions
4
+ - RFC 3977: Network News Transfer Protocol (version 2)
5
+
6
+ Example:
7
+
8
+ >>> from nntplib import NNTP
9
+ >>> s = NNTP('news')
10
+ >>> resp, count, first, last, name = s.group('comp.lang.python')
11
+ >>> print('Group', name, 'has', count, 'articles, range', first, 'to', last)
12
+ Group comp.lang.python has 51 articles, range 5770 to 5821
13
+ >>> resp, subs = s.xhdr('subject', '{0}-{1}'.format(first, last))
14
+ >>> resp = s.quit()
15
+ >>>
16
+
17
+ Here 'resp' is the server response line.
18
+ Error responses are turned into exceptions.
19
+
20
+ To post an article from a file:
21
+ >>> f = open(filename, 'rb') # file containing article, including header
22
+ >>> resp = s.post(f)
23
+ >>>
24
+
25
+ For descriptions of all methods, read the comments in the code below.
26
+ Note that all arguments and return values representing article numbers
27
+ are strings, not numbers, since they are rarely used for calculations.
28
+ """
29
+
30
+ # RFC 977 by Brian Kantor and Phil Lapsley.
31
+ # xover, xgtitle, xpath, date methods by Kevan Heydon
32
+
33
+ # Incompatible changes from the 2.x nntplib:
34
+ # - all commands are encoded as UTF-8 data (using the "surrogateescape"
35
+ # error handler), except for raw message data (POST, IHAVE)
36
+ # - all responses are decoded as UTF-8 data (using the "surrogateescape"
37
+ # error handler), except for raw message data (ARTICLE, HEAD, BODY)
38
+ # - the `file` argument to various methods is keyword-only
39
+ #
40
+ # - NNTP.date() returns a datetime object
41
+ # - NNTP.newgroups() and NNTP.newnews() take a datetime (or date) object,
42
+ # rather than a pair of (date, time) strings.
43
+ # - NNTP.newgroups() and NNTP.list() return a list of GroupInfo named tuples
44
+ # - NNTP.descriptions() returns a dict mapping group names to descriptions
45
+ # - NNTP.xover() returns a list of dicts mapping field names (header or metadata)
46
+ # to field values; each dict representing a message overview.
47
+ # - NNTP.article(), NNTP.head() and NNTP.body() return a (response, ArticleInfo)
48
+ # tuple.
49
+ # - the "internal" methods have been marked private (they now start with
50
+ # an underscore)
51
+
52
+ # Other changes from the 2.x/3.1 nntplib:
53
+ # - automatic querying of capabilities at connect
54
+ # - New method NNTP.getcapabilities()
55
+ # - New method NNTP.over()
56
+ # - New helper function decode_header()
57
+ # - NNTP.post() and NNTP.ihave() accept file objects, bytes-like objects and
58
+ # arbitrary iterables yielding lines.
59
+ # - An extensive test suite :-)
60
+
61
+ # TODO:
62
+ # - return structured data (GroupInfo etc.) everywhere
63
+ # - support HDR
64
+
65
+ # Imports
66
+ import re
67
+ import socket
68
+ import collections
69
+ import datetime
70
+ import sys
71
+
72
+ try:
73
+ import ssl
74
+ except ImportError:
75
+ _have_ssl = False
76
+ else:
77
+ _have_ssl = True
78
+
79
+ from email.header import decode_header as _email_decode_header
80
+ from socket import _GLOBAL_DEFAULT_TIMEOUT
81
+
82
+ __all__ = ["NNTP",
83
+ "NNTPError", "NNTPReplyError", "NNTPTemporaryError",
84
+ "NNTPPermanentError", "NNTPProtocolError", "NNTPDataError",
85
+ "decode_header",
86
+ ]
87
+
88
+ # maximal line length when calling readline(). This is to prevent
89
+ # reading arbitrary length lines. RFC 3977 limits NNTP line length to
90
+ # 512 characters, including CRLF. We have selected 2048 just to be on
91
+ # the safe side.
92
+ _MAXLINE = 2048
93
+
94
+
95
+ # Exceptions raised when an error or invalid response is received
96
+ class NNTPError(Exception):
97
+ """Base class for all nntplib exceptions"""
98
+ def __init__(self, *args):
99
+ Exception.__init__(self, *args)
100
+ try:
101
+ self.response = args[0]
102
+ except IndexError:
103
+ self.response = 'No response given'
104
+
105
+ class NNTPReplyError(NNTPError):
106
+ """Unexpected [123]xx reply"""
107
+ pass
108
+
109
+ class NNTPTemporaryError(NNTPError):
110
+ """4xx errors"""
111
+ pass
112
+
113
+ class NNTPPermanentError(NNTPError):
114
+ """5xx errors"""
115
+ pass
116
+
117
+ class NNTPProtocolError(NNTPError):
118
+ """Response does not begin with [1-5]"""
119
+ pass
120
+
121
+ class NNTPDataError(NNTPError):
122
+ """Error in response data"""
123
+ pass
124
+
125
+
126
+ # Standard port used by NNTP servers
127
+ NNTP_PORT = 119
128
+ NNTP_SSL_PORT = 563
129
+
130
+ # Response numbers that are followed by additional text (e.g. article)
131
+ _LONGRESP = {
132
+ '100', # HELP
133
+ '101', # CAPABILITIES
134
+ '211', # LISTGROUP (also not multi-line with GROUP)
135
+ '215', # LIST
136
+ '220', # ARTICLE
137
+ '221', # HEAD, XHDR
138
+ '222', # BODY
139
+ '224', # OVER, XOVER
140
+ '225', # HDR
141
+ '230', # NEWNEWS
142
+ '231', # NEWGROUPS
143
+ '282', # XGTITLE
144
+ }
145
+
146
+ # Default decoded value for LIST OVERVIEW.FMT if not supported
147
+ _DEFAULT_OVERVIEW_FMT = [
148
+ "subject", "from", "date", "message-id", "references", ":bytes", ":lines"]
149
+
150
+ # Alternative names allowed in LIST OVERVIEW.FMT response
151
+ _OVERVIEW_FMT_ALTERNATIVES = {
152
+ 'bytes': ':bytes',
153
+ 'lines': ':lines',
154
+ }
155
+
156
+ # Line terminators (we always output CRLF, but accept any of CRLF, CR, LF)
157
+ _CRLF = b'\r\n'
158
+
159
+ GroupInfo = collections.namedtuple('GroupInfo',
160
+ ['group', 'last', 'first', 'flag'])
161
+
162
+ ArticleInfo = collections.namedtuple('ArticleInfo',
163
+ ['number', 'message_id', 'lines'])
164
+
165
+
166
+ # Helper function(s)
167
+ def decode_header(header_str):
168
+ """Takes a unicode string representing a munged header value
169
+ and decodes it as a (possibly non-ASCII) readable value."""
170
+ parts = []
171
+ for v, enc in _email_decode_header(header_str):
172
+ if isinstance(v, bytes):
173
+ parts.append(v.decode(enc or 'ascii'))
174
+ else:
175
+ parts.append(v)
176
+ return ''.join(parts)
177
+
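A small illustration of decode_header() on an RFC 2047 q-encoded value (the header string is a made-up example):

decode_header('=?utf-8?q?caf=C3=A9?=')  # -> 'café'
decode_header('plain ascii subject')    # returned unchanged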
178
+ def _parse_overview_fmt(lines):
179
+ """Parse a list of string representing the response to LIST OVERVIEW.FMT
180
+ and return a list of header/metadata names.
181
+ Raises NNTPDataError if the response is not compliant
182
+ (cf. RFC 3977, section 8.4)."""
183
+ fmt = []
184
+ for line in lines:
185
+ if line[0] == ':':
186
+ # Metadata name (e.g. ":bytes")
187
+ name, _, suffix = line[1:].partition(':')
188
+ name = ':' + name
189
+ else:
190
+ # Header name (e.g. "Subject:" or "Xref:full")
191
+ name, _, suffix = line.partition(':')
192
+ name = name.lower()
193
+ name = _OVERVIEW_FMT_ALTERNATIVES.get(name, name)
194
+ # Should we do something with the suffix?
195
+ fmt.append(name)
196
+ defaults = _DEFAULT_OVERVIEW_FMT
197
+ if len(fmt) < len(defaults):
198
+ raise NNTPDataError("LIST OVERVIEW.FMT response too short")
199
+ if fmt[:len(defaults)] != defaults:
200
+ raise NNTPDataError("LIST OVERVIEW.FMT redefines default fields")
201
+ return fmt
202
+
203
+ def _parse_overview(lines, fmt, data_process_func=None):
204
+ """Parse the response to an OVER or XOVER command according to the
205
+ overview format `fmt`."""
206
+ n_defaults = len(_DEFAULT_OVERVIEW_FMT)
207
+ overview = []
208
+ for line in lines:
209
+ fields = {}
210
+ article_number, *tokens = line.split('\t')
211
+ article_number = int(article_number)
212
+ for i, token in enumerate(tokens):
213
+ if i >= len(fmt):
214
+ # XXX should we raise an error? Some servers might not
215
+ # support LIST OVERVIEW.FMT and still return additional
216
+ # headers.
217
+ continue
218
+ field_name = fmt[i]
219
+ is_metadata = field_name.startswith(':')
220
+ if i >= n_defaults and not is_metadata:
221
+ # Non-default header names are included in full in the response
222
+ # (unless the field is totally empty)
223
+ h = field_name + ": "
224
+ if token and token[:len(h)].lower() != h:
225
+ raise NNTPDataError("OVER/XOVER response doesn't include "
226
+ "names of additional headers")
227
+ token = token[len(h):] if token else None
228
+ fields[fmt[i]] = token
229
+ overview.append((article_number, fields))
230
+ return overview
231
+
232
+ def _parse_datetime(date_str, time_str=None):
233
+ """Parse a pair of (date, time) strings, and return a datetime object.
234
+ If only the date is given, it is assumed to be date and time
235
+ concatenated together (e.g. response to the DATE command).
236
+ """
237
+ if time_str is None:
238
+ time_str = date_str[-6:]
239
+ date_str = date_str[:-6]
240
+ hours = int(time_str[:2])
241
+ minutes = int(time_str[2:4])
242
+ seconds = int(time_str[4:])
243
+ year = int(date_str[:-4])
244
+ month = int(date_str[-4:-2])
245
+ day = int(date_str[-2:])
246
+ # RFC 3977 doesn't say how to interpret 2-char years. Assume that
247
+ # there are no dates before 1970 on Usenet.
248
+ if year < 70:
249
+ year += 2000
250
+ elif year < 100:
251
+ year += 1900
252
+ return datetime.datetime(year, month, day, hours, minutes, seconds)
253
+
254
+ def _unparse_datetime(dt, legacy=False):
255
+ """Format a date or datetime object as a pair of (date, time) strings
256
+ in the format required by the NEWNEWS and NEWGROUPS commands. If a
257
+ date object is passed, the time is assumed to be midnight (00h00).
258
+
259
+ The returned representation depends on the legacy flag:
260
+ * if legacy is False (the default):
261
+ date has the YYYYMMDD format and time the HHMMSS format
262
+ * if legacy is True:
263
+ date has the YYMMDD format and time the HHMMSS format.
264
+ RFC 3977 compliant servers should understand both formats; therefore,
265
+ legacy is only needed when talking to old servers.
266
+ """
267
+ if not isinstance(dt, datetime.datetime):
268
+ time_str = "000000"
269
+ else:
270
+ time_str = "{0.hour:02d}{0.minute:02d}{0.second:02d}".format(dt)
271
+ y = dt.year
272
+ if legacy:
273
+ y = y % 100
274
+ date_str = "{0:02d}{1.month:02d}{1.day:02d}".format(y, dt)
275
+ else:
276
+ date_str = "{0:04d}{1.month:02d}{1.day:02d}".format(y, dt)
277
+ return date_str, time_str
278
+
279
+
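A worked example of the round-trip these two helpers implement (timestamp illustrative):

import datetime
dt = datetime.datetime(2003, 5, 17, 18, 30, 45)
_unparse_datetime(dt)                # ('20030517', '183045')
_unparse_datetime(dt, legacy=True)   # ('030517', '183045')
_parse_datetime('20030517183045')    # datetime.datetime(2003, 5, 17, 18, 30, 45)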
280
+ if _have_ssl:
281
+
282
+ def _encrypt_on(sock, context, hostname):
283
+ """Wrap a socket in SSL/TLS. Arguments:
284
+ - sock: Socket to wrap
285
+ - context: SSL context to use for the encrypted connection
+ - hostname: hostname of the remote server, passed as server_hostname
286
+ Returns:
287
+ - sock: New, encrypted socket.
288
+ """
289
+ # Generate a default SSL context if none was passed.
290
+ if context is None:
291
+ context = ssl._create_stdlib_context()
292
+ return context.wrap_socket(sock, server_hostname=hostname)
293
+
294
+
295
+ # The classes themselves
296
+ class NNTP:
297
+ # UTF-8 is the character set for all NNTP commands and responses: they
298
+ # are automatically encoded (when sending) and decoded (when receiving)
299
+ # by this class.
300
+ # However, some multi-line data blocks can contain arbitrary bytes (for
301
+ # example, latin-1 or utf-16 data in the body of a message). Commands
302
+ # taking (POST, IHAVE) or returning (HEAD, BODY, ARTICLE) raw message
303
+ # data will therefore only accept and produce bytes objects.
304
+ # Furthermore, since there could be non-compliant servers out there,
305
+ # we use 'surrogateescape' as the error handler for fault tolerance
306
+ # and easy round-tripping. This could be useful for some applications
307
+ # (e.g. NNTP gateways).
308
+
309
+ encoding = 'utf-8'
310
+ errors = 'surrogateescape'
311
+
312
+ def __init__(self, host, port=NNTP_PORT, user=None, password=None,
313
+ readermode=None, usenetrc=False,
314
+ timeout=_GLOBAL_DEFAULT_TIMEOUT):
315
+ """Initialize an instance. Arguments:
316
+ - host: hostname to connect to
317
+ - port: port to connect to (default the standard NNTP port)
318
+ - user: username to authenticate with
319
+ - password: password to use with username
320
+ - readermode: if true, send 'mode reader' command after
321
+ connecting.
322
+ - usenetrc: allow loading username and password from ~/.netrc file
323
+ if not specified explicitly
324
+ - timeout: timeout (in seconds) used for socket connections
325
+
326
+ readermode is sometimes necessary if you are connecting to an
327
+ NNTP server on the local machine and intend to call
328
+ reader-specific commands, such as `group'. If you get
329
+ unexpected NNTPPermanentErrors, you might need to set
330
+ readermode.
331
+ """
332
+ self.host = host
333
+ self.port = port
334
+ self.sock = self._create_socket(timeout)
335
+ self.file = None
336
+ try:
337
+ self.file = self.sock.makefile("rwb")
338
+ self._base_init(readermode)
339
+ if user or usenetrc:
340
+ self.login(user, password, usenetrc)
341
+ except:
342
+ if self.file:
343
+ self.file.close()
344
+ self.sock.close()
345
+ raise
346
+
347
+ def _base_init(self, readermode):
348
+ """Partial initialization for the NNTP protocol.
349
+ This instance method is extracted for supporting the test code.
350
+ """
351
+ self.debugging = 0
352
+ self.welcome = self._getresp()
353
+
354
+ # Inquire about capabilities (RFC 3977).
355
+ self._caps = None
356
+ self.getcapabilities()
357
+
358
+ # 'MODE READER' is sometimes necessary to enable 'reader' mode.
359
+ # However, the order in which 'MODE READER' and 'AUTHINFO' need to
360
+ # arrive differs between some NNTP servers. If _setreadermode() fails
361
+ # with an authorization failed error, it will set this to True;
362
+ # the login() routine will interpret that as a request to try again
363
+ # after performing its normal function.
364
+ # Enable only if we're not already in READER mode anyway.
365
+ self.readermode_afterauth = False
366
+ if readermode and 'READER' not in self._caps:
367
+ self._setreadermode()
368
+ if not self.readermode_afterauth:
369
+ # Capabilities might have changed after MODE READER
370
+ self._caps = None
371
+ self.getcapabilities()
372
+
373
+ # RFC 4642 2.2.2: Both the client and the server MUST know if there is
374
+ # a TLS session active. A client MUST NOT attempt to start a TLS
375
+ # session if a TLS session is already active.
376
+ self.tls_on = False
377
+
378
+ # Log in and encryption setup order is left to subclasses.
379
+ self.authenticated = False
380
+
381
+ def __enter__(self):
382
+ return self
383
+
384
+ def __exit__(self, *args):
385
+ is_connected = lambda: hasattr(self, "file")
386
+ if is_connected():
387
+ try:
388
+ self.quit()
389
+ except (OSError, EOFError):
390
+ pass
391
+ finally:
392
+ if is_connected():
393
+ self._close()
394
+
395
+ def _create_socket(self, timeout):
396
+ if timeout is not None and not timeout:
397
+ raise ValueError('Non-blocking socket (timeout=0) is not supported')
398
+ sys.audit("nntplib.connect", self, self.host, self.port)
399
+ return socket.create_connection((self.host, self.port), timeout)
400
+
401
+ def getwelcome(self):
402
+ """Get the welcome message from the server
403
+ (this is read and squirreled away by __init__()).
404
+ If the response code is 200, posting is allowed;
405
+ if it is 201, posting is not allowed."""
406
+
407
+ if self.debugging: print('*welcome*', repr(self.welcome))
408
+ return self.welcome
409
+
410
+ def getcapabilities(self):
411
+ """Get the server capabilities, as read by __init__().
412
+ If the CAPABILITIES command is not supported, an empty dict is
413
+ returned."""
414
+ if self._caps is None:
415
+ self.nntp_version = 1
416
+ self.nntp_implementation = None
417
+ try:
418
+ resp, caps = self.capabilities()
419
+ except (NNTPPermanentError, NNTPTemporaryError):
420
+ # Server doesn't support capabilities
421
+ self._caps = {}
422
+ else:
423
+ self._caps = caps
424
+ if 'VERSION' in caps:
425
+ # The server can advertise several supported versions,
426
+ # choose the highest.
427
+ self.nntp_version = max(map(int, caps['VERSION']))
428
+ if 'IMPLEMENTATION' in caps:
429
+ self.nntp_implementation = ' '.join(caps['IMPLEMENTATION'])
430
+ return self._caps
431
+
432
+ def set_debuglevel(self, level):
433
+ """Set the debugging level. Argument 'level' means:
434
+ 0: no debugging output (default)
435
+ 1: print commands and responses but not body text etc.
436
+ 2: also print raw lines read and sent before stripping CR/LF"""
437
+
438
+ self.debugging = level
439
+ debug = set_debuglevel
440
+
441
+ def _putline(self, line):
442
+ """Internal: send one line to the server, appending CRLF.
443
+ The `line` must be a bytes-like object."""
444
+ sys.audit("nntplib.putline", self, line)
445
+ line = line + _CRLF
446
+ if self.debugging > 1: print('*put*', repr(line))
447
+ self.file.write(line)
448
+ self.file.flush()
449
+
450
+ def _putcmd(self, line):
451
+ """Internal: send one command to the server (through _putline()).
452
+ The `line` must be a unicode string."""
453
+ if self.debugging: print('*cmd*', repr(line))
454
+ line = line.encode(self.encoding, self.errors)
455
+ self._putline(line)
456
+
457
+ def _getline(self, strip_crlf=True):
458
+ """Internal: return one line from the server, stripping _CRLF.
459
+ Raise EOFError if the connection is closed.
460
+ Returns a bytes object."""
461
+ line = self.file.readline(_MAXLINE + 1)
462
+ if len(line) > _MAXLINE:
463
+ raise NNTPDataError('line too long')
464
+ if self.debugging > 1:
465
+ print('*get*', repr(line))
466
+ if not line: raise EOFError
467
+ if strip_crlf:
468
+ if line[-2:] == _CRLF:
469
+ line = line[:-2]
470
+ elif line[-1:] in _CRLF:
471
+ line = line[:-1]
472
+ return line
473
+
474
+ def _getresp(self):
475
+ """Internal: get a response from the server.
476
+ Raise various errors if the response indicates an error.
477
+ Returns a unicode string."""
478
+ resp = self._getline()
479
+ if self.debugging: print('*resp*', repr(resp))
480
+ resp = resp.decode(self.encoding, self.errors)
481
+ c = resp[:1]
482
+ if c == '4':
483
+ raise NNTPTemporaryError(resp)
484
+ if c == '5':
485
+ raise NNTPPermanentError(resp)
486
+ if c not in '123':
487
+ raise NNTPProtocolError(resp)
488
+ return resp
489
+
490
+ def _getlongresp(self, file=None):
491
+ """Internal: get a response plus following text from the server.
492
+ Raise various errors if the response indicates an error.
493
+
494
+ Returns a (response, lines) tuple where `response` is a unicode
495
+ string and `lines` is a list of bytes objects.
496
+ If `file` is a file-like object, it must be open in binary mode.
497
+ """
498
+
499
+ openedFile = None
500
+ try:
501
+ # If a string was passed then open a file with that name
502
+ if isinstance(file, (str, bytes)):
503
+ openedFile = file = open(file, "wb")
504
+
505
+ resp = self._getresp()
506
+ if resp[:3] not in _LONGRESP:
507
+ raise NNTPReplyError(resp)
508
+
509
+ lines = []
510
+ if file is not None:
511
+ # XXX lines = None instead?
512
+ terminators = (b'.' + _CRLF, b'.\n')
513
+ while 1:
514
+ line = self._getline(False)
515
+ if line in terminators:
516
+ break
517
+ if line.startswith(b'..'):
518
+ line = line[1:]
519
+ file.write(line)
520
+ else:
521
+ terminator = b'.'
522
+ while 1:
523
+ line = self._getline()
524
+ if line == terminator:
525
+ break
526
+ if line.startswith(b'..'):
527
+ line = line[1:]
528
+ lines.append(line)
529
+ finally:
530
+ # If this method created the file, then it must close it
531
+ if openedFile:
532
+ openedFile.close()
533
+
534
+ return resp, lines
535
+
536
+ def _shortcmd(self, line):
537
+ """Internal: send a command and get the response.
538
+ Same return value as _getresp()."""
539
+ self._putcmd(line)
540
+ return self._getresp()
541
+
542
+ def _longcmd(self, line, file=None):
543
+ """Internal: send a command and get the response plus following text.
544
+ Same return value as _getlongresp()."""
545
+ self._putcmd(line)
546
+ return self._getlongresp(file)
547
+
548
+ def _longcmdstring(self, line, file=None):
549
+ """Internal: send a command and get the response plus following text.
550
+ Same as _longcmd() and _getlongresp(), except that the returned `lines`
551
+ are unicode strings rather than bytes objects.
552
+ """
553
+ self._putcmd(line)
554
+ resp, list = self._getlongresp(file)
555
+ return resp, [line.decode(self.encoding, self.errors)
556
+ for line in list]
557
+
558
+ def _getoverviewfmt(self):
559
+ """Internal: get the overview format. Queries the server if not
560
+ already done, else returns the cached value."""
561
+ try:
562
+ return self._cachedoverviewfmt
563
+ except AttributeError:
564
+ pass
565
+ try:
566
+ resp, lines = self._longcmdstring("LIST OVERVIEW.FMT")
567
+ except NNTPPermanentError:
568
+ # Not supported by server?
569
+ fmt = _DEFAULT_OVERVIEW_FMT[:]
570
+ else:
571
+ fmt = _parse_overview_fmt(lines)
572
+ self._cachedoverviewfmt = fmt
573
+ return fmt
574
+
575
+ def _grouplist(self, lines):
576
+ # Parse lines into "group last first flag"
577
+ return [GroupInfo(*line.split()) for line in lines]
578
+
579
+ def capabilities(self):
580
+ """Process a CAPABILITIES command. Not supported by all servers.
581
+ Return:
582
+ - resp: server response if successful
583
+ - caps: a dictionary mapping capability names to lists of tokens
584
+ (for example {'VERSION': ['2'], 'OVER': [], LIST: ['ACTIVE', 'HEADERS'] })
585
+ """
586
+ caps = {}
587
+ resp, lines = self._longcmdstring("CAPABILITIES")
588
+ for line in lines:
589
+ name, *tokens = line.split()
590
+ caps[name] = tokens
591
+ return resp, caps
592
+
593
+ def newgroups(self, date, *, file=None):
594
+ """Process a NEWGROUPS command. Arguments:
595
+ - date: a date or datetime object
596
+ Return:
597
+ - resp: server response if successful
598
+ - list: list of newsgroup names
599
+ """
600
+ if not isinstance(date, (datetime.date, datetime.datetime)):
601
+ raise TypeError(
602
+ "the date parameter must be a date or datetime object, "
603
+ "not '{:40}'".format(date.__class__.__name__))
604
+ date_str, time_str = _unparse_datetime(date, self.nntp_version < 2)
605
+ cmd = 'NEWGROUPS {0} {1}'.format(date_str, time_str)
606
+ resp, lines = self._longcmdstring(cmd, file)
607
+ return resp, self._grouplist(lines)
608
+
609
+ def newnews(self, group, date, *, file=None):
610
+ """Process a NEWNEWS command. Arguments:
611
+ - group: group name or '*'
612
+ - date: a date or datetime object
613
+ Return:
614
+ - resp: server response if successful
615
+ - list: list of message ids
616
+ """
617
+ if not isinstance(date, (datetime.date, datetime.datetime)):
618
+ raise TypeError(
619
+ "the date parameter must be a date or datetime object, "
620
+ "not '{:40}'".format(date.__class__.__name__))
621
+ date_str, time_str = _unparse_datetime(date, self.nntp_version < 2)
622
+ cmd = 'NEWNEWS {0} {1} {2}'.format(group, date_str, time_str)
623
+ return self._longcmdstring(cmd, file)
624
+
625
+ def list(self, group_pattern=None, *, file=None):
626
+ """Process a LIST or LIST ACTIVE command. Arguments:
627
+ - group_pattern: a pattern indicating which groups to query
628
+ - file: Filename string or file object to store the result in
629
+ Returns:
630
+ - resp: server response if successful
631
+ - list: list of (group, last, first, flag) (strings)
632
+ """
633
+ if group_pattern is not None:
634
+ command = 'LIST ACTIVE ' + group_pattern
635
+ else:
636
+ command = 'LIST'
637
+ resp, lines = self._longcmdstring(command, file)
638
+ return resp, self._grouplist(lines)
639
+
640
+ def _getdescriptions(self, group_pattern, return_all):
641
+ line_pat = re.compile('^(?P<group>[^ \t]+)[ \t]+(.*)$')
642
+ # Try the more standard (per RFC 2980) LIST NEWSGROUPS first
643
+ resp, lines = self._longcmdstring('LIST NEWSGROUPS ' + group_pattern)
644
+ if not resp.startswith('215'):
645
+ # Now the deprecated XGTITLE. This either raises an error
646
+ # or succeeds with the same output structure as LIST
647
+ # NEWSGROUPS.
648
+ resp, lines = self._longcmdstring('XGTITLE ' + group_pattern)
649
+ groups = {}
650
+ for raw_line in lines:
651
+ match = line_pat.search(raw_line.strip())
652
+ if match:
653
+ name, desc = match.group(1, 2)
654
+ if not return_all:
655
+ return desc
656
+ groups[name] = desc
657
+ if return_all:
658
+ return resp, groups
659
+ else:
660
+ # Nothing found
661
+ return ''
662
+
663
+ def description(self, group):
664
+ """Get a description for a single group. If more than one
665
+ group matches ('group' is a pattern), return the first. If no
666
+ group matches, return an empty string.
667
+
668
+ This elides the response code from the server, since it can
669
+ only be '215' or '285' (for xgtitle) anyway. If the response
670
+ code is needed, use the 'descriptions' method.
671
+
672
+ NOTE: This neither checks for a wildcard in 'group' nor does
673
+ it check whether the group actually exists."""
674
+ return self._getdescriptions(group, False)
675
+
676
+ def descriptions(self, group_pattern):
677
+ """Get descriptions for a range of groups."""
678
+ return self._getdescriptions(group_pattern, True)
679
+
680
+ def group(self, name):
681
+ """Process a GROUP command. Argument:
682
+ - group: the group name
683
+ Returns:
684
+ - resp: server response if successful
685
+ - count: number of articles
686
+ - first: first article number
687
+ - last: last article number
688
+ - name: the group name
689
+ """
690
+ resp = self._shortcmd('GROUP ' + name)
691
+ if not resp.startswith('211'):
692
+ raise NNTPReplyError(resp)
693
+ words = resp.split()
694
+ count = first = last = 0
695
+ n = len(words)
696
+ if n > 1:
697
+ count = words[1]
698
+ if n > 2:
699
+ first = words[2]
700
+ if n > 3:
701
+ last = words[3]
702
+ if n > 4:
703
+ name = words[4].lower()
704
+ return resp, int(count), int(first), int(last), name
705
+
706
+ def help(self, *, file=None):
707
+ """Process a HELP command. Argument:
708
+ - file: Filename string or file object to store the result in
709
+ Returns:
710
+ - resp: server response if successful
711
+ - list: list of strings returned by the server in response to the
712
+ HELP command
713
+ """
714
+ return self._longcmdstring('HELP', file)
715
+
716
+ def _statparse(self, resp):
717
+ """Internal: parse the response line of a STAT, NEXT, LAST,
718
+ ARTICLE, HEAD or BODY command."""
719
+ if not resp.startswith('22'):
720
+ raise NNTPReplyError(resp)
721
+ words = resp.split()
722
+ art_num = int(words[1])
723
+ message_id = words[2]
724
+ return resp, art_num, message_id
725
+
726
+ def _statcmd(self, line):
727
+ """Internal: process a STAT, NEXT or LAST command."""
728
+ resp = self._shortcmd(line)
729
+ return self._statparse(resp)
730
+
731
+ def stat(self, message_spec=None):
732
+ """Process a STAT command. Argument:
733
+ - message_spec: article number or message id (if not specified,
734
+ the current article is selected)
735
+ Returns:
736
+ - resp: server response if successful
737
+ - art_num: the article number
738
+ - message_id: the message id
739
+ """
740
+ if message_spec:
741
+ return self._statcmd('STAT {0}'.format(message_spec))
742
+ else:
743
+ return self._statcmd('STAT')
744
+
745
+ def next(self):
746
+ """Process a NEXT command. No arguments. Return as for STAT."""
747
+ return self._statcmd('NEXT')
748
+
749
+ def last(self):
750
+ """Process a LAST command. No arguments. Return as for STAT."""
751
+ return self._statcmd('LAST')
752
+
753
+ def _artcmd(self, line, file=None):
754
+ """Internal: process a HEAD, BODY or ARTICLE command."""
755
+ resp, lines = self._longcmd(line, file)
756
+ resp, art_num, message_id = self._statparse(resp)
757
+ return resp, ArticleInfo(art_num, message_id, lines)
758
+
759
+ def head(self, message_spec=None, *, file=None):
760
+ """Process a HEAD command. Argument:
761
+ - message_spec: article number or message id
762
+ - file: filename string or file object to store the headers in
763
+ Returns:
764
+ - resp: server response if successful
765
+ - ArticleInfo: (article number, message id, list of header lines)
766
+ """
767
+ if message_spec is not None:
768
+ cmd = 'HEAD {0}'.format(message_spec)
769
+ else:
770
+ cmd = 'HEAD'
771
+ return self._artcmd(cmd, file)
772
+
773
+ def body(self, message_spec=None, *, file=None):
774
+ """Process a BODY command. Argument:
775
+ - message_spec: article number or message id
776
+ - file: filename string or file object to store the body in
777
+ Returns:
778
+ - resp: server response if successful
779
+ - ArticleInfo: (article number, message id, list of body lines)
780
+ """
781
+ if message_spec is not None:
782
+ cmd = 'BODY {0}'.format(message_spec)
783
+ else:
784
+ cmd = 'BODY'
785
+ return self._artcmd(cmd, file)
786
+
787
+ def article(self, message_spec=None, *, file=None):
788
+ """Process an ARTICLE command. Argument:
789
+ - message_spec: article number or message id
790
+ - file: filename string or file object to store the article in
791
+ Returns:
792
+ - resp: server response if successful
793
+ - ArticleInfo: (article number, message id, list of article lines)
794
+ """
795
+ if message_spec is not None:
796
+ cmd = 'ARTICLE {0}'.format(message_spec)
797
+ else:
798
+ cmd = 'ARTICLE'
799
+ return self._artcmd(cmd, file)
800
+
+    def slave(self):
+        """Process a SLAVE command. Returns:
+        - resp: server response if successful
+        """
+        return self._shortcmd('SLAVE')
+
+    def xhdr(self, hdr, str, *, file=None):
+        """Process an XHDR command (optional server extension). Arguments:
+        - hdr: the header type (e.g. 'subject')
+        - str: an article nr, a message id, or a range nr1-nr2
+        - file: Filename string or file object to store the result in
+        Returns:
+        - resp: server response if successful
+        - list: list of (nr, value) strings
+        """
+        pat = re.compile('^([0-9]+) ?(.*)\n?')
+        resp, lines = self._longcmdstring('XHDR {0} {1}'.format(hdr, str), file)
+        def remove_number(line):
+            m = pat.match(line)
+            return m.group(1, 2) if m else line
+        return resp, [remove_number(line) for line in lines]
+
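+    # Usage sketch (illustrative; assumes a group has been selected and
+    # that the server implements the optional XHDR extension):
+    #
+    #     resp, pairs = s.xhdr('subject', '3000-3005')
+    #     for art_num, subject in pairs:
+    #         print(art_num, subject)
+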
+    def xover(self, start, end, *, file=None):
+        """Process an XOVER command (optional server extension). Arguments:
+        - start: start of range
+        - end: end of range
+        - file: Filename string or file object to store the result in
+        Returns:
+        - resp: server response if successful
+        - list: list of dicts containing the response fields
+        """
+        resp, lines = self._longcmdstring('XOVER {0}-{1}'.format(start, end),
+                                          file)
+        fmt = self._getoverviewfmt()
+        return resp, _parse_overview(lines, fmt)
+
+    def over(self, message_spec, *, file=None):
+        """Process an OVER command. If the command isn't supported, fall
+        back to XOVER. Arguments:
+        - message_spec:
+            - either a message id, indicating the article to fetch
+              information about
+            - or a (start, end) tuple, indicating a range of article numbers;
+              if end is None, information up to the newest message will be
+              retrieved
+            - or None, indicating the current article number must be used
+        - file: Filename string or file object to store the result in
+        Returns:
+        - resp: server response if successful
+        - list: list of dicts containing the response fields
+
+        NOTE: the "message id" form isn't supported by XOVER
+        """
+        cmd = 'OVER' if 'OVER' in self._caps else 'XOVER'
+        if isinstance(message_spec, (tuple, list)):
+            start, end = message_spec
+            cmd += ' {0}-{1}'.format(start, end or '')
+        elif message_spec is not None:
+            cmd = cmd + ' ' + message_spec
+        resp, lines = self._longcmdstring(cmd, file)
+        fmt = self._getoverviewfmt()
+        return resp, _parse_overview(lines, fmt)
+
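+    # Usage sketch (illustrative). _parse_overview yields
+    # (article number, fields dict) pairs, keyed by lowercased header
+    # names from the overview format:
+    #
+    #     resp, overviews = s.over((first, last))
+    #     for art_num, fields in overviews:
+    #         print(art_num, fields['subject'])
+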
+    def date(self):
+        """Process the DATE command.
+        Returns:
+        - resp: server response if successful
+        - date: datetime object
+        """
+        resp = self._shortcmd("DATE")
+        if not resp.startswith('111'):
+            raise NNTPReplyError(resp)
+        elem = resp.split()
+        if len(elem) != 2:
+            raise NNTPDataError(resp)
+        date = elem[1]
+        if len(date) != 14:
+            raise NNTPDataError(resp)
+        return resp, _parse_datetime(date, None)
+
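+    # Usage sketch (illustrative): the 14-character yyyymmddhhmmss
+    # payload comes back parsed into a datetime.datetime:
+    #
+    #     resp, server_now = s.date()
+    #     print(server_now.isoformat())
+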
+    def _post(self, command, f):
+        resp = self._shortcmd(command)
+        # Raises a specific exception if posting is not allowed
+        if not resp.startswith('3'):
+            raise NNTPReplyError(resp)
+        if isinstance(f, (bytes, bytearray)):
+            f = f.splitlines()
+        # We don't use _putline() because:
+        # - we don't want additional CRLF if the file or iterable is already
+        #   in the right format
+        # - we don't want a spurious flush() after each line is written
+        for line in f:
+            if not line.endswith(_CRLF):
+                line = line.rstrip(b"\r\n") + _CRLF
+            if line.startswith(b'.'):
+                line = b'.' + line
+            self.file.write(line)
+        self.file.write(b".\r\n")
+        self.file.flush()
+        return self._getresp()
+
+    def post(self, data):
+        """Process a POST command. Arguments:
+        - data: bytes object, iterable or file containing the article
+        Returns:
+        - resp: server response if successful"""
+        return self._post('POST', data)
+
+    def ihave(self, message_id, data):
+        """Process an IHAVE command. Arguments:
+        - message_id: message-id of the article
+        - data: file containing the article
+        Returns:
+        - resp: server response if successful
+        Note that if the server refuses the article an exception is raised."""
+        return self._post('IHAVE {0}'.format(message_id), data)
+
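+    # Usage sketch (illustrative; addresses and group are hypothetical).
+    # The payload must be a complete article: headers, blank line, body;
+    # _post() normalizes line endings and dot-stuffs as needed:
+    #
+    #     article = (b"From: poster@example.com\r\n"
+    #                b"Newsgroups: example.test\r\n"
+    #                b"Subject: demo\r\n"
+    #                b"\r\n"
+    #                b"Hello from nntplib.\r\n")
+    #     resp = s.post(article)
+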
+    def _close(self):
+        try:
+            if self.file:
+                self.file.close()
+                del self.file
+        finally:
+            self.sock.close()
+
+    def quit(self):
+        """Process a QUIT command and close the socket. Returns:
+        - resp: server response if successful"""
+        try:
+            resp = self._shortcmd('QUIT')
+        finally:
+            self._close()
+        return resp
+
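+    # Note (sketch): an explicit quit() is only needed outside a `with`
+    # block; the context-manager support defined earlier in this class
+    # issues QUIT and closes the socket on exit:
+    #
+    #     with NNTP('news.example.com') as s:  # hypothetical host
+    #         print(s.getwelcome())
+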
+    def login(self, user=None, password=None, usenetrc=True):
+        if self.authenticated:
+            raise ValueError("Already logged in.")
+        if not user and not usenetrc:
+            raise ValueError(
+                "At least one of `user` and `usenetrc` must be specified")
+        # If no login/password was specified but netrc was requested,
+        # try to get them from ~/.netrc
+        # Presume that if .netrc has an entry, NNRP authentication is required.
+        try:
+            if usenetrc and not user:
+                import netrc
+                credentials = netrc.netrc()
+                auth = credentials.authenticators(self.host)
+                if auth:
+                    user = auth[0]
+                    password = auth[2]
+        except OSError:
+            pass
+        # Perform NNTP authentication if needed.
+        if not user:
+            return
+        resp = self._shortcmd('authinfo user ' + user)
+        if resp.startswith('381'):
+            if not password:
+                raise NNTPReplyError(resp)
+            else:
+                resp = self._shortcmd('authinfo pass ' + password)
+                if not resp.startswith('281'):
+                    raise NNTPPermanentError(resp)
+        # Capabilities might have changed after login
+        self._caps = None
+        self.getcapabilities()
+        # Attempt to send mode reader if it was requested after login.
+        # Only do so if we're not in reader mode already.
+        if self.readermode_afterauth and 'READER' not in self._caps:
+            self._setreadermode()
+            # Capabilities might have changed after MODE READER
+            self._caps = None
+            self.getcapabilities()
+
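+    # Usage sketch (illustrative; credentials are hypothetical). Either
+    # pass credentials explicitly or rely on a matching ~/.netrc entry:
+    #
+    #     s.login(user='alice', password='secret')
+    #     s.login(usenetrc=True)  # look up self.host in ~/.netrc
+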
+    def _setreadermode(self):
+        try:
+            self.welcome = self._shortcmd('mode reader')
+        except NNTPPermanentError:
+            # Error 5xx, probably 'not implemented'
+            pass
+        except NNTPTemporaryError as e:
+            if e.response.startswith('480'):
+                # Need authorization before 'mode reader'
+                self.readermode_afterauth = True
+            else:
+                raise
+
+    if _have_ssl:
+        def starttls(self, context=None):
+            """Process a STARTTLS command. Arguments:
+            - context: SSL context to use for the encrypted connection
+            """
+            # Per RFC 4642, STARTTLS MUST NOT be sent after authentication or if
+            # a TLS session already exists.
+            if self.tls_on:
+                raise ValueError("TLS is already enabled.")
+            if self.authenticated:
+                raise ValueError("TLS cannot be started after authentication.")
+            resp = self._shortcmd('STARTTLS')
+            if resp.startswith('382'):
+                self.file.close()
+                self.sock = _encrypt_on(self.sock, context, self.host)
+                self.file = self.sock.makefile("rwb")
+                self.tls_on = True
+                # Capabilities may change after TLS starts up, so ask for them
+                # again.
+                self._caps = None
+                self.getcapabilities()
+            else:
+                raise NNTPError("TLS failed to start.")
+
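+        # Usage sketch (illustrative; host is hypothetical). STARTTLS must
+        # precede authentication, per the checks above:
+        #
+        #     s = NNTP('news.example.com')
+        #     s.starttls(ssl.create_default_context())
+        #     s.login(user='alice', password='secret')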
+
+if _have_ssl:
+    class NNTP_SSL(NNTP):
+
+        def __init__(self, host, port=NNTP_SSL_PORT,
+                     user=None, password=None, ssl_context=None,
+                     readermode=None, usenetrc=False,
+                     timeout=_GLOBAL_DEFAULT_TIMEOUT):
+            """This works identically to NNTP.__init__, except for the change
+            in default port and the `ssl_context` argument for SSL connections.
+            """
+            self.ssl_context = ssl_context
+            super().__init__(host, port, user, password, readermode,
+                             usenetrc, timeout)
+
+        def _create_socket(self, timeout):
+            sock = super()._create_socket(timeout)
+            try:
+                sock = _encrypt_on(sock, self.ssl_context, self.host)
+            except:
+                sock.close()
+                raise
+            else:
+                return sock
+
+    __all__.append("NNTP_SSL")
+
+
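+# Usage sketch (illustrative; host is hypothetical): NNTP_SSL gives
+# implicit TLS on the dedicated port, as opposed to upgrading a plain
+# connection with starttls():
+#
+#     with NNTP_SSL('news.example.com') as s:  # connects to port 563
+#         resp, count, first, last, name = s.group('example.test')
+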
+# Test retrieval when run as a script.
+if __name__ == '__main__':
+    import argparse
+
+    parser = argparse.ArgumentParser(description="""\
+        nntplib built-in demo - display the latest articles in a newsgroup""")
+    parser.add_argument('-g', '--group', default='gmane.comp.python.general',
+                        help='group to fetch messages from (default: %(default)s)')
+    parser.add_argument('-s', '--server', default='news.gmane.io',
+                        help='NNTP server hostname (default: %(default)s)')
+    parser.add_argument('-p', '--port', default=-1, type=int,
+                        help='NNTP port number (default: %s / %s)' % (NNTP_PORT, NNTP_SSL_PORT))
+    parser.add_argument('-n', '--nb-articles', default=10, type=int,
+                        help='number of articles to fetch (default: %(default)s)')
+    parser.add_argument('-S', '--ssl', action='store_true', default=False,
+                        help='use NNTP over SSL')
+    args = parser.parse_args()
+
+    port = args.port
+    if not args.ssl:
+        if port == -1:
+            port = NNTP_PORT
+        s = NNTP(host=args.server, port=port)
+    else:
+        if port == -1:
+            port = NNTP_SSL_PORT
+        s = NNTP_SSL(host=args.server, port=port)
+
+    caps = s.getcapabilities()
+    if 'STARTTLS' in caps:
+        s.starttls()
+    resp, count, first, last, name = s.group(args.group)
+    print('Group', name, 'has', count, 'articles, range', first, 'to', last)
+
+    def cut(s, lim):
+        if len(s) > lim:
+            s = s[:lim - 4] + "..."
+        return s
+
+    first = str(int(last) - args.nb_articles + 1)
+    resp, overviews = s.xover(first, last)
+    for artnum, over in overviews:
+        author = decode_header(over['from']).split('<', 1)[0]
+        subject = decode_header(over['subject'])
+        lines = int(over[':lines'])
+        print("{:7} {:20} {:42} ({})".format(
+            artnum, cut(author, 20), cut(subject, 42), lines)
+        )
+
+    s.quit()
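+
+    # Example invocation (illustrative), using the defaults defined above:
+    #
+    #     python nntplib.py -s news.gmane.io -g gmane.comp.python.general -n 5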