Light-Dav committed on
Commit ce27c7c · verified · 1 Parent(s): 24d75f6

Add files using upload-large-folder tool

venv/Lib/site-packages/decorator.py ADDED
@@ -0,0 +1,459 @@
1
+ # ######################### LICENSE ############################ #
2
+
3
+ # Copyright (c) 2005-2025, Michele Simionato
4
+ # All rights reserved.
5
+
6
+ # Redistribution and use in source and binary forms, with or without
7
+ # modification, are permitted provided that the following conditions are
8
+ # met:
9
+
10
+ # Redistributions of source code must retain the above copyright
11
+ # notice, this list of conditions and the following disclaimer.
12
+ # Redistributions in bytecode form must reproduce the above copyright
13
+ # notice, this list of conditions and the following disclaimer in
14
+ # the documentation and/or other materials provided with the
15
+ # distribution.
16
+
17
+ # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
18
+ # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
19
+ # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
20
+ # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
21
+ # HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
22
+ # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
23
+ # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
24
+ # OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
25
+ # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
26
+ # TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
27
+ # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
28
+ # DAMAGE.
29
+
30
+ """
31
+ Decorator module, see
32
+ https://github.com/micheles/decorator/blob/master/docs/documentation.md
33
+ for the documentation.
34
+ """
35
+ import re
36
+ import sys
37
+ import inspect
38
+ import operator
39
+ import itertools
40
+ import functools
41
+ from contextlib import _GeneratorContextManager
42
+ from inspect import getfullargspec, iscoroutinefunction, isgeneratorfunction
43
+
44
+ __version__ = '5.2.1'
45
+
46
+ DEF = re.compile(r'\s*def\s*([_\w][_\w\d]*)\s*\(')
47
+ POS = inspect.Parameter.POSITIONAL_OR_KEYWORD
48
+ EMPTY = inspect.Parameter.empty
49
+
50
+
51
+ # this is not used anymore in the core, but kept for backward compatibility
52
+ class FunctionMaker(object):
53
+ """
54
+ An object with the ability to create functions with a given signature.
55
+ It has attributes name, doc, module, signature, defaults, dict and
56
+ methods update and make.
57
+ """
58
+
59
+ # Atomic get-and-increment provided by the GIL
60
+ _compile_count = itertools.count()
61
+
62
+ # make pylint happy
63
+ args = varargs = varkw = defaults = kwonlyargs = kwonlydefaults = ()
64
+
65
+ def __init__(self, func=None, name=None, signature=None,
66
+ defaults=None, doc=None, module=None, funcdict=None):
67
+ self.shortsignature = signature
68
+ if func:
69
+ # func can be a class or a callable, but not an instance method
70
+ self.name = func.__name__
71
+ if self.name == '<lambda>': # small hack for lambda functions
72
+ self.name = '_lambda_'
73
+ self.doc = func.__doc__
74
+ self.module = func.__module__
75
+ if inspect.isroutine(func) or isinstance(func, functools.partial):
76
+ argspec = getfullargspec(func)
77
+ self.annotations = getattr(func, '__annotations__', {})
78
+ for a in ('args', 'varargs', 'varkw', 'defaults', 'kwonlyargs',
79
+ 'kwonlydefaults'):
80
+ setattr(self, a, getattr(argspec, a))
81
+ for i, arg in enumerate(self.args):
82
+ setattr(self, 'arg%d' % i, arg)
83
+ allargs = list(self.args)
84
+ allshortargs = list(self.args)
85
+ if self.varargs:
86
+ allargs.append('*' + self.varargs)
87
+ allshortargs.append('*' + self.varargs)
88
+ elif self.kwonlyargs:
89
+ allargs.append('*') # single star syntax
90
+ for a in self.kwonlyargs:
91
+ allargs.append('%s=None' % a)
92
+ allshortargs.append('%s=%s' % (a, a))
93
+ if self.varkw:
94
+ allargs.append('**' + self.varkw)
95
+ allshortargs.append('**' + self.varkw)
96
+ self.signature = ', '.join(allargs)
97
+ self.shortsignature = ', '.join(allshortargs)
98
+ self.dict = func.__dict__.copy()
99
+ # func=None happens when decorating a caller
100
+ if name:
101
+ self.name = name
102
+ if signature is not None:
103
+ self.signature = signature
104
+ if defaults:
105
+ self.defaults = defaults
106
+ if doc:
107
+ self.doc = doc
108
+ if module:
109
+ self.module = module
110
+ if funcdict:
111
+ self.dict = funcdict
112
+ # check that the required attributes exist
113
+ assert hasattr(self, 'name')
114
+ if not hasattr(self, 'signature'):
115
+ raise TypeError('You are decorating a non function: %s' % func)
116
+
117
+ def update(self, func, **kw):
118
+ """
119
+ Update the signature of func with the data in self
120
+ """
121
+ func.__name__ = self.name
122
+ func.__doc__ = getattr(self, 'doc', None)
123
+ func.__dict__ = getattr(self, 'dict', {})
124
+ func.__defaults__ = self.defaults
125
+ func.__kwdefaults__ = self.kwonlydefaults or None
126
+ func.__annotations__ = getattr(self, 'annotations', None)
127
+ try:
128
+ frame = sys._getframe(3)
129
+ except AttributeError: # for IronPython and similar implementations
130
+ callermodule = '?'
131
+ else:
132
+ callermodule = frame.f_globals.get('__name__', '?')
133
+ func.__module__ = getattr(self, 'module', callermodule)
134
+ func.__dict__.update(kw)
135
+
136
+ def make(self, src_templ, evaldict=None, addsource=False, **attrs):
137
+ """
138
+ Make a new function from a given template and update the signature
139
+ """
140
+ src = src_templ % vars(self) # expand name and signature
141
+ evaldict = evaldict or {}
142
+ mo = DEF.search(src)
143
+ if mo is None:
144
+ raise SyntaxError('not a valid function template\n%s' % src)
145
+ name = mo.group(1) # extract the function name
146
+ names = set([name] + [arg.strip(' *') for arg in
147
+ self.shortsignature.split(',')])
148
+ for n in names:
149
+ if n in ('_func_', '_call_'):
150
+ raise NameError('%s is overridden in\n%s' % (n, src))
151
+
152
+ if not src.endswith('\n'): # add a newline for old Pythons
153
+ src += '\n'
154
+
155
+ # Ensure each generated function has a unique filename for profilers
156
+ # (such as cProfile) that depend on the tuple of (<filename>,
157
+ # <definition line>, <function name>) being unique.
158
+ filename = '<decorator-gen-%d>' % next(self._compile_count)
159
+ try:
160
+ code = compile(src, filename, 'single')
161
+ exec(code, evaldict)
162
+ except Exception:
163
+ print('Error in generated code:', file=sys.stderr)
164
+ print(src, file=sys.stderr)
165
+ raise
166
+ func = evaldict[name]
167
+ if addsource:
168
+ attrs['__source__'] = src
169
+ self.update(func, **attrs)
170
+ return func
171
+
172
+ @classmethod
173
+ def create(cls, obj, body, evaldict, defaults=None,
174
+ doc=None, module=None, addsource=True, **attrs):
175
+ """
176
+ Create a function from the strings name, signature and body.
177
+ evaldict is the evaluation dictionary. If addsource is true an
178
+ attribute __source__ is added to the result. The attributes attrs
179
+ are added, if any.
180
+ """
181
+ if isinstance(obj, str): # "name(signature)"
182
+ name, rest = obj.strip().split('(', 1)
183
+ signature = rest[:-1] # strip the trailing right paren
184
+ func = None
185
+ else: # a function
186
+ name = None
187
+ signature = None
188
+ func = obj
189
+ self = cls(func, name, signature, defaults, doc, module)
190
+ ibody = '\n'.join(' ' + line for line in body.splitlines())
191
+ caller = evaldict.get('_call_') # when called from `decorate`
192
+ if caller and iscoroutinefunction(caller):
193
+ body = ('async def %(name)s(%(signature)s):\n' + ibody).replace(
194
+ 'return', 'return await')
195
+ else:
196
+ body = 'def %(name)s(%(signature)s):\n' + ibody
197
+ return self.make(body, evaldict, addsource, **attrs)
198
+
199
+
200
+ def fix(args, kwargs, sig):
201
+ """
202
+ Fix args and kwargs to be consistent with the signature
203
+ """
204
+ ba = sig.bind(*args, **kwargs)
205
+ ba.apply_defaults() # needed for test_dan_schult
206
+ return ba.args, ba.kwargs
207
+
208
+
209
+ def decorate(func, caller, extras=(), kwsyntax=False):
210
+ """
211
+ Decorates a function/generator/coroutine using a caller.
212
+ If kwsyntax is True, calling the decorated function with keyword
213
+ syntax will pass the named arguments inside the ``kw`` dictionary,
214
+ even if such arguments are positional, similarly to what functools.wraps
215
+ does. By default kwsyntax is False and the arguments are untouched.
216
+ """
217
+ sig = inspect.signature(func)
218
+ if isinstance(func, functools.partial):
219
+ func = functools.update_wrapper(func, func.func)
220
+ if iscoroutinefunction(caller):
221
+ async def fun(*args, **kw):
222
+ if not kwsyntax:
223
+ args, kw = fix(args, kw, sig)
224
+ return await caller(func, *(extras + args), **kw)
225
+ elif isgeneratorfunction(caller):
226
+ def fun(*args, **kw):
227
+ if not kwsyntax:
228
+ args, kw = fix(args, kw, sig)
229
+ for res in caller(func, *(extras + args), **kw):
230
+ yield res
231
+ else:
232
+ def fun(*args, **kw):
233
+ if not kwsyntax:
234
+ args, kw = fix(args, kw, sig)
235
+ return caller(func, *(extras + args), **kw)
236
+
237
+ fun.__name__ = func.__name__
238
+ fun.__doc__ = func.__doc__
239
+ fun.__wrapped__ = func
240
+ fun.__signature__ = sig
241
+ fun.__qualname__ = func.__qualname__
242
+ # builtin functions like defaultdict.__setitem__ lack many attributes
243
+ try:
244
+ fun.__defaults__ = func.__defaults__
245
+ except AttributeError:
246
+ pass
247
+ try:
248
+ fun.__kwdefaults__ = func.__kwdefaults__
249
+ except AttributeError:
250
+ pass
251
+ try:
252
+ fun.__annotations__ = func.__annotations__
253
+ except AttributeError:
254
+ pass
255
+ try:
256
+ fun.__module__ = func.__module__
257
+ except AttributeError:
258
+ pass
259
+ try:
260
+ fun.__name__ = func.__name__
261
+ except AttributeError: # happens with old versions of numpy.vectorize
262
+ fun.__name__ = 'noname' # fall back to a placeholder name
263
+ try:
264
+ fun.__dict__.update(func.__dict__)
265
+ except AttributeError:
266
+ pass
267
+ return fun
268
+
269
+
270
+ def decoratorx(caller):
271
+ """
272
+ A version of "decorator" implemented via "exec" and not via the
273
+ Signature object. Use this if you want to preserve the `.__code__`
274
+ object properties (https://github.com/micheles/decorator/issues/129).
275
+ """
276
+ def dec(func):
277
+ return FunctionMaker.create(
278
+ func,
279
+ "return _call_(_func_, %(shortsignature)s)",
280
+ dict(_call_=caller, _func_=func),
281
+ __wrapped__=func, __qualname__=func.__qualname__)
282
+ return dec
283
+
284
+
285
+ def decorator(caller, _func=None, kwsyntax=False):
286
+ """
287
+ decorator(caller) converts a caller function into a decorator
288
+ """
289
+ if _func is not None: # return a decorated function
290
+ # this is obsolete behavior; you should use decorate instead
291
+ return decorate(_func, caller, (), kwsyntax)
292
+ # else return a decorator function
293
+ sig = inspect.signature(caller)
294
+ dec_params = [p for p in sig.parameters.values() if p.kind is POS]
295
+
296
+ def dec(func=None, *args, **kw):
297
+ na = len(args) + 1
298
+ extras = args + tuple(kw.get(p.name, p.default)
299
+ for p in dec_params[na:]
300
+ if p.default is not EMPTY)
301
+ if func is None:
302
+ return lambda func: decorate(func, caller, extras, kwsyntax)
303
+ else:
304
+ return decorate(func, caller, extras, kwsyntax)
305
+ dec.__signature__ = sig.replace(parameters=dec_params)
306
+ dec.__name__ = caller.__name__
307
+ dec.__doc__ = caller.__doc__
308
+ dec.__wrapped__ = caller
309
+ dec.__qualname__ = caller.__qualname__
310
+ dec.__kwdefaults__ = getattr(caller, '__kwdefaults__', None)
311
+ dec.__dict__.update(caller.__dict__)
312
+ return dec
313
+
314
+
315
+ # ####################### contextmanager ####################### #
316
+
317
+
318
+ class ContextManager(_GeneratorContextManager):
319
+ def __init__(self, g, *a, **k):
320
+ _GeneratorContextManager.__init__(self, g, a, k)
321
+
322
+ def __call__(self, func):
323
+ def caller(f, *a, **k):
324
+ with self.__class__(self.func, *self.args, **self.kwds):
325
+ return f(*a, **k)
326
+ return decorate(func, caller)
327
+
328
+
329
+ _contextmanager = decorator(ContextManager)
330
+
331
+
332
+ def contextmanager(func):
333
+ # Enable Pylint config: contextmanager-decorators=decorator.contextmanager
334
+ return _contextmanager(func)
335
+
336
+
337
+ # ############################ dispatch_on ############################ #
338
+
339
+ def append(a, vancestors):
340
+ """
341
+ Append ``a`` to the list of the virtual ancestors, unless it is already
342
+ included.
343
+ """
344
+ add = True
345
+ for j, va in enumerate(vancestors):
346
+ if issubclass(va, a):
347
+ add = False
348
+ break
349
+ if issubclass(a, va):
350
+ vancestors[j] = a
351
+ add = False
352
+ if add:
353
+ vancestors.append(a)
354
+
355
+
356
+ # inspired by simplegeneric by P.J. Eby and functools.singledispatch
357
+ def dispatch_on(*dispatch_args):
358
+ """
359
+ Factory of decorators turning a function into a generic function
360
+ dispatching on the given arguments.
361
+ """
362
+ assert dispatch_args, 'No dispatch args passed'
363
+ dispatch_str = '(%s,)' % ', '.join(dispatch_args)
364
+
365
+ def check(arguments, wrong=operator.ne, msg=''):
366
+ """Make sure one passes the expected number of arguments"""
367
+ if wrong(len(arguments), len(dispatch_args)):
368
+ raise TypeError('Expected %d arguments, got %d%s' %
369
+ (len(dispatch_args), len(arguments), msg))
370
+
371
+ def gen_func_dec(func):
372
+ """Decorator turning a function into a generic function"""
373
+
374
+ # first check the dispatch arguments
375
+ argset = set(getfullargspec(func).args)
376
+ if not set(dispatch_args) <= argset:
377
+ raise NameError('Unknown dispatch arguments %s' % dispatch_str)
378
+
379
+ typemap = {}
380
+
381
+ def vancestors(*types):
382
+ """
383
+ Get a list of sets of virtual ancestors for the given types
384
+ """
385
+ check(types)
386
+ ras = [[] for _ in range(len(dispatch_args))]
387
+ for types_ in typemap:
388
+ for t, type_, ra in zip(types, types_, ras):
389
+ if issubclass(t, type_) and type_ not in t.mro():
390
+ append(type_, ra)
391
+ return [set(ra) for ra in ras]
392
+
393
+ def ancestors(*types):
394
+ """
395
+ Get a list of virtual MROs, one for each type
396
+ """
397
+ check(types)
398
+ lists = []
399
+ for t, vas in zip(types, vancestors(*types)):
400
+ n_vas = len(vas)
401
+ if n_vas > 1:
402
+ raise RuntimeError(
403
+ 'Ambiguous dispatch for %s: %s' % (t, vas))
404
+ elif n_vas == 1:
405
+ va, = vas
406
+ mro = type('t', (t, va), {}).mro()[1:]
407
+ else:
408
+ mro = t.mro()
409
+ lists.append(mro[:-1]) # discard t and object
410
+ return lists
411
+
412
+ def register(*types):
413
+ """
414
+ Decorator to register an implementation for the given types
415
+ """
416
+ check(types)
417
+
418
+ def dec(f):
419
+ check(getfullargspec(f).args, operator.lt, ' in ' + f.__name__)
420
+ typemap[types] = f
421
+ return f
422
+ return dec
423
+
424
+ def dispatch_info(*types):
425
+ """
426
+ A utility to introspect the dispatch algorithm
427
+ """
428
+ check(types)
429
+ lst = []
430
+ for ancs in itertools.product(*ancestors(*types)):
431
+ lst.append(tuple(a.__name__ for a in ancs))
432
+ return lst
433
+
434
+ def _dispatch(dispatch_args, *args, **kw):
435
+ types = tuple(type(arg) for arg in dispatch_args)
436
+ try: # fast path
437
+ f = typemap[types]
438
+ except KeyError:
439
+ pass
440
+ else:
441
+ return f(*args, **kw)
442
+ combinations = itertools.product(*ancestors(*types))
443
+ next(combinations) # the first one has been already tried
444
+ for types_ in combinations:
445
+ f = typemap.get(types_)
446
+ if f is not None:
447
+ return f(*args, **kw)
448
+
449
+ # else call the default implementation
450
+ return func(*args, **kw)
451
+
452
+ return FunctionMaker.create(
453
+ func, 'return _f_(%s, %%(shortsignature)s)' % dispatch_str,
454
+ dict(_f_=_dispatch), register=register, default=func,
455
+ typemap=typemap, vancestors=vancestors, ancestors=ancestors,
456
+ dispatch_info=dispatch_info, __wrapped__=func)
457
+
458
+ gen_func_dec.__name__ = 'dispatch_on' + dispatch_str
459
+ return gen_func_dec
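
A minimal usage sketch for the module above (not part of the vendored file). It exercises the two main entry points visible in this diff, ``decorator`` and ``dispatch_on``; the names ``timed``, ``add`` and ``describe`` are hypothetical and used only for illustration.

import inspect
import time

from decorator import decorator, dispatch_on


@decorator
def timed(func, *args, **kw):
    # The caller receives the wrapped function followed by its arguments.
    start = time.time()
    result = func(*args, **kw)
    print('%s took %.6f s' % (func.__name__, time.time() - start))
    return result


@timed
def add(x, y=1):
    return x + y


print(add(2, 3))               # runs through the caller and returns 5
print(inspect.signature(add))  # (x, y=1) -- the original signature is kept


@dispatch_on('obj')
def describe(obj):
    return 'generic object'    # default implementation


@describe.register(int)
def describe_int(obj):
    return 'an integer'


print(describe(1))    # 'an integer'  (dispatched on type(obj))
print(describe('x'))  # 'generic object'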
venv/Lib/site-packages/ipykernel_launcher.py ADDED
@@ -0,0 +1,18 @@
1
+ """Entry point for launching an IPython kernel.
2
+
3
+ This is separate from the ipykernel package so we can avoid doing imports until
4
+ after removing the cwd from sys.path.
5
+ """
6
+
7
+ import sys
8
+ from pathlib import Path
9
+
10
+ if __name__ == "__main__":
11
+ # Remove the CWD from sys.path while we load stuff.
12
+ # This is added back by InteractiveShellApp.init_path()
13
+ if sys.path[0] == "" or Path(sys.path[0]) == Path.cwd():
14
+ del sys.path[0]
15
+
16
+ from ipykernel import kernelapp as app
17
+
18
+ app.launch_new_instance()
venv/Lib/site-packages/ipython_pygments_lexers.py ADDED
@@ -0,0 +1,582 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Defines a variety of Pygments lexers for highlighting IPython code.
4
+
5
+ This includes:
6
+
7
+ IPythonLexer, IPython3Lexer
8
+ Lexers for pure IPython (python + magic/shell commands)
9
+
10
+ IPythonPartialTracebackLexer, IPythonTracebackLexer
11
+ Supports 2.x and 3.x via keyword `python3`. The partial traceback
12
+ lexer reads everything but the Python code appearing in a traceback.
13
+ The full lexer combines the partial lexer with an IPython lexer.
14
+
15
+ IPythonConsoleLexer
16
+ A lexer for IPython console sessions, with support for tracebacks.
17
+
18
+ IPyLexer
19
+ A friendly lexer which examines the first line of text and from it,
20
+ decides whether to use an IPython lexer or an IPython console lexer.
21
+ This is probably the only lexer that needs to be explicitly added
22
+ to Pygments.
23
+
24
+ """
25
+ # -----------------------------------------------------------------------------
26
+ # Copyright (c) 2013, the IPython Development Team.
27
+ #
28
+ # Distributed under the terms of the Modified BSD License.
29
+ #
30
+ # The full license is in the file COPYING.txt, distributed with this software.
31
+ # -----------------------------------------------------------------------------
32
+
33
+ __version__ = "1.1.1"
34
+
35
+ # Standard library
36
+ import re
37
+
38
+ # Third party
39
+ from pygments.lexers import (
40
+ BashLexer,
41
+ HtmlLexer,
42
+ JavascriptLexer,
43
+ RubyLexer,
44
+ PerlLexer,
45
+ Python2Lexer,
46
+ Python3Lexer,
47
+ TexLexer,
48
+ )
49
+ from pygments.lexer import (
50
+ Lexer,
51
+ DelegatingLexer,
52
+ RegexLexer,
53
+ do_insertions,
54
+ bygroups,
55
+ using,
56
+ )
57
+ from pygments.token import (
58
+ Generic,
59
+ Keyword,
60
+ Literal,
61
+ Name,
62
+ Operator,
63
+ Other,
64
+ Text,
65
+ Error,
66
+ )
67
+
68
+
69
+ line_re = re.compile(".*?\n")
70
+
71
+ __all__ = [
72
+ "IPython3Lexer",
73
+ "IPythonLexer",
74
+ "IPythonPartialTracebackLexer",
75
+ "IPythonTracebackLexer",
76
+ "IPythonConsoleLexer",
77
+ "IPyLexer",
78
+ ]
79
+
80
+
81
+ ipython_tokens = [
82
+ (
83
+ r"(?s)(\s*)(%%capture)([^\n]*\n)(.*)",
84
+ bygroups(Text, Operator, Text, using(Python3Lexer)),
85
+ ),
86
+ (
87
+ r"(?s)(\s*)(%%debug)([^\n]*\n)(.*)",
88
+ bygroups(Text, Operator, Text, using(Python3Lexer)),
89
+ ),
90
+ (
91
+ r"(?is)(\s*)(%%html)([^\n]*\n)(.*)",
92
+ bygroups(Text, Operator, Text, using(HtmlLexer)),
93
+ ),
94
+ (
95
+ r"(?s)(\s*)(%%javascript)([^\n]*\n)(.*)",
96
+ bygroups(Text, Operator, Text, using(JavascriptLexer)),
97
+ ),
98
+ (
99
+ r"(?s)(\s*)(%%js)([^\n]*\n)(.*)",
100
+ bygroups(Text, Operator, Text, using(JavascriptLexer)),
101
+ ),
102
+ (
103
+ r"(?s)(\s*)(%%latex)([^\n]*\n)(.*)",
104
+ bygroups(Text, Operator, Text, using(TexLexer)),
105
+ ),
106
+ (
107
+ r"(?s)(\s*)(%%perl)([^\n]*\n)(.*)",
108
+ bygroups(Text, Operator, Text, using(PerlLexer)),
109
+ ),
110
+ (
111
+ r"(?s)(\s*)(%%prun)([^\n]*\n)(.*)",
112
+ bygroups(Text, Operator, Text, using(Python3Lexer)),
113
+ ),
114
+ (
115
+ r"(?s)(\s*)(%%pypy)([^\n]*\n)(.*)",
116
+ bygroups(Text, Operator, Text, using(Python3Lexer)),
117
+ ),
118
+ (
119
+ r"(?s)(\s*)(%%python2)([^\n]*\n)(.*)",
120
+ bygroups(Text, Operator, Text, using(Python2Lexer)),
121
+ ),
122
+ (
123
+ r"(?s)(\s*)(%%python3)([^\n]*\n)(.*)",
124
+ bygroups(Text, Operator, Text, using(Python3Lexer)),
125
+ ),
126
+ (
127
+ r"(?s)(\s*)(%%python)([^\n]*\n)(.*)",
128
+ bygroups(Text, Operator, Text, using(Python3Lexer)),
129
+ ),
130
+ (
131
+ r"(?s)(\s*)(%%ruby)([^\n]*\n)(.*)",
132
+ bygroups(Text, Operator, Text, using(RubyLexer)),
133
+ ),
134
+ (
135
+ r"(?s)(\s*)(%%timeit)([^\n]*\n)(.*)",
136
+ bygroups(Text, Operator, Text, using(Python3Lexer)),
137
+ ),
138
+ (
139
+ r"(?s)(\s*)(%%time)([^\n]*\n)(.*)",
140
+ bygroups(Text, Operator, Text, using(Python3Lexer)),
141
+ ),
142
+ (
143
+ r"(?s)(\s*)(%%writefile)([^\n]*\n)(.*)",
144
+ bygroups(Text, Operator, Text, using(Python3Lexer)),
145
+ ),
146
+ (
147
+ r"(?s)(\s*)(%%file)([^\n]*\n)(.*)",
148
+ bygroups(Text, Operator, Text, using(Python3Lexer)),
149
+ ),
150
+ (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)),
151
+ (
152
+ r"(?s)(^\s*)(%%!)([^\n]*\n)(.*)",
153
+ bygroups(Text, Operator, Text, using(BashLexer)),
154
+ ),
155
+ (r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)),
156
+ (r"\b(\?\??)(\s*)$", bygroups(Operator, Text)),
157
+ (r"(%)(sx|sc|system)(.*)(\n)", bygroups(Operator, Keyword, using(BashLexer), Text)),
158
+ (r"(%)(\w+)(.*\n)", bygroups(Operator, Keyword, Text)),
159
+ (r"^(!!)(.+)(\n)", bygroups(Operator, using(BashLexer), Text)),
160
+ (r"(!)(?!=)(.+)(\n)", bygroups(Operator, using(BashLexer), Text)),
161
+ (r"^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)", bygroups(Text, Operator, Text)),
162
+ (r"(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$", bygroups(Text, Operator, Text)),
163
+ ]
164
+
165
+
166
+ class IPython3Lexer(Python3Lexer):
167
+ """IPython code lexer (based on Python 3)"""
168
+
169
+ name = "IPython"
170
+ aliases = ["ipython", "ipython3"]
171
+
172
+ tokens = Python3Lexer.tokens.copy()
173
+ tokens["root"] = ipython_tokens + tokens["root"]
174
+
175
+
176
+ IPythonLexer = IPython3Lexer
177
+
178
+
179
+ class IPythonPartialTracebackLexer(RegexLexer):
180
+ """
181
+ Partial lexer for IPython tracebacks.
182
+
183
+ Handles all the non-python output.
184
+
185
+ """
186
+
187
+ name = "IPython Partial Traceback"
188
+
189
+ tokens = {
190
+ "root": [
191
+ # Tracebacks for syntax errors have a different style.
192
+ # For both types of tracebacks, we mark the first line with
193
+ # Generic.Traceback. For syntax errors, we mark the filename
194
+ # as we mark the filenames for non-syntax tracebacks.
195
+ #
196
+ # These two regexps define how IPythonConsoleLexer finds a
197
+ # traceback.
198
+ #
199
+ ## Non-syntax traceback
200
+ (r"^(\^C)?(-+\n)", bygroups(Error, Generic.Traceback)),
201
+ ## Syntax traceback
202
+ (
203
+ r"^( File)(.*)(, line )(\d+\n)",
204
+ bygroups(
205
+ Generic.Traceback,
206
+ Name.Namespace,
207
+ Generic.Traceback,
208
+ Literal.Number.Integer,
209
+ ),
210
+ ),
211
+ # (Exception Identifier)(Whitespace)(Traceback Message)
212
+ (
213
+ r"(?u)(^[^\d\W]\w*)(\s*)(Traceback.*?\n)",
214
+ bygroups(Name.Exception, Generic.Whitespace, Text),
215
+ ),
216
+ # (Module/Filename)(Text)(Callee)(Function Signature)
217
+ # Better options for callee and function signature?
218
+ (
219
+ r"(.*)( in )(.*)(\(.*\)\n)",
220
+ bygroups(Name.Namespace, Text, Name.Entity, Name.Tag),
221
+ ),
222
+ # Regular line: (Whitespace)(Line Number)(Python Code)
223
+ (
224
+ r"(\s*?)(\d+)(.*?\n)",
225
+ bygroups(Generic.Whitespace, Literal.Number.Integer, Other),
226
+ ),
227
+ # Emphasized line: (Arrow)(Line Number)(Python Code)
228
+ # Using Exception token so arrow color matches the Exception.
229
+ (
230
+ r"(-*>?\s?)(\d+)(.*?\n)",
231
+ bygroups(Name.Exception, Literal.Number.Integer, Other),
232
+ ),
233
+ # (Exception Identifier)(Message)
234
+ (r"(?u)(^[^\d\W]\w*)(:.*?\n)", bygroups(Name.Exception, Text)),
235
+ # Tag everything else as Other, will be handled later.
236
+ (r".*\n", Other),
237
+ ],
238
+ }
239
+
240
+
241
+ class IPythonTracebackLexer(DelegatingLexer):
242
+ """
243
+ IPython traceback lexer.
244
+
245
+ For doctests, the tracebacks can be snipped as much as desired with the
246
+ exception to the lines that designate a traceback. For non-syntax error
247
+ tracebacks, this is the line of hyphens. For syntax error tracebacks,
248
+ this is the line which lists the File and line number.
249
+
250
+ """
251
+
252
+ # The lexer inherits from DelegatingLexer. The "root" lexer is an
253
+ # appropriate IPython lexer, which depends on the value of the boolean
254
+ # `python3`. First, we parse with the partial IPython traceback lexer.
255
+ # Then, any code marked with the "Other" token is delegated to the root
256
+ # lexer.
257
+ #
258
+ name = "IPython Traceback"
259
+ aliases = ["ipythontb", "ipython3tb"]
260
+
261
+ def __init__(self, **options):
262
+ """
263
+ A subclass of `DelegatingLexer` which delegates between IPython3Lexer and
264
+ IPythonPartialTracebackLexer.
265
+ """
266
+ # note: we need an __init__ docstring, as otherwise it inherits the doc from the super class,
267
+ # which would fail the documentation build because it references a section of the pygments
268
+ # docs that does not exist when building IPython's docs.
269
+ DelegatingLexer.__init__(
270
+ self, IPython3Lexer, IPythonPartialTracebackLexer, **options
271
+ )
272
+
273
+
274
+ class IPythonConsoleLexer(Lexer):
275
+ """
276
+ An IPython console lexer for IPython code-blocks and doctests, such as:
277
+
278
+ .. code-block:: rst
279
+
280
+ .. code-block:: ipythonconsole
281
+
282
+ In [1]: a = 'foo'
283
+
284
+ In [2]: a
285
+ Out[2]: 'foo'
286
+
287
+ In [3]: print(a)
288
+ foo
289
+
290
+
291
+ Support is also provided for IPython exceptions:
292
+
293
+ .. code-block:: rst
294
+
295
+ .. code-block:: ipythonconsole
296
+
297
+ In [1]: raise Exception
298
+ Traceback (most recent call last):
299
+ ...
300
+ Exception
301
+
302
+ """
303
+
304
+ name = "IPython console session"
305
+ aliases = ["ipythonconsole", "ipython3console"]
306
+ mimetypes = ["text/x-ipython-console"]
307
+
308
+ # The regexps used to determine what is input and what is output.
309
+ # The default prompts for IPython are:
310
+ #
311
+ # in = 'In [#]: '
312
+ # continuation = ' .D.: '
313
+ # template = 'Out[#]: '
314
+ #
315
+ # Where '#' is the 'prompt number' or 'execution count' and 'D'
316
+ # is a number of dots matching the width of the execution count.
317
+ #
318
+ in1_regex = r"In \[[0-9]+\]: "
319
+ in2_regex = r" \.\.+\.: "
320
+ out_regex = r"Out\[[0-9]+\]: "
321
+
322
+ #: The regex to determine when a traceback starts.
323
+ ipytb_start = re.compile(r"^(\^C)?(-+\n)|^( File)(.*)(, line )(\d+\n)")
324
+
325
+ def __init__(self, **options):
326
+ """Initialize the IPython console lexer.
327
+
328
+ Parameters
329
+ ----------
330
+ in1_regex : RegexObject
331
+ The compiled regular expression used to detect the start
332
+ of inputs. Although the IPython configuration setting may have a
333
+ trailing whitespace, do not include it in the regex. If `None`,
334
+ then the default input prompt is assumed.
335
+ in2_regex : RegexObject
336
+ The compiled regular expression used to detect the continuation
337
+ of inputs. Although the IPython configuration setting may have a
338
+ trailing whitespace, do not include it in the regex. If `None`,
339
+ then the default input prompt is assumed.
340
+ out_regex : RegexObject
341
+ The compiled regular expression used to detect outputs. If `None`,
342
+ then the default output prompt is assumed.
343
+
344
+ """
345
+ in1_regex = options.get("in1_regex", self.in1_regex)
346
+ in2_regex = options.get("in2_regex", self.in2_regex)
347
+ out_regex = options.get("out_regex", self.out_regex)
348
+
349
+ # So that we can work with input and output prompts which have been
350
+ # rstrip'd (possibly by editors) we also need rstrip'd variants. If
351
+ # we do not do this, then such prompts will be tagged as 'output'.
352
+ # The reason we can't just use the rstrip'd variants instead is that
353
+ # we want any whitespace associated with the prompt to be inserted
354
+ # with the token. This allows formatted code to be modified so as to hide
355
+ # the appearance of prompts, with the whitespace included. One example
356
+ # use of this is in copybutton.js from the standard lib Python docs.
357
+ in1_regex_rstrip = in1_regex.rstrip() + "\n"
358
+ in2_regex_rstrip = in2_regex.rstrip() + "\n"
359
+ out_regex_rstrip = out_regex.rstrip() + "\n"
360
+
361
+ # Compile and save them all.
362
+ attrs = [
363
+ "in1_regex",
364
+ "in2_regex",
365
+ "out_regex",
366
+ "in1_regex_rstrip",
367
+ "in2_regex_rstrip",
368
+ "out_regex_rstrip",
369
+ ]
370
+ for attr in attrs:
371
+ self.__setattr__(attr, re.compile(locals()[attr]))
372
+
373
+ Lexer.__init__(self, **options)
374
+
375
+ self.pylexer = IPython3Lexer(**options)
376
+ self.tblexer = IPythonTracebackLexer(**options)
377
+
378
+ self.reset()
379
+
380
+ def reset(self):
381
+ self.mode = "output"
382
+ self.index = 0
383
+ self.buffer = ""
384
+ self.insertions = []
385
+
386
+ def buffered_tokens(self):
387
+ """
388
+ Generator of unprocessed tokens after doing insertions and before
389
+ changing to a new state.
390
+
391
+ """
392
+ if self.mode == "output":
393
+ tokens = [(0, Generic.Output, self.buffer)]
394
+ elif self.mode == "input":
395
+ tokens = self.pylexer.get_tokens_unprocessed(self.buffer)
396
+ else: # traceback
397
+ tokens = self.tblexer.get_tokens_unprocessed(self.buffer)
398
+
399
+ for i, t, v in do_insertions(self.insertions, tokens):
400
+ # All token indexes are relative to the buffer.
401
+ yield self.index + i, t, v
402
+
403
+ # Clear it all
404
+ self.index += len(self.buffer)
405
+ self.buffer = ""
406
+ self.insertions = []
407
+
408
+ def get_mci(self, line):
409
+ """
410
+ Parses the line and returns a 3-tuple: (mode, code, insertion).
411
+
412
+ `mode` is the next mode (or state) of the lexer, and is always equal
413
+ to 'input', 'output', or 'tb'.
414
+
415
+ `code` is a portion of the line that should be added to the buffer
416
+ corresponding to the next mode and eventually lexed by another lexer.
417
+ For example, `code` could be Python code if `mode` were 'input'.
418
+
419
+ `insertion` is a 3-tuple (index, token, text) representing an
420
+ unprocessed "token" that will be inserted into the stream of tokens
421
+ that are created from the buffer once we change modes. This is usually
422
+ the input or output prompt.
423
+
424
+ In general, the next mode depends on current mode and on the contents
425
+ of `line`.
426
+
427
+ """
428
+ # To reduce the number of regex match checks, we have multiple
429
+ # 'if' blocks instead of 'if-elif' blocks.
430
+
431
+ # Check for possible end of input
432
+ in2_match = self.in2_regex.match(line)
433
+ in2_match_rstrip = self.in2_regex_rstrip.match(line)
434
+ if (
435
+ in2_match and in2_match.group().rstrip() == line.rstrip()
436
+ ) or in2_match_rstrip:
437
+ end_input = True
438
+ else:
439
+ end_input = False
440
+ if end_input and self.mode != "tb":
441
+ # Only look for an end of input when not in tb mode.
442
+ # An ellipsis could appear within the traceback.
443
+ mode = "output"
444
+ code = ""
445
+ insertion = (0, Generic.Prompt, line)
446
+ return mode, code, insertion
447
+
448
+ # Check for output prompt
449
+ out_match = self.out_regex.match(line)
450
+ out_match_rstrip = self.out_regex_rstrip.match(line)
451
+ if out_match or out_match_rstrip:
452
+ mode = "output"
453
+ if out_match:
454
+ idx = out_match.end()
455
+ else:
456
+ idx = out_match_rstrip.end()
457
+ code = line[idx:]
458
+ # Use the 'heading' token for output. We cannot use Generic.Error
459
+ # since it would conflict with exceptions.
460
+ insertion = (0, Generic.Heading, line[:idx])
461
+ return mode, code, insertion
462
+
463
+ # Check for input or continuation prompt (non stripped version)
464
+ in1_match = self.in1_regex.match(line)
465
+ if in1_match or (in2_match and self.mode != "tb"):
466
+ # New input or when not in tb, continued input.
467
+ # We do not check for continued input when in tb since it is
468
+ # allowable to replace a long stack with an ellipsis.
469
+ mode = "input"
470
+ if in1_match:
471
+ idx = in1_match.end()
472
+ else: # in2_match
473
+ idx = in2_match.end()
474
+ code = line[idx:]
475
+ insertion = (0, Generic.Prompt, line[:idx])
476
+ return mode, code, insertion
477
+
478
+ # Check for input or continuation prompt (stripped version)
479
+ in1_match_rstrip = self.in1_regex_rstrip.match(line)
480
+ if in1_match_rstrip or (in2_match_rstrip and self.mode != "tb"):
481
+ # New input or when not in tb, continued input.
482
+ # We do not check for continued input when in tb since it is
483
+ # allowable to replace a long stack with an ellipsis.
484
+ mode = "input"
485
+ if in1_match_rstrip:
486
+ idx = in1_match_rstrip.end()
487
+ else: # in2_match
488
+ idx = in2_match_rstrip.end()
489
+ code = line[idx:]
490
+ insertion = (0, Generic.Prompt, line[:idx])
491
+ return mode, code, insertion
492
+
493
+ # Check for traceback
494
+ if self.ipytb_start.match(line):
495
+ mode = "tb"
496
+ code = line
497
+ insertion = None
498
+ return mode, code, insertion
499
+
500
+ # All other stuff...
501
+ if self.mode in ("input", "output"):
502
+ # We assume all other text is output. Multiline input that
503
+ # does not use the continuation marker cannot be detected.
504
+ # For example, the 3 in the following is clearly output:
505
+ #
506
+ # In [1]: print(3)
507
+ # 3
508
+ #
509
+ # But the following second line is part of the input:
510
+ #
511
+ # In [2]: while True:
512
+ # print(True)
513
+ #
514
+ # In both cases, the 2nd line will be 'output'.
515
+ #
516
+ mode = "output"
517
+ else:
518
+ mode = "tb"
519
+
520
+ code = line
521
+ insertion = None
522
+
523
+ return mode, code, insertion
524
+
525
+ def get_tokens_unprocessed(self, text):
526
+ self.reset()
527
+ for match in line_re.finditer(text):
528
+ line = match.group()
529
+ mode, code, insertion = self.get_mci(line)
530
+
531
+ if mode != self.mode:
532
+ # Yield buffered tokens before transitioning to new mode.
533
+ for token in self.buffered_tokens():
534
+ yield token
535
+ self.mode = mode
536
+
537
+ if insertion:
538
+ self.insertions.append((len(self.buffer), [insertion]))
539
+ self.buffer += code
540
+
541
+ for token in self.buffered_tokens():
542
+ yield token
543
+
544
+
545
+ class IPyLexer(Lexer):
546
+ r"""
547
+ Primary lexer for all IPython-like code.
548
+
549
+ This is a simple helper lexer. If the first line of the text begins with
550
+ "In \[[0-9]+\]:", then the entire text is parsed with an IPython console
551
+ lexer. If not, then the entire text is parsed with an IPython lexer.
552
+
553
+ The goal is to reduce the number of lexers that are registered
554
+ with Pygments.
555
+
556
+ """
557
+
558
+ name = "IPy session"
559
+ aliases = ["ipy", "ipy3"]
560
+
561
+ def __init__(self, **options):
562
+ """
563
+ Create a new IPyLexer instance which dispatches to either an
564
+ IPythonConsoleLexer (if "In" prompts are present) or an IPythonLexer (if
565
+ they are not).
566
+ """
567
+ # init docstring is necessary for the docs not to fail to build due to the parent
568
+ # docs referencing a section in the pygments docs.
569
+ Lexer.__init__(self, **options)
570
+
571
+ self.IPythonLexer = IPythonLexer(**options)
572
+ self.IPythonConsoleLexer = IPythonConsoleLexer(**options)
573
+
574
+ def get_tokens_unprocessed(self, text):
575
+ # Search for the input prompt anywhere...this allows code blocks to
576
+ # begin with comments as well.
577
+ if re.match(r".*(In \[[0-9]+\]:)", text.strip(), re.DOTALL):
578
+ lex = self.IPythonConsoleLexer
579
+ else:
580
+ lex = self.IPythonLexer
581
+ for token in lex.get_tokens_unprocessed(text):
582
+ yield token
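
A minimal sketch of how the lexers above are typically used with Pygments (assuming Pygments is installed and this module is importable as ``ipython_pygments_lexers``):

from pygments import highlight
from pygments.formatters import TerminalFormatter

from ipython_pygments_lexers import IPyLexer

session = (
    "In [1]: a = 'foo'\n"
    "\n"
    "In [2]: a\n"
    "Out[2]: 'foo'\n"
)

# The "In [1]:" prompt makes IPyLexer delegate to IPythonConsoleLexer;
# plain code without prompts would be handled by the IPython code lexer instead.
print(highlight(session, IPyLexer(), TerminalFormatter()))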
venv/Lib/site-packages/isympy.py ADDED
@@ -0,0 +1,342 @@
1
+ """
2
+ Python shell for SymPy.
3
+
4
+ This is just a normal Python shell (IPython shell if you have the
5
+ IPython package installed), that executes the following commands for
6
+ the user:
7
+
8
+ >>> from __future__ import division
9
+ >>> from sympy import *
10
+ >>> x, y, z, t = symbols('x y z t')
11
+ >>> k, m, n = symbols('k m n', integer=True)
12
+ >>> f, g, h = symbols('f g h', cls=Function)
13
+ >>> init_printing()
14
+
15
+ So starting 'isympy' is equivalent to starting Python (or IPython) and
16
+ executing the above commands by hand. It is intended for easy and quick
17
+ experimentation with SymPy. isympy is a good way to use SymPy as an
18
+ interactive calculator. If you have IPython and Matplotlib installed, then
19
+ interactive plotting is enabled by default.
20
+
21
+ COMMAND LINE OPTIONS
22
+ --------------------
23
+
24
+ -c CONSOLE, --console=CONSOLE
25
+
26
+ Use the specified shell (Python or IPython) as the console
27
+ backend instead of the default one (IPython if present, Python
28
+ otherwise), e.g.:
29
+
30
+ $isympy -c python
31
+
32
+ CONSOLE must be one of 'ipython' or 'python'
33
+
34
+ -p PRETTY, --pretty PRETTY
35
+
36
+ Set up pretty-printing in SymPy. When pretty-printing is enabled,
37
+ expressions can be printed with Unicode or ASCII. The default is
38
+ to use pretty-printing (with Unicode if the terminal supports it).
39
+ When this option is 'no', expressions will not be pretty-printed
40
+ and ASCII will be used:
41
+
42
+ $isympy -p no
43
+
44
+ PRETTY must be one of 'unicode', 'ascii', or 'no'
45
+
46
+ -t TYPES, --types=TYPES
47
+
48
+ Set up the ground types for the polys. By default, gmpy ground types
49
+ are used if gmpy2 or gmpy is installed, otherwise it falls back to python
50
+ ground types, which are a little bit slower. You can manually
51
+ choose python ground types even if gmpy is installed (e.g., for
52
+ testing purposes):
53
+
54
+ $isympy -t python
55
+
56
+ TYPES must be one of 'gmpy', 'gmpy1' or 'python'
57
+
58
+ Note that the ground type gmpy1 is primarily intended for testing; it
59
+ forces the use of gmpy version 1 even if gmpy2 is available.
60
+
61
+ This is the same as setting the environment variable
62
+ SYMPY_GROUND_TYPES to the given ground type (e.g.,
63
+ SYMPY_GROUND_TYPES='gmpy')
64
+
65
+ The ground types can be determined interactively from the variable
66
+ sympy.polys.domains.GROUND_TYPES.
67
+
68
+ -o ORDER, --order ORDER
69
+
70
+ Set up the ordering of terms for printing. The default is lex, which
71
+ orders terms lexicographically (e.g., x**2 + x + 1). You can choose
72
+ other orderings, such as rev-lex, which will use reverse
73
+ lexicographic ordering (e.g., 1 + x + x**2):
74
+
75
+ $isympy -o rev-lex
76
+
77
+ ORDER must be one of 'lex', 'rev-lex', 'grlex', 'rev-grlex',
78
+ 'grevlex', 'rev-grevlex', 'old', or 'none'.
79
+
80
+ Note that for very large expressions, ORDER='none' may speed up
81
+ printing considerably but the terms will have no canonical order.
82
+
83
+ -q, --quiet
84
+
85
+ Print only Python's and SymPy's versions to stdout at startup.
86
+
87
+ -d, --doctest
88
+
89
+ Use the same format that should be used for doctests. This is
90
+ equivalent to -c python -p no.
91
+
92
+ -C, --no-cache
93
+
94
+ Disable the caching mechanism. Disabling the cache may slow certain
95
+ operations down considerably. This is useful for testing the cache,
96
+ or for benchmarking, as the cache can result in deceptive timings.
97
+
98
+ This is equivalent to setting the environment variable
99
+ SYMPY_USE_CACHE to 'no'.
100
+
101
+ -a, --auto-symbols (requires at least IPython 0.11)
102
+
103
+ Automatically create missing symbols. Normally, typing a name of a
104
+ Symbol that has not been instantiated first would raise NameError,
105
+ but with this option enabled, any undefined name will be
106
+ automatically created as a Symbol.
107
+
108
+ Note that this is intended only for interactive, calculator style
109
+ usage. In a script that uses SymPy, Symbols should be instantiated
110
+ at the top, so that it's clear what they are.
111
+
112
+ This will not override any names that are already defined, which
113
+ includes the single character letters represented by the mnemonic
114
+ QCOSINE (see the "Gotchas and Pitfalls" document in the
115
+ documentation). You can delete existing names by executing "del
116
+ name". If a name is defined, typing "'name' in dir()" will return True.
117
+
118
+ The Symbols that are created using this have default assumptions.
119
+ If you want to place assumptions on symbols, you should create them
120
+ using symbols() or var().
121
+
122
+ Finally, this only works in the top level namespace. So, for
123
+ example, if you define a function in isympy with an undefined
124
+ Symbol, it will not work.
125
+
126
+ See also the -i and -I options.
127
+
128
+ -i, --int-to-Integer (requires at least IPython 0.11)
129
+
130
+ Automatically wrap int literals with Integer. This makes it so that
131
+ things like 1/2 will come out as Rational(1, 2), rather than 0.5. This
132
+ works by preprocessing the source and wrapping all int literals with
133
+ Integer. Note that this will not change the behavior of int literals
134
+ assigned to variables, and it also won't change the behavior of functions
135
+ that return int literals.
136
+
137
+ If you want an int, you can wrap the literal in int(), e.g. int(3)/int(2)
138
+ gives 1.5 (with division imported from __future__).
139
+
140
+ -I, --interactive (requires at least IPython 0.11)
141
+
142
+ This is equivalent to --auto-symbols --int-to-Integer. Future options
143
+ designed for ease of interactive use may be added to this.
144
+
145
+ -D, --debug
146
+
147
+ Enable debugging output. This is the same as setting the
148
+ environment variable SYMPY_DEBUG to 'True'. The debug status is set
149
+ in the variable SYMPY_DEBUG within isympy.
150
+
151
+ -- IPython options
152
+
153
+ Additionally you can pass command line options directly to the IPython
154
+ interpreter (the standard Python shell is not supported). However you
155
+ need to add the '--' separator between the two types of options, e.g. the
156
+ startup banner option and the colors option. You need to enter the
157
+ options as required by the version of IPython that you are using, too:
158
+
159
+ in IPython 0.11,
160
+
161
+ $isympy -q -- --colors=NoColor
162
+
163
+ or, with older versions of IPython,
164
+
165
+ $isympy -q -- -colors NoColor
166
+
167
+ See also isympy --help.
168
+ """
169
+
170
+ import os
171
+ import sys
172
+
173
+ # DO NOT IMPORT SYMPY HERE! Or the setting of the sympy environment variables
174
+ # by the command line will break.
175
+
176
+ def main() -> None:
177
+ from argparse import ArgumentParser, RawDescriptionHelpFormatter
178
+
179
+ VERSION = None
180
+ if '--version' in sys.argv:
181
+ # We cannot import sympy before this is run, because flags like -C and
182
+ # -t set environment variables that must be set before SymPy is
183
+ # imported. The only thing we need to import it for is to get the
184
+ # version, which only matters with the --version flag.
185
+ import sympy
186
+ VERSION = sympy.__version__
187
+
188
+ usage = 'isympy [options] -- [ipython options]'
189
+ parser = ArgumentParser(
190
+ usage=usage,
191
+ description=__doc__,
192
+ formatter_class=RawDescriptionHelpFormatter,
193
+ )
194
+
195
+ parser.add_argument('--version', action='version', version=VERSION)
196
+
197
+ parser.add_argument(
198
+ '-c', '--console',
199
+ dest='console',
200
+ action='store',
201
+ default=None,
202
+ choices=['ipython', 'python'],
203
+ metavar='CONSOLE',
204
+ help='select type of interactive session: ipython | python; defaults '
205
+ 'to ipython if IPython is installed, otherwise python')
206
+
207
+ parser.add_argument(
208
+ '-p', '--pretty',
209
+ dest='pretty',
210
+ action='store',
211
+ default=None,
212
+ metavar='PRETTY',
213
+ choices=['unicode', 'ascii', 'no'],
214
+ help='setup pretty printing: unicode | ascii | no; defaults to '
215
+ 'unicode printing if the terminal supports it, otherwise ascii')
216
+
217
+ parser.add_argument(
218
+ '-t', '--types',
219
+ dest='types',
220
+ action='store',
221
+ default=None,
222
+ metavar='TYPES',
223
+ choices=['gmpy', 'gmpy1', 'python'],
224
+ help='setup ground types: gmpy | gmpy1 | python; defaults to gmpy if gmpy2 '
225
+ 'or gmpy is installed, otherwise python')
226
+
227
+ parser.add_argument(
228
+ '-o', '--order',
229
+ dest='order',
230
+ action='store',
231
+ default=None,
232
+ metavar='ORDER',
233
+ choices=['lex', 'grlex', 'grevlex', 'rev-lex', 'rev-grlex', 'rev-grevlex', 'old', 'none'],
234
+ help='setup ordering of terms: [rev-]lex | [rev-]grlex | [rev-]grevlex | old | none; defaults to lex')
235
+
236
+ parser.add_argument(
237
+ '-q', '--quiet',
238
+ dest='quiet',
239
+ action='store_true',
240
+ default=False,
241
+ help='print only version information at startup')
242
+
243
+ parser.add_argument(
244
+ '-d', '--doctest',
245
+ dest='doctest',
246
+ action='store_true',
247
+ default=False,
248
+ help='use the doctest format for output (you can just copy and paste it)')
249
+
250
+ parser.add_argument(
251
+ '-C', '--no-cache',
252
+ dest='cache',
253
+ action='store_false',
254
+ default=True,
255
+ help='disable caching mechanism')
256
+
257
+ parser.add_argument(
258
+ '-a', '--auto-symbols',
259
+ dest='auto_symbols',
260
+ action='store_true',
261
+ default=False,
262
+ help='automatically construct missing symbols')
263
+
264
+ parser.add_argument(
265
+ '-i', '--int-to-Integer',
266
+ dest='auto_int_to_Integer',
267
+ action='store_true',
268
+ default=False,
269
+ help="automatically wrap int literals with Integer")
270
+
271
+ parser.add_argument(
272
+ '-I', '--interactive',
273
+ dest='interactive',
274
+ action='store_true',
275
+ default=False,
276
+ help="equivalent to -a -i")
277
+
278
+ parser.add_argument(
279
+ '-D', '--debug',
280
+ dest='debug',
281
+ action='store_true',
282
+ default=False,
283
+ help='enable debugging output')
284
+
285
+ (options, ipy_args) = parser.parse_known_args()
286
+ if '--' in ipy_args:
287
+ ipy_args.remove('--')
288
+
289
+ if not options.cache:
290
+ os.environ['SYMPY_USE_CACHE'] = 'no'
291
+
292
+ if options.types:
293
+ os.environ['SYMPY_GROUND_TYPES'] = options.types
294
+
295
+ if options.debug:
296
+ os.environ['SYMPY_DEBUG'] = str(options.debug)
297
+
298
+ if options.doctest:
299
+ options.pretty = 'no'
300
+ options.console = 'python'
301
+
302
+ session = options.console
303
+
304
+ if session is not None:
305
+ ipython = session == 'ipython'
306
+ else:
307
+ try:
308
+ import IPython # noqa: F401
309
+ ipython = True
310
+ except ImportError:
311
+ if not options.quiet:
312
+ from sympy.interactive.session import no_ipython
313
+ print(no_ipython)
314
+ ipython = False
315
+
316
+ args = {
317
+ 'pretty_print': True,
318
+ 'use_unicode': None,
319
+ 'use_latex': None,
320
+ 'order': None,
321
+ 'argv': ipy_args,
322
+ }
323
+
324
+ if options.pretty == 'unicode':
325
+ args['use_unicode'] = True
326
+ elif options.pretty == 'ascii':
327
+ args['use_unicode'] = False
328
+ elif options.pretty == 'no':
329
+ args['pretty_print'] = False
330
+
331
+ if options.order is not None:
332
+ args['order'] = options.order
333
+
334
+ args['quiet'] = options.quiet
335
+ args['auto_symbols'] = options.auto_symbols or options.interactive
336
+ args['auto_int_to_Integer'] = options.auto_int_to_Integer or options.interactive
337
+
338
+ from sympy.interactive import init_session
339
+ init_session(ipython, **args)
340
+
341
+ if __name__ == "__main__":
342
+ main()
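
For reference, a minimal script-level sketch of what the docstring above says starting isympy is equivalent to (assuming SymPy is installed; the integrate() call is only an illustrative example):

from sympy import *

x, y, z, t = symbols('x y z t')
k, m, n = symbols('k m n', integer=True)
f, g, h = symbols('f g h', cls=Function)
init_printing()  # pretty-print results (Unicode if the terminal supports it)

# Expressions can now be entered as in the interactive calculator, e.g.:
print(integrate(x**2, x))  # x**3/3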
venv/Lib/site-packages/jsonpointer.py ADDED
@@ -0,0 +1,348 @@
1
+ # -*- coding: utf-8 -*-
2
+ #
3
+ # python-json-pointer - An implementation of the JSON Pointer syntax
4
+ # https://github.com/stefankoegl/python-json-pointer
5
+ #
6
+ # Copyright (c) 2011 Stefan Kögl <[email protected]>
7
+ # All rights reserved.
8
+ #
9
+ # Redistribution and use in source and binary forms, with or without
10
+ # modification, are permitted provided that the following conditions
11
+ # are met:
12
+ #
13
+ # 1. Redistributions of source code must retain the above copyright
14
+ # notice, this list of conditions and the following disclaimer.
15
+ # 2. Redistributions in binary form must reproduce the above copyright
16
+ # notice, this list of conditions and the following disclaimer in the
17
+ # documentation and/or other materials provided with the distribution.
18
+ # 3. The name of the author may not be used to endorse or promote products
19
+ # derived from this software without specific prior written permission.
20
+ #
21
+ # THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
22
+ # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
23
+ # OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
24
+ # IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
25
+ # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
26
+ # NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
27
+ # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
28
+ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
29
+ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
30
+ # THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31
+ #
32
+
33
+ """ Identify specific nodes in a JSON document (RFC 6901) """
34
+
35
+ # Will be parsed by setup.py to determine package metadata
36
+ __author__ = 'Stefan Kögl <[email protected]>'
37
+ __version__ = '3.0.0'
38
+ __website__ = 'https://github.com/stefankoegl/python-json-pointer'
39
+ __license__ = 'Modified BSD License'
40
+
41
+ import copy
42
+ import re
43
+ from collections.abc import Mapping, Sequence
44
+ from itertools import tee, chain
45
+
46
+ _nothing = object()
47
+
48
+
49
+ def set_pointer(doc, pointer, value, inplace=True):
50
+ """Resolves a pointer against doc and sets the value of the target within doc.
51
+
52
+ With inplace set to true, doc is modified as long as pointer is not the
53
+ root.
54
+
55
+ >>> obj = {'foo': {'anArray': [ {'prop': 44}], 'another prop': {'baz': 'A string' }}}
56
+
57
+ >>> set_pointer(obj, '/foo/anArray/0/prop', 55) == \
58
+ {'foo': {'another prop': {'baz': 'A string'}, 'anArray': [{'prop': 55}]}}
59
+ True
60
+
61
+ >>> set_pointer(obj, '/foo/yet another prop', 'added prop') == \
62
+ {'foo': {'another prop': {'baz': 'A string'}, 'yet another prop': 'added prop', 'anArray': [{'prop': 55}]}}
63
+ True
64
+
65
+ >>> obj = {'foo': {}}
66
+ >>> set_pointer(obj, '/foo/a%20b', 'x') == \
67
+ {'foo': {'a%20b': 'x' }}
68
+ True
69
+ """
70
+
71
+ pointer = JsonPointer(pointer)
72
+ return pointer.set(doc, value, inplace)
73
+
74
+
75
+ def resolve_pointer(doc, pointer, default=_nothing):
76
+ """ Resolves pointer against doc and returns the referenced object
77
+
78
+ >>> obj = {'foo': {'anArray': [ {'prop': 44}], 'another prop': {'baz': 'A string' }}, 'a%20b': 1, 'c d': 2}
79
+
80
+ >>> resolve_pointer(obj, '') == obj
81
+ True
82
+
83
+ >>> resolve_pointer(obj, '/foo') == obj['foo']
84
+ True
85
+
86
+ >>> resolve_pointer(obj, '/foo/another prop') == obj['foo']['another prop']
87
+ True
88
+
89
+ >>> resolve_pointer(obj, '/foo/another prop/baz') == obj['foo']['another prop']['baz']
90
+ True
91
+
92
+ >>> resolve_pointer(obj, '/foo/anArray/0') == obj['foo']['anArray'][0]
93
+ True
94
+
95
+ >>> resolve_pointer(obj, '/some/path', None) == None
96
+ True
97
+
98
+ >>> resolve_pointer(obj, '/a b', None) == None
99
+ True
100
+
101
+ >>> resolve_pointer(obj, '/a%20b') == 1
102
+ True
103
+
104
+ >>> resolve_pointer(obj, '/c d') == 2
105
+ True
106
+
107
+ >>> resolve_pointer(obj, '/c%20d', None) == None
108
+ True
109
+ """
110
+
111
+ pointer = JsonPointer(pointer)
112
+ return pointer.resolve(doc, default)
113
+
114
+
115
+ def pairwise(iterable):
116
+ """ Transforms a list to a list of tuples of adjacent items
117
+
118
+ s -> (s0,s1), (s1,s2), (s2, s3), ...
119
+
120
+ >>> list(pairwise([]))
121
+ []
122
+
123
+ >>> list(pairwise([1]))
124
+ []
125
+
126
+ >>> list(pairwise([1, 2, 3, 4]))
127
+ [(1, 2), (2, 3), (3, 4)]
128
+ """
129
+ a, b = tee(iterable)
130
+ for _ in b:
131
+ break
132
+ return zip(a, b)
133
+
134
+
135
+ class JsonPointerException(Exception):
136
+ pass
137
+
138
+
139
+ class EndOfList(object):
140
+ """Result of accessing element "-" of a list"""
141
+
142
+ def __init__(self, list_):
143
+ self.list_ = list_
144
+
145
+ def __repr__(self):
146
+ return '{cls}({lst})'.format(cls=self.__class__.__name__,
147
+ lst=repr(self.list_))
148
+
149
+
150
+ class JsonPointer(object):
151
+ """A JSON Pointer that can reference parts of a JSON document"""
152
+
153
+ # Array indices must not contain:
154
+ # leading zeros, signs, spaces, decimals, etc
155
+ _RE_ARRAY_INDEX = re.compile('0|[1-9][0-9]*$')
156
+ _RE_INVALID_ESCAPE = re.compile('(~[^01]|~$)')
157
+
158
+ def __init__(self, pointer):
159
+
160
+ # validate escapes
161
+ invalid_escape = self._RE_INVALID_ESCAPE.search(pointer)
162
+ if invalid_escape:
163
+ raise JsonPointerException('Found invalid escape {}'.format(
164
+ invalid_escape.group()))
165
+
166
+ parts = pointer.split('/')
167
+ if parts.pop(0) != '':
168
+ raise JsonPointerException('Location must start with /')
169
+
170
+ parts = [unescape(part) for part in parts]
171
+ self.parts = parts
172
+
173
+ def to_last(self, doc):
174
+ """Resolves ptr until the last step, returns (sub-doc, last-step)"""
175
+
176
+ if not self.parts:
177
+ return doc, None
178
+
179
+ for part in self.parts[:-1]:
180
+ doc = self.walk(doc, part)
181
+
182
+ return doc, JsonPointer.get_part(doc, self.parts[-1])
183
+
184
+ def resolve(self, doc, default=_nothing):
185
+ """Resolves the pointer against doc and returns the referenced object"""
186
+
187
+ for part in self.parts:
188
+
189
+ try:
190
+ doc = self.walk(doc, part)
191
+ except JsonPointerException:
192
+ if default is _nothing:
193
+ raise
194
+ else:
195
+ return default
196
+
197
+ return doc
198
+
199
+ get = resolve
200
+
201
+ def set(self, doc, value, inplace=True):
202
+ """Resolve the pointer against the doc and replace the target with value."""
203
+
204
+ if len(self.parts) == 0:
205
+ if inplace:
206
+ raise JsonPointerException('Cannot set root in place')
207
+ return value
208
+
209
+ if not inplace:
210
+ doc = copy.deepcopy(doc)
211
+
212
+ (parent, part) = self.to_last(doc)
213
+
214
+ if isinstance(parent, Sequence) and part == '-':
215
+ parent.append(value)
216
+ else:
217
+ parent[part] = value
218
+
219
+ return doc
220
+
221
+ @classmethod
222
+ def get_part(cls, doc, part):
223
+ """Returns the next step in the correct type"""
224
+
225
+ if isinstance(doc, Mapping):
226
+ return part
227
+
228
+ elif isinstance(doc, Sequence):
229
+
230
+ if part == '-':
231
+ return part
232
+
233
+ if not JsonPointer._RE_ARRAY_INDEX.match(str(part)):
234
+ raise JsonPointerException("'%s' is not a valid sequence index" % part)
235
+
236
+ return int(part)
237
+
238
+ elif hasattr(doc, '__getitem__'):
239
+ # Allow indexing via ducktyping
240
+ # if the target has defined __getitem__
241
+ return part
242
+
243
+ else:
244
+ raise JsonPointerException("Document '%s' does not support indexing, "
245
+ "must be mapping/sequence or support __getitem__" % type(doc))
246
+
247
+ def get_parts(self):
248
+ """Returns the list of the parts. For example, JsonPointer('/a/b').get_parts() == ['a', 'b']"""
249
+
250
+ return self.parts
251
+
252
+ def walk(self, doc, part):
253
+ """ Walks one step in doc and returns the referenced part """
254
+
255
+ part = JsonPointer.get_part(doc, part)
256
+
257
+ assert hasattr(doc, '__getitem__'), "invalid document type %s" % (type(doc),)
258
+
259
+ if isinstance(doc, Sequence):
260
+ if part == '-':
261
+ return EndOfList(doc)
262
+
263
+ try:
264
+ return doc[part]
265
+
266
+ except IndexError:
267
+ raise JsonPointerException("index '%s' is out of bounds" % (part,))
268
+
269
+ # Else the object is a mapping or supports __getitem__ (so assume custom indexing)
270
+ try:
271
+ return doc[part]
272
+
273
+ except KeyError:
274
+ raise JsonPointerException("member '%s' not found in %s" % (part, doc))
275
+
276
+ def contains(self, ptr):
277
+ """ Returns True if self contains the given ptr """
278
+ return self.parts[:len(ptr.parts)] == ptr.parts
279
+
280
+ def __contains__(self, item):
281
+ """ Returns True if self contains the given ptr """
282
+ return self.contains(item)
283
+
284
+ def join(self, suffix):
285
+ """ Returns a new JsonPointer with the given suffix append to this ptr """
286
+ if isinstance(suffix, JsonPointer):
287
+ suffix_parts = suffix.parts
288
+ elif isinstance(suffix, str):
289
+ suffix_parts = JsonPointer(suffix).parts
290
+ else:
291
+ suffix_parts = suffix
292
+ try:
293
+ return JsonPointer.from_parts(chain(self.parts, suffix_parts))
294
+ except: # noqa E722
295
+ raise JsonPointerException("Invalid suffix")
296
+
297
+ def __truediv__(self, suffix): # Python 3
298
+ return self.join(suffix)
299
+
300
+ @property
301
+ def path(self):
302
+ """Returns the string representation of the pointer
303
+
304
+ >>> ptr = JsonPointer('/~0/0/~1')
+ >>> ptr.path == '/~0/0/~1'
+ True
305
+ """
306
+ parts = [escape(part) for part in self.parts]
307
+ return ''.join('/' + part for part in parts)
308
+
309
+ def __eq__(self, other):
310
+ """Compares a pointer to another object
311
+
312
+ Pointers can be compared by comparing their strings (or split
313
+ strings), because no two different parts can point to the same
314
+ structure in an object (e.g. no different number representations)
315
+ """
316
+
317
+ if not isinstance(other, JsonPointer):
318
+ return False
319
+
320
+ return self.parts == other.parts
321
+
322
+ def __hash__(self):
323
+ return hash(tuple(self.parts))
324
+
325
+ def __str__(self):
326
+ return self.path
327
+
328
+ def __repr__(self):
329
+ return type(self).__name__ + "(" + repr(self.path) + ")"
330
+
331
+ @classmethod
332
+ def from_parts(cls, parts):
333
+ """Constructs a JsonPointer from a list of (unescaped) paths
334
+
335
+ >>> JsonPointer.from_parts(['a', '~', '/', 0]).path == '/a/~0/~1/0'
336
+ True
337
+ """
338
+ parts = [escape(str(part)) for part in parts]
339
+ ptr = cls(''.join('/' + part for part in parts))
340
+ return ptr
341
+
342
+
343
+ def escape(s):
344
+ return s.replace('~', '~0').replace('/', '~1')
345
+
346
+
347
+ def unescape(s):
348
+ return s.replace('~1', '/').replace('~0', '~')
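For reference, a minimal usage sketch of the jsonpointer module added above (illustrative only, not part of the uploaded files; it assumes the module is importable as `jsonpointer`):

from jsonpointer import JsonPointer, resolve_pointer, set_pointer

doc = {'users': [{'name': 'ada'}, {'name': 'grace'}]}

# Resolve a pointer; pass a default to avoid JsonPointerException on missing paths.
resolve_pointer(doc, '/users/1/name')          # -> 'grace'
resolve_pointer(doc, '/users/9/name', None)    # -> None

# Set a value on a deep copy so the original document stays untouched.
updated = set_pointer(doc, '/users/0/name', 'alan', inplace=False)

# Pointers compose with '/' (JsonPointer.join) and expose their escaped form via .path.
ptr = JsonPointer('/users') / '/0'
ptr.path                                       # -> '/users/0'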
venv/Lib/site-packages/jupyter.py ADDED
@@ -0,0 +1,7 @@
1
+ """Launch the root jupyter command"""
2
+ from __future__ import annotations
3
+
4
+ if __name__ == "__main__":
5
+ from jupyter_core.command import main
6
+
7
+ main()
venv/Lib/site-packages/nest_asyncio.py ADDED
@@ -0,0 +1,219 @@
1
+ """Patch asyncio to allow nested event loops."""
2
+
3
+ import asyncio
4
+ import asyncio.events as events
5
+ import os
6
+ import sys
7
+ import threading
8
+ from contextlib import contextmanager, suppress
9
+ from heapq import heappop
10
+
11
+
12
+ def apply(loop=None):
13
+ """Patch asyncio to make its event loop reentrant."""
14
+ _patch_asyncio()
15
+ _patch_policy()
16
+ _patch_tornado()
17
+
18
+ loop = loop or asyncio.get_event_loop()
19
+ _patch_loop(loop)
20
+
21
+
22
+ def _patch_asyncio():
23
+ """Patch asyncio module to use pure Python tasks and futures."""
24
+
25
+ def run(main, *, debug=False):
26
+ loop = asyncio.get_event_loop()
27
+ loop.set_debug(debug)
28
+ task = asyncio.ensure_future(main)
29
+ try:
30
+ return loop.run_until_complete(task)
31
+ finally:
32
+ if not task.done():
33
+ task.cancel()
34
+ with suppress(asyncio.CancelledError):
35
+ loop.run_until_complete(task)
36
+
37
+ def _get_event_loop(stacklevel=3):
38
+ loop = events._get_running_loop()
39
+ if loop is None:
40
+ loop = events.get_event_loop_policy().get_event_loop()
41
+ return loop
42
+
43
+ # Use module level _current_tasks, all_tasks and patch run method.
44
+ if hasattr(asyncio, '_nest_patched'):
45
+ return
46
+ if sys.version_info >= (3, 6, 0):
47
+ asyncio.Task = asyncio.tasks._CTask = asyncio.tasks.Task = \
48
+ asyncio.tasks._PyTask
49
+ asyncio.Future = asyncio.futures._CFuture = asyncio.futures.Future = \
50
+ asyncio.futures._PyFuture
51
+ if sys.version_info < (3, 7, 0):
52
+ asyncio.tasks._current_tasks = asyncio.tasks.Task._current_tasks
53
+ asyncio.all_tasks = asyncio.tasks.Task.all_tasks
54
+ if sys.version_info >= (3, 9, 0):
55
+ events._get_event_loop = events.get_event_loop = \
56
+ asyncio.get_event_loop = _get_event_loop
57
+ asyncio.run = run
58
+ asyncio._nest_patched = True
59
+
60
+
61
+ def _patch_policy():
62
+ """Patch the policy to always return a patched loop."""
63
+
64
+ def get_event_loop(self):
65
+ if self._local._loop is None:
66
+ loop = self.new_event_loop()
67
+ _patch_loop(loop)
68
+ self.set_event_loop(loop)
69
+ return self._local._loop
70
+
71
+ policy = events.get_event_loop_policy()
72
+ policy.__class__.get_event_loop = get_event_loop
73
+
74
+
75
+ def _patch_loop(loop):
76
+ """Patch loop to make it reentrant."""
77
+
78
+ def run_forever(self):
79
+ with manage_run(self), manage_asyncgens(self):
80
+ while True:
81
+ self._run_once()
82
+ if self._stopping:
83
+ break
84
+ self._stopping = False
85
+
86
+ def run_until_complete(self, future):
87
+ with manage_run(self):
88
+ f = asyncio.ensure_future(future, loop=self)
89
+ if f is not future:
90
+ f._log_destroy_pending = False
91
+ while not f.done():
92
+ self._run_once()
93
+ if self._stopping:
94
+ break
95
+ if not f.done():
96
+ raise RuntimeError(
97
+ 'Event loop stopped before Future completed.')
98
+ return f.result()
99
+
100
+ def _run_once(self):
101
+ """
102
+ Simplified re-implementation of asyncio's _run_once that
103
+ runs handles as they become ready.
104
+ """
105
+ ready = self._ready
106
+ scheduled = self._scheduled
107
+ while scheduled and scheduled[0]._cancelled:
108
+ heappop(scheduled)
109
+
110
+ timeout = (
111
+ 0 if ready or self._stopping
112
+ else min(max(
113
+ scheduled[0]._when - self.time(), 0), 86400) if scheduled
114
+ else None)
115
+ event_list = self._selector.select(timeout)
116
+ self._process_events(event_list)
117
+
118
+ end_time = self.time() + self._clock_resolution
119
+ while scheduled and scheduled[0]._when < end_time:
120
+ handle = heappop(scheduled)
121
+ ready.append(handle)
122
+
123
+ for _ in range(len(ready)):
124
+ if not ready:
125
+ break
126
+ handle = ready.popleft()
127
+ if not handle._cancelled:
128
+ # preempt the current task so that the checks in
129
+ # Task.__step do not raise
130
+ curr_task = curr_tasks.pop(self, None)
131
+
132
+ try:
133
+ handle._run()
134
+ finally:
135
+ # restore the current task
136
+ if curr_task is not None:
137
+ curr_tasks[self] = curr_task
138
+
139
+ handle = None
140
+
141
+ @contextmanager
142
+ def manage_run(self):
143
+ """Set up the loop for running."""
144
+ self._check_closed()
145
+ old_thread_id = self._thread_id
146
+ old_running_loop = events._get_running_loop()
147
+ try:
148
+ self._thread_id = threading.get_ident()
149
+ events._set_running_loop(self)
150
+ self._num_runs_pending += 1
151
+ if self._is_proactorloop:
152
+ if self._self_reading_future is None:
153
+ self.call_soon(self._loop_self_reading)
154
+ yield
155
+ finally:
156
+ self._thread_id = old_thread_id
157
+ events._set_running_loop(old_running_loop)
158
+ self._num_runs_pending -= 1
159
+ if self._is_proactorloop:
160
+ if (self._num_runs_pending == 0
161
+ and self._self_reading_future is not None):
162
+ ov = self._self_reading_future._ov
163
+ self._self_reading_future.cancel()
164
+ if ov is not None:
165
+ self._proactor._unregister(ov)
166
+ self._self_reading_future = None
167
+
168
+ @contextmanager
169
+ def manage_asyncgens(self):
170
+ if not hasattr(sys, 'get_asyncgen_hooks'):
171
+ # Python version is too old.
172
+ return
173
+ old_agen_hooks = sys.get_asyncgen_hooks()
174
+ try:
175
+ self._set_coroutine_origin_tracking(self._debug)
176
+ if self._asyncgens is not None:
177
+ sys.set_asyncgen_hooks(
178
+ firstiter=self._asyncgen_firstiter_hook,
179
+ finalizer=self._asyncgen_finalizer_hook)
180
+ yield
181
+ finally:
182
+ self._set_coroutine_origin_tracking(False)
183
+ if self._asyncgens is not None:
184
+ sys.set_asyncgen_hooks(*old_agen_hooks)
185
+
186
+ def _check_running(self):
187
+ """Do not throw exception if loop is already running."""
188
+ pass
189
+
190
+ if hasattr(loop, '_nest_patched'):
191
+ return
192
+ if not isinstance(loop, asyncio.BaseEventLoop):
193
+ raise ValueError('Can\'t patch loop of type %s' % type(loop))
194
+ cls = loop.__class__
195
+ cls.run_forever = run_forever
196
+ cls.run_until_complete = run_until_complete
197
+ cls._run_once = _run_once
198
+ cls._check_running = _check_running
199
+ cls._check_runnung = _check_running # typo in Python 3.7 source
200
+ cls._num_runs_pending = 1 if loop.is_running() else 0
201
+ cls._is_proactorloop = (
202
+ os.name == 'nt' and issubclass(cls, asyncio.ProactorEventLoop))
203
+ if sys.version_info < (3, 7, 0):
204
+ cls._set_coroutine_origin_tracking = cls._set_coroutine_wrapper
205
+ curr_tasks = asyncio.tasks._current_tasks \
206
+ if sys.version_info >= (3, 7, 0) else asyncio.Task._current_tasks
207
+ cls._nest_patched = True
208
+
209
+
210
+ def _patch_tornado():
211
+ """
212
+ If tornado is imported before nest_asyncio, make tornado aware of
213
+ the pure-Python asyncio Future.
214
+ """
215
+ if 'tornado' in sys.modules:
216
+ import tornado.concurrent as tc # type: ignore
217
+ tc.Future = asyncio.Future
218
+ if asyncio.Future not in tc.FUTURES:
219
+ tc.FUTURES += (asyncio.Future,)
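A brief usage sketch for nest_asyncio (illustrative, not part of the uploaded files): after `apply()`, blocking calls such as `run_until_complete()` can be issued from code that is already running inside an event loop, which is the typical Jupyter scenario.

import asyncio
import nest_asyncio

nest_asyncio.apply()  # patch the current event loop to be reentrant

async def inner():
    await asyncio.sleep(0)
    return 42

async def outer():
    # Without the patch, this nested run_until_complete() would fail with
    # "This event loop is already running".
    return asyncio.get_event_loop().run_until_complete(inner())

print(asyncio.run(outer()))  # 42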
venv/Lib/site-packages/numpy-2.2.5-cp312-cp312-win_amd64.whl ADDED
File without changes
venv/Lib/site-packages/pandocfilters.py ADDED
@@ -0,0 +1,304 @@
1
+ # Author: John MacFarlane <[email protected]>
2
+ # Copyright: (C) 2013 John MacFarlane
3
+ # License: BSD3
4
+
5
+ """
6
+ Functions to aid writing python scripts that process the pandoc
7
+ AST serialized as JSON.
8
+ """
9
+
10
+ import codecs
11
+ import hashlib
12
+ import io
13
+ import json
14
+ import os
15
+ import sys
16
+ import atexit
17
+ import shutil
18
+ import tempfile
19
+
20
+
21
+ # some utility-functions: make it easier to create your own filters
22
+
23
+
24
+ def get_filename4code(module, content, ext=None):
25
+ """Generate filename based on content
26
+
27
+ The function ensures that the (temporary) directory exists, so that the
28
+ file can be written.
29
+
30
+ By default, the directory won't be cleaned up,
31
+ so a filter can use the directory as a cache and
32
+ decide not to regenerate if there's no change.
33
+
34
+ In case the user preferres the files to be temporary files,
35
+ an environment variable `PANDOCFILTER_CLEANUP` can be set to
36
+ any non-empty value such as `1` to
37
+ make sure the directory is created in a temporary location and removed
38
+ after finishing the filter. In this case there's no caching and files
39
+ will be regenerated each time the filter is run.
40
+
41
+ Example:
42
+ filename = get_filename4code("myfilter", code)
43
+ """
44
+ if os.getenv('PANDOCFILTER_CLEANUP'):
45
+ imagedir = tempfile.mkdtemp(prefix=module)
46
+ atexit.register(lambda: shutil.rmtree(imagedir))
47
+ else:
48
+ imagedir = module + "-images"
49
+ fn = hashlib.sha1(content.encode(sys.getfilesystemencoding())).hexdigest()
50
+ try:
51
+ os.makedirs(imagedir, exist_ok=True)
52
+ sys.stderr.write('Created directory ' + imagedir + '\n')
53
+ except OSError:
54
+ sys.stderr.write('Could not create directory "' + imagedir + '"\n')
55
+ if ext:
56
+ fn += "." + ext
57
+ return os.path.join(imagedir, fn)
58
+
59
+ def get_value(kv, key, value = None):
60
+ """get value from the keyvalues (options)"""
61
+ res = []
62
+ for k, v in kv:
63
+ if k == key:
64
+ value = v
65
+ else:
66
+ res.append([k, v])
67
+ return value, res
68
+
69
+ def get_caption(kv):
70
+ """get caption from the keyvalues (options)
71
+
72
+ Example:
73
+ if key == 'CodeBlock':
74
+ [[ident, classes, keyvals], code] = value
75
+ caption, typef, keyvals = get_caption(keyvals)
76
+ ...
77
+ return Para([Image([ident, [], keyvals], caption, [filename, typef])])
78
+ """
79
+ caption = []
80
+ typef = ""
81
+ value, res = get_value(kv, u"caption")
82
+ if value is not None:
83
+ caption = [Str(value)]
84
+ typef = "fig:"
85
+
86
+ return caption, typef, res
87
+
88
+
89
+ def get_extension(format, default, **alternates):
90
+ """get the extension for the result, needs a default and some specialisations
91
+
92
+ Example:
93
+ filetype = get_extension(format, "png", html="svg", latex="eps")
94
+ """
95
+ try:
96
+ return alternates[format]
97
+ except KeyError:
98
+ return default
99
+
100
+ # end of utilities
101
+
102
+
103
+ def walk(x, action, format, meta):
104
+ """Walk a tree, applying an action to every object.
105
+ Returns a modified tree. An action is a function of the form
106
+ `action(key, value, format, meta)`, where:
107
+
108
+ * `key` is the type of the pandoc object (e.g. 'Str', 'Para') `value` is
109
+ * the contents of the object (e.g. a string for 'Str', a list of
110
+ inline elements for 'Para')
111
+ * `format` is the target output format (as supplied by the
112
+ `format` argument of `walk`)
113
+ * `meta` is the document's metadata
114
+
115
+ The return of an action is either:
116
+
117
+ * `None`: this means that the object should remain unchanged
118
+ * a pandoc object: this will replace the original object
119
+ * a list of pandoc objects: these will replace the original object; the
120
+ list is merged with the neighbors of the orignal objects (spliced into
121
+ the list the original object belongs to); returning an empty list deletes
122
+ the object
123
+ """
124
+ if isinstance(x, list):
125
+ array = []
126
+ for item in x:
127
+ if isinstance(item, dict) and 't' in item:
128
+ res = action(item['t'],
129
+ item['c'] if 'c' in item else None, format, meta)
130
+ if res is None:
131
+ array.append(walk(item, action, format, meta))
132
+ elif isinstance(res, list):
133
+ for z in res:
134
+ array.append(walk(z, action, format, meta))
135
+ else:
136
+ array.append(walk(res, action, format, meta))
137
+ else:
138
+ array.append(walk(item, action, format, meta))
139
+ return array
140
+ elif isinstance(x, dict):
141
+ return {k: walk(v, action, format, meta) for k, v in x.items()}
142
+ else:
143
+ return x
144
+
145
+ def toJSONFilter(action):
146
+ """Like `toJSONFilters`, but takes a single action as argument.
147
+ """
148
+ toJSONFilters([action])
149
+
150
+
151
+ def toJSONFilters(actions):
152
+ """Generate a JSON-to-JSON filter from stdin to stdout
153
+
154
+ The filter:
155
+
156
+ * reads a JSON-formatted pandoc document from stdin
157
+ * transforms it by walking the tree and performing the actions
158
+ * returns a new JSON-formatted pandoc document to stdout
159
+
160
+ The argument `actions` is a list of functions of the form
161
+ `action(key, value, format, meta)`, as described in more
162
+ detail under `walk`.
163
+
164
+ This function calls `applyJSONFilters`, with the `format`
165
+ argument provided by the first command-line argument,
166
+ if present. (Pandoc sets this by default when calling
167
+ filters.)
168
+ """
169
+ try:
170
+ input_stream = io.TextIOWrapper(sys.stdin.buffer, encoding='utf-8')
171
+ except AttributeError:
172
+ # Python 2 does not have sys.stdin.buffer.
173
+ # REF: https://stackoverflow.com/questions/2467928/python-unicodeencode
174
+ input_stream = codecs.getreader("utf-8")(sys.stdin)
175
+
176
+ source = input_stream.read()
177
+ if len(sys.argv) > 1:
178
+ format = sys.argv[1]
179
+ else:
180
+ format = ""
181
+
182
+ sys.stdout.write(applyJSONFilters(actions, source, format))
183
+
184
+ def applyJSONFilters(actions, source, format=""):
185
+ """Walk through JSON structure and apply filters
186
+
187
+ This:
188
+
189
+ * reads a JSON-formatted pandoc document from a source string
190
+ * transforms it by walking the tree and performing the actions
191
+ * returns a new JSON-formatted pandoc document as a string
192
+
193
+ The `actions` argument is a list of functions (see `walk`
194
+ for a full description).
195
+
196
+ The argument `source` is a string encoded JSON object.
197
+
198
+ The argument `format` is a string describing the output format.
199
+
200
+ Returns a the new JSON-formatted pandoc document.
201
+ """
202
+
203
+ doc = json.loads(source)
204
+
205
+ if 'meta' in doc:
206
+ meta = doc['meta']
207
+ elif doc[0]: # old API
208
+ meta = doc[0]['unMeta']
209
+ else:
210
+ meta = {}
211
+ altered = doc
212
+ for action in actions:
213
+ altered = walk(altered, action, format, meta)
214
+
215
+ return json.dumps(altered)
216
+
217
+
218
+ def stringify(x):
219
+ """Walks the tree x and returns concatenated string content,
220
+ leaving out all formatting.
221
+ """
222
+ result = []
223
+
224
+ def go(key, val, format, meta):
225
+ if key in ['Str', 'MetaString']:
226
+ result.append(val)
227
+ elif key == 'Code':
228
+ result.append(val[1])
229
+ elif key == 'Math':
230
+ result.append(val[1])
231
+ elif key == 'LineBreak':
232
+ result.append(" ")
233
+ elif key == 'SoftBreak':
234
+ result.append(" ")
235
+ elif key == 'Space':
236
+ result.append(" ")
237
+
238
+ walk(x, go, "", {})
239
+ return ''.join(result)
240
+
241
+
242
+ def attributes(attrs):
243
+ """Returns an attribute list, constructed from the
244
+ dictionary attrs.
245
+ """
246
+ attrs = attrs or {}
247
+ ident = attrs.get("id", "")
248
+ classes = attrs.get("classes", [])
249
+ keyvals = [[x, attrs[x]] for x in attrs if (x != "classes" and x != "id")]
250
+ return [ident, classes, keyvals]
251
+
252
+
253
+ def elt(eltType, numargs):
254
+ def fun(*args):
255
+ lenargs = len(args)
256
+ if lenargs != numargs:
257
+ raise ValueError(eltType + ' expects ' + str(numargs) +
258
+ ' arguments, but given ' + str(lenargs))
259
+ if numargs == 0:
260
+ xs = []
261
+ elif len(args) == 1:
262
+ xs = args[0]
263
+ else:
264
+ xs = list(args)
265
+ return {'t': eltType, 'c': xs}
266
+ return fun
267
+
268
+ # Constructors for block elements
269
+
270
+ Plain = elt('Plain', 1)
271
+ Para = elt('Para', 1)
272
+ CodeBlock = elt('CodeBlock', 2)
273
+ RawBlock = elt('RawBlock', 2)
274
+ BlockQuote = elt('BlockQuote', 1)
275
+ OrderedList = elt('OrderedList', 2)
276
+ BulletList = elt('BulletList', 1)
277
+ DefinitionList = elt('DefinitionList', 1)
278
+ Header = elt('Header', 3)
279
+ HorizontalRule = elt('HorizontalRule', 0)
280
+ Table = elt('Table', 5)
281
+ Div = elt('Div', 2)
282
+ Null = elt('Null', 0)
283
+
284
+ # Constructors for inline elements
285
+
286
+ Str = elt('Str', 1)
287
+ Emph = elt('Emph', 1)
288
+ Strong = elt('Strong', 1)
289
+ Strikeout = elt('Strikeout', 1)
290
+ Superscript = elt('Superscript', 1)
291
+ Subscript = elt('Subscript', 1)
292
+ SmallCaps = elt('SmallCaps', 1)
293
+ Quoted = elt('Quoted', 2)
294
+ Cite = elt('Cite', 2)
295
+ Code = elt('Code', 2)
296
+ Space = elt('Space', 0)
297
+ LineBreak = elt('LineBreak', 0)
298
+ Math = elt('Math', 2)
299
+ RawInline = elt('RawInline', 2)
300
+ Link = elt('Link', 3)
301
+ Image = elt('Image', 3)
302
+ Note = elt('Note', 1)
303
+ SoftBreak = elt('SoftBreak', 0)
304
+ Span = elt('Span', 2)
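A minimal pandoc filter built on the module above (illustrative, not part of the uploaded files); saved as e.g. `caps.py`, it can be invoked with `pandoc input.md --filter ./caps.py -o output.html`:

#!/usr/bin/env python
from pandocfilters import toJSONFilter, Str

def caps(key, value, format, meta):
    # Returning None leaves a node unchanged; returning a new node replaces it
    # (see the walk() docstring above).
    if key == 'Str':
        return Str(value.upper())

if __name__ == '__main__':
    toJSONFilter(caps)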
venv/Lib/site-packages/pylab.py ADDED
@@ -0,0 +1,3 @@
1
+ from matplotlib.pylab import * # noqa: F401, F403
2
+ import matplotlib.pylab
3
+ __doc__ = matplotlib.pylab.__doc__
venv/Lib/site-packages/pythoncom.py ADDED
@@ -0,0 +1,4 @@
1
+ # Magic utility that "redirects" to pythoncomXX.dll
2
+ import pywintypes
3
+
4
+ pywintypes.__import_pywin32_system_module__("pythoncom", globals())
venv/Lib/site-packages/pywin32.version.txt ADDED
@@ -0,0 +1 @@
1
+ 310
venv/Lib/site-packages/rfc3339_validator.py ADDED
@@ -0,0 +1,51 @@
1
+ # -*- coding: utf-8 -*-
2
+
3
+ __author__ = """Nicolas Aimetti"""
4
+ __email__ = '[email protected]'
5
+ __version__ = '0.1.4'
6
+
7
+ import re
8
+ import calendar
9
+ import six
10
+
11
+ RFC3339_REGEX_FLAGS = 0
12
+ if six.PY3:
13
+ RFC3339_REGEX_FLAGS |= re.ASCII
14
+
15
+ RFC3339_REGEX = re.compile(r"""
16
+ ^
17
+ (\d{4}) # Year
18
+ -
19
+ (0[1-9]|1[0-2]) # Month
20
+ -
21
+ (\d{2}) # Day
22
+ T
23
+ (?:[01]\d|2[0123]) # Hours
24
+ :
25
+ (?:[0-5]\d) # Minutes
26
+ :
27
+ (?:[0-5]\d) # Seconds
28
+ (?:\.\d+)? # Secfrac
29
+ (?: Z # UTC
30
+ | [+-](?:[01]\d|2[0123]):[0-5]\d # Offset
31
+ )
32
+ $
33
+ """, re.VERBOSE | RFC3339_REGEX_FLAGS)
34
+
35
+
36
+ def validate_rfc3339(date_string):
37
+ """
38
+ Validates dates against RFC3339 datetime format
39
+ Leap seconds are not supported.
40
+ """
41
+ m = RFC3339_REGEX.match(date_string)
42
+ if m is None:
43
+ return False
44
+ year, month, day = map(int, m.groups())
45
+ if not year:
46
+ # Year 0 is not a valid date
47
+ return False
48
+ (_, max_day) = calendar.monthrange(year, month)
49
+ if not 1 <= day <= max_day:
50
+ return False
51
+ return True
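Usage sketch for rfc3339_validator (illustrative, not part of the uploaded files): only full RFC 3339 date-times with an explicit 'Z' or numeric offset pass, and the day is checked against the calendar.

from rfc3339_validator import validate_rfc3339

validate_rfc3339('2024-02-29T12:30:00Z')       # True  (2024 is a leap year)
validate_rfc3339('2024-02-30T12:30:00Z')       # False (no February 30th)
validate_rfc3339('2024-02-29T12:30:00')        # False (offset is required)
validate_rfc3339('2024-02-29T12:30:00+05:30')  # True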
venv/Lib/site-packages/rfc3986_validator.py ADDED
@@ -0,0 +1,106 @@
1
+ import re
2
+
3
+ __version__ = '0.1.1'
4
+ __author__ = 'Nicolas Aimetti <[email protected]>'
5
+ __all__ = ['validate_rfc3986']
6
+
7
+ # Following regex rules references the ABNF terminology from
8
+ # [RFC3986](https://tools.ietf.org/html/rfc3986#appendix-A)
9
+
10
+
11
+ # IPv6 validation rule
12
+ IPv6_RE = (
13
+ r"(?:(?:[0-9A-Fa-f]{1,4}:){6}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9]["
14
+ r"0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?))|::(?:[0-9A-Fa-f]{1,4}:){5}(?:[0-9A-Fa-f]{1,"
15
+ r"4}:[0-9A-Fa-f]{1,4}|(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9]["
16
+ r"0-9]?))|(?:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){4}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:25[0-5]|2["
17
+ r"0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?))|(?:(?:[0-9A-Fa-f]{1,"
18
+ r"4}:)?[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){3}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:25[0-5]|2[0-4]["
19
+ r"0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?))|(?:(?:[0-9A-Fa-f]{1,4}:){,"
20
+ r"2}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){2}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:25[0-5]|2[0-4]["
21
+ r"0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?))|(?:(?:[0-9A-Fa-f]{1,4}:){,"
22
+ r"3}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:)(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:25[0-5]|2[0-4][0-9]|["
23
+ r"01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?))|(?:(?:[0-9A-Fa-f]{1,4}:){,4}[0-9A-Fa-f]{1,"
24
+ r"4})?::(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2["
25
+ r"0-4][0-9]|[01]?[0-9][0-9]?))|(?:(?:[0-9A-Fa-f]{1,4}:){,5}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}|(?:(?:["
26
+ r"0-9A-Fa-f]{1,4}:){,6}[0-9A-Fa-f]{1,4})?::)"
27
+ )
28
+
29
+
30
+ # An authority is defined as: [ userinfo "@" ] host [ ":" port ]
31
+ # \[(?:{ip_v6} | v[0-9A-Fa-f]+\.[a-zA-Z0-9_.~\-!$ & '()*+,;=:]+)\] # IP-literal
32
+ AUTHORITY_RE = r"""
33
+ (?:(?:[a-zA-Z0-9_.~\-!$&'()*+,;=:]|%[0-9A-Fa-f]{{2}})*@)? # user info
34
+ (?:
35
+ \[(?:{ip_v6}|v[0-9A-Fa-f]+\.[a-zA-Z0-9_.~\-!$&'()*+,;=:]+)\] # IP-literal
36
+ | (?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){{3}}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?) # IPv4
37
+ | (?:[a-zA-Z0-9_.~\-!$&'()*+,;=]|%[0-9A-Fa-f]{{2}})* # reg-name
38
+ ) # host
39
+ (?::[0-9]*)? # port
40
+ """.format(ip_v6=IPv6_RE,)
41
+ # Path char regex rule
42
+ PCHAR_RE = r"(?:[a-zA-Z0-9_.~\-!$&'()*+,;=:@]|%[0-9A-Fa-f]{2})"
43
+ # Query and Fragment rules are exactly the same
44
+ QUERY_RE = r"(?:[a-zA-Z0-9_.~\-!$&'()*+,;=:@/?]|%[0-9A-Fa-f]{2})*"
45
+ # An URI is defined as: scheme ":" hier-part [ "?" query ] [ "#" fragment ]
46
+ URI_RE = r"""
47
+ [a-zA-Z][a-zA-Z0-9+.-]* #scheme
48
+ :
49
+ (?:
50
+ //
51
+ {authority}
52
+ (?:/{pchar}*)* # path-abempty
53
+ | /(?:{pchar}+ (?:/{pchar}*)*)? # path-absolute
54
+ | {pchar}+ (?:/{pchar}*)* # path-rootless
55
+ | # or nothing
56
+ ) # hier-part
57
+ (?:\?{query})? # Query
58
+ (?:\#{fragment})? # Fragment
59
+ """.format(
60
+ authority=AUTHORITY_RE,
61
+ query=QUERY_RE,
62
+ fragment=QUERY_RE,
63
+ pchar=PCHAR_RE
64
+ )
65
+
66
+ # A relative-ref is defined as: relative-part [ "?" query ] [ "#" fragment ]
67
+ RELATIVE_REF_RE = r"""
68
+ (?:
69
+ //
70
+ {authority}
71
+ (?:/{pchar}*)* # path-abempty
72
+ | /(?:{pchar}+ (?:/{pchar}*)*)? # path-absolute
73
+ | (?:[a-zA-Z0-9_.~\-!$&'()*+,;=@]|%[0-9A-Fa-f]{{2}})+ (?:/{pchar}*)* # path-noscheme
74
+ | # or nothing
75
+ ) # relative-part
76
+ (?:\?{query})? # Query
77
+ (?:\#{fragment})? # Fragment
78
+ """.format(
79
+ authority=AUTHORITY_RE,
80
+ query=QUERY_RE,
81
+ fragment=QUERY_RE,
82
+ pchar=PCHAR_RE
83
+ )
84
+ # Compiled URI regex rule
85
+ URI_RE_COMP = re.compile(r"^{uri_re}$".format(uri_re=URI_RE), re.VERBOSE)
86
+ # Compiled URI-reference regex rule. URI-reference is defined as: URI / relative-ref
87
+ URI_REF_RE_COMP = re.compile(r"^(?:{uri_re}|{relative_ref})$".format(
88
+ uri_re=URI_RE,
89
+ relative_ref=RELATIVE_REF_RE,
90
+ ), re.VERBOSE)
91
+
92
+
93
+ def validate_rfc3986(url, rule='URI'):
94
+ """
95
+ Validates strings according to RFC3986
96
+
97
+ :param url: String cointaining URI to validate
98
+ :param rule: It could be 'URI' (default) or 'URI_reference'.
99
+ :return: True or False
100
+ """
101
+ if rule == 'URI':
102
+ return URI_RE_COMP.match(url)
103
+ elif rule == 'URI_reference':
104
+ return URI_REF_RE_COMP.match(url)
105
+ else:
106
+ raise ValueError('Invalid rule')
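Usage sketch for rfc3986_validator (illustrative, not part of the uploaded files): the function returns a truthy match object rather than a strict boolean, so wrap it in bool() where True/False is needed.

from rfc3986_validator import validate_rfc3986

bool(validate_rfc3986('https://example.com/a?b=c#d'))            # True
bool(validate_rfc3986('not a uri'))                              # False
bool(validate_rfc3986('//example.com/x'))                        # False (a URI needs a scheme)
bool(validate_rfc3986('//example.com/x', rule='URI_reference'))  # True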
venv/etc/jupyter/jupyter_server_config.d/jupyter_server_terminals.json ADDED
@@ -0,0 +1,7 @@
1
+ {
2
+ "ServerApp": {
3
+ "jpserver_extensions": {
4
+ "jupyter_server_terminals": true
5
+ }
6
+ }
7
+ }
venv/etc/jupyter/jupyter_server_config.d/jupyterlab.json ADDED
@@ -0,0 +1,7 @@
1
+ {
2
+ "ServerApp": {
3
+ "jpserver_extensions": {
4
+ "jupyterlab": true
5
+ }
6
+ }
7
+ }
venv/etc/jupyter/jupyter_server_config.d/notebook.json ADDED
@@ -0,0 +1,7 @@
1
+ {
2
+ "ServerApp": {
3
+ "jpserver_extensions": {
4
+ "notebook": true
5
+ }
6
+ }
7
+ }
venv/etc/jupyter/jupyter_server_config.d/notebook_shim.json ADDED
@@ -0,0 +1,7 @@
1
+ {
2
+ "ServerApp": {
3
+ "jpserver_extensions": {
4
+ "notebook_shim": true
5
+ }
6
+ }
7
+ }
venv/etc/jupyter/labconfig/page_config.json ADDED
@@ -0,0 +1,3 @@
1
+ {
2
+ "lockedExtensions": {}
3
+ }
venv/etc/jupyter/nbconfig/notebook.d/widgetsnbextension.json ADDED
@@ -0,0 +1,5 @@
1
+ {
2
+ "load_extensions": {
3
+ "jupyter-js-widgets/extension": true
4
+ }
5
+ }