diff --git a/README.md b/README.md
index 24c798b..ec31192 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,7 @@
+
+
+//testing git.... :)
+
+
A beginner tutorial series for everyone who is enthusiastic about getting started with Python apps.
This is the first commit in the series, and I am beginning with a Flask app tutorial.
diff --git a/basic python programmes/.idea/encodings.xml b/basic python programmes/.idea/encodings.xml
new file mode 100644
index 0000000..15a15b2
--- /dev/null
+++ b/basic python programmes/.idea/encodings.xml
@@ -0,0 +1,4 @@
+
+
+
+
\ No newline at end of file
diff --git a/basic python programmes/.idea/misc.xml b/basic python programmes/.idea/misc.xml
new file mode 100644
index 0000000..06d6bc1
--- /dev/null
+++ b/basic python programmes/.idea/misc.xml
@@ -0,0 +1,4 @@
+
+
+
+
\ No newline at end of file
diff --git a/basic python programmes/.idea/modules.xml b/basic python programmes/.idea/modules.xml
new file mode 100644
index 0000000..6514061
--- /dev/null
+++ b/basic python programmes/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/basic python programmes/.idea/pythonp.iml b/basic python programmes/.idea/pythonp.iml
new file mode 100644
index 0000000..85c7612
--- /dev/null
+++ b/basic python programmes/.idea/pythonp.iml
@@ -0,0 +1,13 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/basic python programmes/.idea/workspace.xml b/basic python programmes/.idea/workspace.xml
new file mode 100644
index 0000000..e7bef1d
--- /dev/null
+++ b/basic python programmes/.idea/workspace.xml
@@ -0,0 +1,174 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1545323175328
+
+
+ 1545323175328
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/basic python programmes/ascii.py b/basic python programmes/ascii.py
new file mode 100644
index 0000000..6f29ceb
--- /dev/null
+++ b/basic python programmes/ascii.py
@@ -0,0 +1,18 @@
+'''this programme converts an ASCII code to a character
+and a character into an ASCII code'''
+print("Choose an option from below : ")
+print("1. Convert ASCII code to character.")
+print('2. Convert character to ASCII code.')
+i = int(input('Enter choice : '))
+if i == 1:
+ print('Enter a number from 0 - 255 ')
+ num = int(input("NUMBER : "))
+ print("your character : ", chr(num))
+# chr returns a string of one character
+elif i == 2:
+ print('Enter a character ')
+ char = input("CHARACTER : ")
+ print("your number : ", ord(char))
+# ord returns code for one character
+else:
+ print('Invalid input ')
diff --git a/basic python programmes/calculator.py b/basic python programmes/calculator.py
new file mode 100644
index 0000000..5fb582e
--- /dev/null
+++ b/basic python programmes/calculator.py
@@ -0,0 +1,32 @@
+# calculator
+num1 = int(input('Enter first number : '))
+
+print('Choose one of the following operators :')
+print('1> +\n2> -\n3> *\n4> /\n5> ^ ')
+
+op = input('Operator : ')
+
+num2 = int(input('Enter second number : '))
+
+ans = 1
+
+if op == '+':
+ ans = num1 + num2
+
+elif op == '-':
+ ans = num1 - num2
+
+elif op == '*':
+ ans = num1 * num2
+
+elif op == '/':
+ ans = num1 / num2
+
+elif op == '^':
+ ans = num1 ** num2
+
+else:
+ print('Invalid operator input..exiting')
+ exit()
+
+print(num1, op, num2, '=', ans)
diff --git a/basic python programmes/circlearea.py b/basic python programmes/circlearea.py
new file mode 100644
index 0000000..32657f9
--- /dev/null
+++ b/basic python programmes/circlearea.py
@@ -0,0 +1,8 @@
+# programme to find the circumference and area of a circle
+import math
+print("Enter the radius of the circle :")
+r = float(input("RADIUS : "))
+c = 2 * math.pi * r
+ar = math.pi * pow(r, 2)
+print("AREA : ", ar)
+print("CIRCUMFERENCE : ", c)
diff --git a/basic python programmes/firstfile.py b/basic python programmes/firstfile.py
new file mode 100644
index 0000000..d6a54bf
--- /dev/null
+++ b/basic python programmes/firstfile.py
@@ -0,0 +1,7 @@
+x = float(input('Enter First Number : '))
+y = float(input('Enter Second Number : '))
+z = float(input('Enter Third Number : '))
+
+print('The max value is ', max(x, y, z))
+
+input('Press any key to exit...')
diff --git a/basic python programmes/funcalc.py b/basic python programmes/funcalc.py
new file mode 100644
index 0000000..fbf8fc6
--- /dev/null
+++ b/basic python programmes/funcalc.py
@@ -0,0 +1,54 @@
+# calculator using functions
+
+
+def add(num1, num2):
+    ans = num1 + num2
+    return ans
+
+
+def sub(num1, num2):
+    ans = num1 - num2
+    return ans
+
+
+def mult(num1, num2):
+    ans = num1 * num2
+    return ans
+
+
+def div(num1, num2):
+    ans = num1 / num2
+    return ans
+
+
+def power(num1, num2):
+    ans = num1 ** num2
+    return ans
+
+
+""" end of functions
+ the so called main"""
+
+
+rep = 'yes'
+while rep == 'yes':
+    num1 = float(input('Enter number 1 : '))
+    print('Enter one of the following operators :\n+ - * / ^ ')
+    op = input('Operator : ')
+    num2 = float(input('Enter number 2 : '))
+    if op == '+':
+        ans = add(num1, num2)
+    elif op == '-':
+        ans = sub(num1, num2)
+    elif op == '*':
+        ans = mult(num1, num2)
+    elif op == '/':
+        ans = div(num1, num2)
+    elif op == '^':
+        ans = power(num1, num2)
+    else:
+        print('Invalid input ..... Exiting ')
+        exit()
+
+    print(num1, op, num2, '=', ans)
+    rep = input('Do you want to continue (yes/no)? : ')
diff --git a/basic python programmes/listsasarray.py b/basic python programmes/listsasarray.py
new file mode 100644
index 0000000..6141eb6
--- /dev/null
+++ b/basic python programmes/listsasarray.py
@@ -0,0 +1,12 @@
+# using lists as array
+n = int(input('Enter the number of elements of the array : '))
+arr = []
+print('Enter the elements')
+count = 0
+for x in range(n):
+ print('Element', count + 1, ':')
+ ele = int(input())
+ count += 1
+ arr.append(ele)
+
+print('Array = ', arr)
diff --git a/basic python programmes/palindrome.py b/basic python programmes/palindrome.py
new file mode 100644
index 0000000..5858de3
--- /dev/null
+++ b/basic python programmes/palindrome.py
@@ -0,0 +1,15 @@
+# to check if the given number is palindrome or not
+
+print('Enter a number ')
+num = int(input('NUMBER : '))
+rev = 0
+temp = num
+while num > 0:
+ b = num % 10
+ rev = (rev * 10) + b
+ num = num // 10
+
+if temp == rev:
+ print('Palindrome')
+else:
+ print('Not a Palindrome')
diff --git a/basic python programmes/prime.py b/basic python programmes/prime.py
new file mode 100644
index 0000000..62b157f
--- /dev/null
+++ b/basic python programmes/prime.py
@@ -0,0 +1,7 @@
+# programme to check if a number is prime or not
+print('Enter a number to check')
+num = int(input('NUMBER : '))
+if num < 2 or any(num % d == 0 for d in range(2, int(num ** 0.5) + 1)):
+    print('The number is not prime')
+else:
+    print('The number is prime')
diff --git a/basic python programmes/secondprog.py b/basic python programmes/secondprog.py
new file mode 100644
index 0000000..5f0782c
--- /dev/null
+++ b/basic python programmes/secondprog.py
@@ -0,0 +1,5 @@
+x = int(input("Enter number 1 : "))
+y = int(input("Enter number 2 : "))
+z = int(input("Enter NUmber 3 : "))
+print("Max value is : ", max(x, y, z))
+input("Enter any key to exit..")
diff --git a/basic python programmes/strings.py b/basic python programmes/strings.py
new file mode 100644
index 0000000..3aa2abc
--- /dev/null
+++ b/basic python programmes/strings.py
@@ -0,0 +1,16 @@
+# programme to count spaces, vowels and characters in a string
+text = input('Enter a string : ')
+space = 0
+vowels = 0
+char = 0
+for x in text:
+    if x == ' ':
+        space += 1
+    elif x.lower() in 'aeiou':
+        vowels += 1
+    if x != ' ':
+        char += 1
+
+print("Number of spaces : ", space)
+print("Number of vowels : ", vowels)
+print("Number of characters : ", char)
diff --git a/basic python programmes/strings2.py b/basic python programmes/strings2.py
new file mode 100644
index 0000000..ae8ad28
--- /dev/null
+++ b/basic python programmes/strings2.py
@@ -0,0 +1,9 @@
+# programme to remove all vowels from a string
+
+vowels = ('a', 'A', 'e', 'E', 'i', 'I', 'o', 'O', 'u', 'U')
+text = input('Enter a string : ')
+for x in text:
+    if x in vowels:
+        text = text.replace(x, '')
+
+print('New text : ', text)
diff --git a/basic python programmes/strings3.py b/basic python programmes/strings3.py
new file mode 100644
index 0000000..f74507c
--- /dev/null
+++ b/basic python programmes/strings3.py
@@ -0,0 +1,8 @@
+# programme to convert lower case letters in string to upper case
+import string
+text = input('Enter a string : ')
+for x in text:
+    if x in string.ascii_lowercase:
+        text = text.replace(x, x.upper())
+
+print('New string : ', text)
diff --git a/basic python programmes/subtexttest.py b/basic python programmes/subtexttest.py
new file mode 100644
index 0000000..3acc78b
--- /dev/null
+++ b/basic python programmes/subtexttest.py
@@ -0,0 +1,4 @@
+x = int(input('Enter number 1 : '))
+y = int(input('Enter number 2 : '))
+z = int(input('Enter number 3 : '))
+print("The greatest number is : ", max(x, y, z))
diff --git a/basic python programmes/third.py b/basic python programmes/third.py
new file mode 100644
index 0000000..c759e8a
--- /dev/null
+++ b/basic python programmes/third.py
@@ -0,0 +1,7 @@
+name = 'Nirbhay Vashisht'
+age = 19
+college = 'BMCEM'
+course = 'btech'
+Sem = 3
+print("Hello {0} of age {1} ! You are persuing {2} sem {3} in college {4}".format(
+ name, age, course, Sem, college))
diff --git a/basic python programmes/venv/Lib/site-packages/__pycache__/mccabe.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/__pycache__/mccabe.cpython-37.pyc
new file mode 100644
index 0000000..aec4114
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/__pycache__/mccabe.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/__pycache__/six.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/__pycache__/six.cpython-37.pyc
new file mode 100644
index 0000000..208d32a
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/__pycache__/six.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid-2.1.0.dist-info/DESCRIPTION.rst b/basic python programmes/venv/Lib/site-packages/astroid-2.1.0.dist-info/DESCRIPTION.rst
new file mode 100644
index 0000000..0bcf160
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid-2.1.0.dist-info/DESCRIPTION.rst
@@ -0,0 +1,76 @@
+Astroid
+=======
+
+.. image:: https://travis-ci.org/PyCQA/astroid.svg?branch=master
+ :target: https://travis-ci.org/PyCQA/astroid
+
+.. image:: https://ci.appveyor.com/api/projects/status/co3u42kunguhbh6l/branch/master?svg=true
+ :alt: AppVeyor Build Status
+ :target: https://ci.appveyor.com/project/PCManticore/astroid
+
+.. image:: https://coveralls.io/repos/github/PyCQA/astroid/badge.svg?branch=master
+ :target: https://coveralls.io/github/PyCQA/astroid?branch=master
+
+.. image:: https://readthedocs.org/projects/astroid/badge/?version=latest
+ :target: http://astroid.readthedocs.io/en/latest/?badge=latest
+ :alt: Documentation Status
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/ambv/black
+
+
+
+What's this?
+------------
+
+The aim of this module is to provide a common base representation of
+python source code. It is currently the library powering pylint's capabilities.
+
+It provides a compatible representation which comes from the `_ast`
+module. It rebuilds the tree generated by the builtin _ast module by
+recursively walking down the AST and building an extended ast. The new
+node classes have additional methods and attributes for different
+usages. They include some support for static inference and local name
+scopes. Furthermore, astroid can also build partial trees by inspecting living
+objects.
+
+
+Installation
+------------
+
+Extract the tarball, jump into the created directory and run::
+
+ pip install .
+
+
+If you want to do an editable installation, you can run::
+
+ pip install -e .
+
+
+If you have any questions, please mail the code-quality@python.org
+mailing list for support. See
+http://mail.python.org/mailman/listinfo/code-quality for subscription
+information and archives.
+
+Documentation
+-------------
+http://astroid.readthedocs.io/en/latest/
+
+
+Python Versions
+---------------
+
+astroid 2.0 is currently available for Python 3 only. If you want Python 2
+support, older versions of astroid will still be supported until 2020.
+
+Test
+----
+
+Tests are in the 'test' subdirectory. To launch the whole test suite, you can use
+either `tox` or `pytest`::
+
+ tox
+ pytest astroid
+
+
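The "What's this?" section above stays abstract, so here is a minimal sketch of what astroid's inference looks like from the caller's side. This is a hypothetical snippet, assuming the vendored `astroid` package is importable; `extract_node` is re-exported by `astroid/__init__.py` later in this diff::

    import astroid

    # Parse a small module and mark the call we want astroid to reason about.
    call = astroid.extract_node('''
    def add(a, b):
        return a + b
    add(1, 2)  #@
    ''')
    inferred = next(call.infer())
    # If inference succeeds, this is a Const node whose .value should be 3.
    print(inferred.value)
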
diff --git a/basic python programmes/venv/Lib/site-packages/astroid-2.1.0.dist-info/INSTALLER b/basic python programmes/venv/Lib/site-packages/astroid-2.1.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid-2.1.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/basic python programmes/venv/Lib/site-packages/astroid-2.1.0.dist-info/METADATA b/basic python programmes/venv/Lib/site-packages/astroid-2.1.0.dist-info/METADATA
new file mode 100644
index 0000000..f3e913a
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid-2.1.0.dist-info/METADATA
@@ -0,0 +1,103 @@
+Metadata-Version: 2.0
+Name: astroid
+Version: 2.1.0
+Summary: An abstract syntax tree for Python with inference support.
+Home-page: https://github.com/PyCQA/astroid
+Author: Python Code Quality Authority
+Author-email: code-quality@python.org
+License: LGPL
+Description-Content-Type: UNKNOWN
+Platform: UNKNOWN
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Software Development :: Quality Assurance
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Requires-Python: >=3.4.*
+Requires-Dist: lazy-object-proxy
+Requires-Dist: six
+Requires-Dist: wrapt
+Requires-Dist: typing; python_version < "3.5"
+Requires-Dist: typed-ast; python_version < "3.7" and implementation_name == "cpython"
+
+Astroid
+=======
+
+.. image:: https://travis-ci.org/PyCQA/astroid.svg?branch=master
+ :target: https://travis-ci.org/PyCQA/astroid
+
+.. image:: https://ci.appveyor.com/api/projects/status/co3u42kunguhbh6l/branch/master?svg=true
+ :alt: AppVeyor Build Status
+ :target: https://ci.appveyor.com/project/PCManticore/astroid
+
+.. image:: https://coveralls.io/repos/github/PyCQA/astroid/badge.svg?branch=master
+ :target: https://coveralls.io/github/PyCQA/astroid?branch=master
+
+.. image:: https://readthedocs.org/projects/astroid/badge/?version=latest
+ :target: http://astroid.readthedocs.io/en/latest/?badge=latest
+ :alt: Documentation Status
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/ambv/black
+
+
+
+What's this?
+------------
+
+The aim of this module is to provide a common base representation of
+python source code. It is currently the library powering pylint's capabilities.
+
+It provides a compatible representation which comes from the `_ast`
+module. It rebuilds the tree generated by the builtin _ast module by
+recursively walking down the AST and building an extended ast. The new
+node classes have additional methods and attributes for different
+usages. They include some support for static inference and local name
+scopes. Furthermore, astroid can also build partial trees by inspecting living
+objects.
+
+
+Installation
+------------
+
+Extract the tarball, jump into the created directory and run::
+
+ pip install .
+
+
+If you want to do an editable installation, you can run::
+
+ pip install -e .
+
+
+If you have any questions, please mail the code-quality@python.org
+mailing list for support. See
+http://mail.python.org/mailman/listinfo/code-quality for subscription
+information and archives.
+
+Documentation
+-------------
+http://astroid.readthedocs.io/en/latest/
+
+
+Python Versions
+---------------
+
+astroid 2.0 is currently available for Python 3 only. If you want Python 2
+support, older versions of astroid will still be supported until 2020.
+
+Test
+----
+
+Tests are in the 'test' subdirectory. To launch the whole test suite, you can use
+either `tox` or `pytest`::
+
+ tox
+ pytest astroid
+
+
diff --git a/basic python programmes/venv/Lib/site-packages/astroid-2.1.0.dist-info/RECORD b/basic python programmes/venv/Lib/site-packages/astroid-2.1.0.dist-info/RECORD
new file mode 100644
index 0000000..24fc2fa
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid-2.1.0.dist-info/RECORD
@@ -0,0 +1,123 @@
+astroid/__init__.py,sha256=tJJMsKzMv8hUgw3y0VQAAMx9BO-nrNUcNy_wI0XBFXo,5538
+astroid/__pkginfo__.py,sha256=BbpwFP_8Yd6vSqi29HWRX5_29cLS3-LLYKiEkp3r1k0,2143
+astroid/_ast.py,sha256=mHrblK8bCph2bSA4lz7MlhGHW1zPCwSpfqOudUMey7I,1158
+astroid/arguments.py,sha256=cui-UmbEeywSk0eitSrOhi9F0Ci2clS4qYXTi8uXRs4,11783
+astroid/as_string.py,sha256=WOnmoyFPbRV3M_Oe5ykkncPlaXY_eu9nQzzREHoRFRg,22231
+astroid/bases.py,sha256=Hjzwwftyn1VQ9ELZ7HuPueGolhljc3yvsh_k73qSSZQ,18941
+astroid/builder.py,sha256=0wrC4-ausU_nEEkgI8LJTsrNFN_XCbOkqoG2DsKCsks,16023
+astroid/context.py,sha256=QLpfM-S0WHbK3XKdB9Dr3EXwv_A0MrziPHx1GCPlVvw,5023
+astroid/decorators.py,sha256=ya-Fyn2Uqzi384FARjYf9v2oqwt64KAEG3mlOzuJrEc,4279
+astroid/exceptions.py,sha256=_IJRdLfyNSPVjxYgEd11Uu9XpdqE7uBCVOEIxt3ua70,7047
+astroid/helpers.py,sha256=9L1VT4TcL8Y3eifLIJNCvMp14QVaiRNoWnG2G4cL-Dk,9087
+astroid/inference.py,sha256=UG4DiAFvq32nqIsXWTen2bvCcJbaGpYJDBbny_3fy08,31945
+astroid/manager.py,sha256=eaj_pEomBFDp-BsTyEWhtm8UeIPSrB40rPWJls1-vDc,12426
+astroid/mixins.py,sha256=F2rv2Ow7AU3YT_2jitVJik95ZWRVK6hpf8BrkkspzUY,5571
+astroid/modutils.py,sha256=8zpeNSGwKLA_hlk36o0Q7m3bzCumVP5c-1DJ1dkllYU,23568
+astroid/node_classes.py,sha256=eRi9Wg_U_xUBbRElTQ9gRDo0PuwmVc8temvVfo9kD90,136690
+astroid/nodes.py,sha256=FIa3fF-rJ5BlhlGIcJGrEoyO_r5rMmWeajdAxtGEYFk,2927
+astroid/objects.py,sha256=XpJVSv3z-D2j50-5KLFM7AHXQB0DmFPWKSvaPqXc930,8634
+astroid/protocols.py,sha256=zdLu8TJcNViuqZubMmfENoMs3yGVB0BEh4HxHISVje4,26448
+astroid/raw_building.py,sha256=RpniuHnac0Za5U8tOXBnVNiyUnFb9IplkkuCKlLw7sA,16257
+astroid/rebuilder.py,sha256=XNrjCLgs6JunlcFprK8l4_ol1b8bjiJ0-vK7KRnG4PA,40508
+astroid/scoped_nodes.py,sha256=cracwywITGItGxrDdg6-xzH7isBOzlH2cww5O7WIOlw,92496
+astroid/test_utils.py,sha256=NmVu0GTYA3Fz3BG9sF0KQt2nrd6vMSBoKpGkkIjKz28,2309
+astroid/transforms.py,sha256=1npwJWcQUSIjcpcWd1pc-dJhtHOyiboQHsETAIQd5co,3377
+astroid/util.py,sha256=jg5LnqbWSZTZP1KgpxGBuC6Lfwhn9Jb2T2TohXghmC0,4785
+astroid/brain/brain_argparse.py,sha256=VEeMCr3OIjHmCy35uc-kX6nJ5_NUOAimpGJMr6CChoA,1024
+astroid/brain/brain_attrs.py,sha256=UiqX-t2DhW4v6PISVrZXsnpuk0Lml3JIrtfLiKqCKEw,1900
+astroid/brain/brain_builtin_inference.py,sha256=zSmhGQrIYApV25dLG3v9GulaMaJMJTCsrCffF4fyjvE,26914
+astroid/brain/brain_collections.py,sha256=XBlyS-6J3rlEqv_44EyB6z6YbJq4KJCoN_pKTxdBhOA,2828
+astroid/brain/brain_curses.py,sha256=tDnlCP1bEvleqCMz856yua9mM5um1p_JendFhT4rBFk,3303
+astroid/brain/brain_dateutil.py,sha256=q2dyV2907Bw4n7m2W4EEdok3Ndv8NzeIQxAZwXBiS14,795
+astroid/brain/brain_fstrings.py,sha256=VKVMijgLE2pg2dtXM6GGFgONOxOg8qA9D5V6dYzWTbQ,2121
+astroid/brain/brain_functools.py,sha256=M_Z99A43QUpmAmcA4mC5C-VsOiXpqvb1JxMb43-WYAY,6312
+astroid/brain/brain_gi.py,sha256=-EpcKf9z3wT_7v0k0WXIZtgk3-213lkfUX9bxeKOM3Y,6810
+astroid/brain/brain_hashlib.py,sha256=cp30hX5HhWqbWG3zqcNu8N3aHGeQK4DPi4ac8owBonU,2163
+astroid/brain/brain_io.py,sha256=DJcTFMTexrsHaGg2-kHoXwonddu13ImT7NEjiF1xPiU,1470
+astroid/brain/brain_mechanize.py,sha256=xTBc-u2DMmMPeci7DVFs4L2T98DwwLF_Ob5YZviLPp8,889
+astroid/brain/brain_multiprocessing.py,sha256=zXqLXg6dVYTkik1qSjo1cPJMZAGrkobVslp7ArwEXYQ,3108
+astroid/brain/brain_namedtuple_enum.py,sha256=d1TotHYqvfmPxVXKt7uGtdUetB3txEuSRxrhsEKb7lw,14028
+astroid/brain/brain_nose.py,sha256=kECw2jHmX0IUPX4Gx3XVGrflKGnlgPB79QHt6WU2cwQ,2211
+astroid/brain/brain_numpy.py,sha256=b55v7T5ps9ShC_qps2CAfU7u_mbW523ddtiaKDwx8Mo,14791
+astroid/brain/brain_pkg_resources.py,sha256=S_5UED1Zg8ObEJumRdpYGnjxZzemh_G_NFj3p5NGPfc,2262
+astroid/brain/brain_pytest.py,sha256=RXaNUVqy2R0et0Upn4GJkVgq5SG8Pl7zLlhqQg8Xx3Q,2384
+astroid/brain/brain_qt.py,sha256=U3VOrHer0cLvpxJC55MLEYrKn1Z3RQRXsc6fQ7u5hvI,2437
+astroid/brain/brain_random.py,sha256=2RZY-QEXMNWp7E6h0l0-ke-DtjKTOFlTdjiQZi3XdQc,2432
+astroid/brain/brain_re.py,sha256=le7VJHUAf80HyE_aQCh7_8FyDVK6JwNWA--c9RaMVQ8,1128
+astroid/brain/brain_six.py,sha256=6QHcKXoYf8yMMXWkx3g3lK0kqB5OFeYcXwjUTdgWTMw,6146
+astroid/brain/brain_ssl.py,sha256=8AvwQnF9eX7t_aqECEbQf2ob3dJUlpbg_alXcp-KWbk,3558
+astroid/brain/brain_subprocess.py,sha256=8EyK2m0DKAZM7DEf1ivJ7PY9TD2RpMWO8EFovZ_e7Rc,3631
+astroid/brain/brain_threading.py,sha256=Qv06IeuEwDlk8cibAlUxlPx2FWMyRpoSCMQTkCrL04Q,767
+astroid/brain/brain_typing.py,sha256=5lbwLS0a9BgxmqJIf26UXBAEyaFRpGigAowFHcdwp3I,2723
+astroid/brain/brain_uuid.py,sha256=flWrk1Ve7oqYrO8GTZ3To8RBYteRfYwvash-s9KiU9o,564
+astroid/interpreter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+astroid/interpreter/dunder_lookup.py,sha256=dP-AZU_aGPNt03b1ttrMglxzeU3NtgnG0MfpSLPH6sg,2155
+astroid/interpreter/objectmodel.py,sha256=ww1UQPL3iQdAJj8REjOANW7dBhiLA4Ynd7qJp7bl8d4,21307
+astroid/interpreter/_import/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+astroid/interpreter/_import/spec.py,sha256=oDdgvZZCuagrsjaiNOq3MbmEhabWyWUfX7xjJaTXEpE,10942
+astroid/interpreter/_import/util.py,sha256=inubUz6F3_kaMFaeleKUW6E6wCMIPrhU882zvwEZ02I,255
+astroid-2.1.0.dist-info/DESCRIPTION.rst,sha256=Ql_Ui1POiy9SIMSRfy_GV9u8xN0085VlNlyzi8h2C1o,2158
+astroid-2.1.0.dist-info/METADATA,sha256=Jm0lbRRcHDS_20vdBfDv0fQ0O4KvULJtfvNmacnGG-s,3262
+astroid-2.1.0.dist-info/RECORD,,
+astroid-2.1.0.dist-info/WHEEL,sha256=8Lm45v9gcYRm70DrgFGVe4WsUtUMi1_0Tso1hqPGMjA,92
+astroid-2.1.0.dist-info/metadata.json,sha256=OlYXVfIFjUmEMAOGa9U2dE4-Y1VqM9BHfUROm2iQvnY,1332
+astroid-2.1.0.dist-info/top_level.txt,sha256=HsdW4O2x7ZXRj6k-agi3RaQybGLobI3VSE-jt4vQUXM,8
+astroid-2.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+astroid/brain/__pycache__/brain_argparse.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_attrs.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_builtin_inference.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_collections.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_curses.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_dateutil.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_fstrings.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_functools.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_gi.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_hashlib.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_io.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_mechanize.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_multiprocessing.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_namedtuple_enum.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_nose.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_numpy.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_pkg_resources.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_pytest.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_qt.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_random.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_re.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_six.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_ssl.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_subprocess.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_threading.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_typing.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_uuid.cpython-37.pyc,,
+astroid/interpreter/_import/__pycache__/spec.cpython-37.pyc,,
+astroid/interpreter/_import/__pycache__/util.cpython-37.pyc,,
+astroid/interpreter/_import/__pycache__/__init__.cpython-37.pyc,,
+astroid/interpreter/__pycache__/dunder_lookup.cpython-37.pyc,,
+astroid/interpreter/__pycache__/objectmodel.cpython-37.pyc,,
+astroid/interpreter/__pycache__/__init__.cpython-37.pyc,,
+astroid/__pycache__/arguments.cpython-37.pyc,,
+astroid/__pycache__/as_string.cpython-37.pyc,,
+astroid/__pycache__/bases.cpython-37.pyc,,
+astroid/__pycache__/builder.cpython-37.pyc,,
+astroid/__pycache__/context.cpython-37.pyc,,
+astroid/__pycache__/decorators.cpython-37.pyc,,
+astroid/__pycache__/exceptions.cpython-37.pyc,,
+astroid/__pycache__/helpers.cpython-37.pyc,,
+astroid/__pycache__/inference.cpython-37.pyc,,
+astroid/__pycache__/manager.cpython-37.pyc,,
+astroid/__pycache__/mixins.cpython-37.pyc,,
+astroid/__pycache__/modutils.cpython-37.pyc,,
+astroid/__pycache__/nodes.cpython-37.pyc,,
+astroid/__pycache__/node_classes.cpython-37.pyc,,
+astroid/__pycache__/objects.cpython-37.pyc,,
+astroid/__pycache__/protocols.cpython-37.pyc,,
+astroid/__pycache__/raw_building.cpython-37.pyc,,
+astroid/__pycache__/rebuilder.cpython-37.pyc,,
+astroid/__pycache__/scoped_nodes.cpython-37.pyc,,
+astroid/__pycache__/test_utils.cpython-37.pyc,,
+astroid/__pycache__/transforms.cpython-37.pyc,,
+astroid/__pycache__/util.cpython-37.pyc,,
+astroid/__pycache__/_ast.cpython-37.pyc,,
+astroid/__pycache__/__init__.cpython-37.pyc,,
+astroid/__pycache__/__pkginfo__.cpython-37.pyc,,
diff --git a/basic python programmes/venv/Lib/site-packages/astroid-2.1.0.dist-info/WHEEL b/basic python programmes/venv/Lib/site-packages/astroid-2.1.0.dist-info/WHEEL
new file mode 100644
index 0000000..6261a26
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid-2.1.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.30.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/basic python programmes/venv/Lib/site-packages/astroid-2.1.0.dist-info/metadata.json b/basic python programmes/venv/Lib/site-packages/astroid-2.1.0.dist-info/metadata.json
new file mode 100644
index 0000000..fdce336
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid-2.1.0.dist-info/metadata.json
@@ -0,0 +1 @@
+{"classifiers": ["Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Software Development :: Quality Assurance", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy"], "description_content_type": "UNKNOWN", "extensions": {"python.details": {"contacts": [{"email": "code-quality@python.org", "name": "Python Code Quality Authority", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/PyCQA/astroid"}}}, "extras": [], "generator": "bdist_wheel (0.30.0)", "license": "LGPL", "metadata_version": "2.0", "name": "astroid", "requires_python": ">=3.4.*", "run_requires": [{"requires": ["lazy-object-proxy", "six", "wrapt"]}, {"environment": "python_version < \"3.5\"", "requires": ["typing"]}, {"environment": "python_version < \"3.7\" and implementation_name == \"cpython\"", "requires": ["typed-ast"]}], "summary": "An abstract syntax tree for Python with inference support.", "test_requires": [{"requires": ["pytest"]}], "version": "2.1.0"}
\ No newline at end of file
diff --git a/basic python programmes/venv/Lib/site-packages/astroid-2.1.0.dist-info/top_level.txt b/basic python programmes/venv/Lib/site-packages/astroid-2.1.0.dist-info/top_level.txt
new file mode 100644
index 0000000..450d4fe
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid-2.1.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+astroid
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__init__.py b/basic python programmes/venv/Lib/site-packages/astroid/__init__.py
new file mode 100644
index 0000000..d36a5b4
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/__init__.py
@@ -0,0 +1,166 @@
+# Copyright (c) 2006-2013, 2015 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2014 Google, Inc.
+# Copyright (c) 2014 Eevee (Alex Munroe)
+# Copyright (c) 2015-2016, 2018 Claudiu Popa
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2016 Derek Gustafson
+# Copyright (c) 2016 Moises Lopez
+# Copyright (c) 2018 Bryce Guinta
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Python Abstract Syntax Tree New Generation
+
+The aim of this module is to provide a common base representation of
+python source code for projects such as pychecker, pyreverse,
+pylint... Well, actually the development of this library is essentially
+governed by pylint's needs.
+
+It extends the classes defined in Python's _ast module with some
+additional methods and attributes. Instance attributes are added by a
+builder object, which can either generate an extended ast (let's call
+them astroid ;) by visiting an existing ast tree or by inspecting living
+objects. Methods are added by monkey patching ast classes.
+
+Main modules are:
+
+* nodes and scoped_nodes for more information about methods and
+ attributes added to different node classes
+
+* the manager contains a high level object to get astroid trees from
+ source files and living objects. It maintains a cache of previously
+ constructed tree for quick access
+
+* builder contains the class responsible to build astroid trees
+"""
+
+import enum
+import itertools
+import os
+import sys
+
+import wrapt
+
+
+_Context = enum.Enum("Context", "Load Store Del")
+Load = _Context.Load
+Store = _Context.Store
+Del = _Context.Del
+del _Context
+
+
+from .__pkginfo__ import version as __version__
+
+# WARNING: internal imports order matters !
+
+# pylint: disable=redefined-builtin
+
+# make all exception classes accessible from astroid package
+from astroid.exceptions import *
+
+# make all node classes accessible from astroid package
+from astroid.nodes import *
+
+# trigger extra monkey-patching
+from astroid import inference
+
+# more stuff available
+from astroid import raw_building
+from astroid.bases import BaseInstance, Instance, BoundMethod, UnboundMethod
+from astroid.node_classes import are_exclusive, unpack_infer
+from astroid.scoped_nodes import builtin_lookup
+from astroid.builder import parse, extract_node
+from astroid.util import Uninferable
+
+# make a manager instance (borg) accessible from astroid package
+from astroid.manager import AstroidManager
+
+MANAGER = AstroidManager()
+del AstroidManager
+
+# transform utilities (filters and decorator)
+
+
+# pylint: disable=dangerous-default-value
+@wrapt.decorator
+def _inference_tip_cached(func, instance, args, kwargs, _cache={}):
+ """Cache decorator used for inference tips"""
+ node = args[0]
+ try:
+ return iter(_cache[func, node])
+ except KeyError:
+ result = func(*args, **kwargs)
+ # Need to keep an iterator around
+ original, copy = itertools.tee(result)
+ _cache[func, node] = list(copy)
+ return original
+
+
+# pylint: enable=dangerous-default-value
+
+
+def inference_tip(infer_function, raise_on_overwrite=False):
+ """Given an instance specific inference function, return a function to be
+ given to MANAGER.register_transform to set this inference function.
+
+ :param bool raise_on_overwrite: Raise an `InferenceOverwriteError`
+ if the inference tip will overwrite another. Used for debugging
+
+ Typical usage
+
+ .. sourcecode:: python
+
+ MANAGER.register_transform(Call, inference_tip(infer_named_tuple),
+ predicate)
+
+ .. Note::
+
+ Using an inference tip will override
+ any previously set inference tip for the given
+ node. Use a predicate in the transform to prevent
+ excess overwrites.
+ """
+
+ def transform(node, infer_function=infer_function):
+ if (
+ raise_on_overwrite
+ and node._explicit_inference is not None
+ and node._explicit_inference is not infer_function
+ ):
+ raise InferenceOverwriteError(
+ "Inference already set to {existing_inference}. "
+ "Trying to overwrite with {new_inference} for {node}".format(
+ existing_inference=infer_function,
+ new_inference=node._explicit_inference,
+ node=node,
+ )
+ )
+ # pylint: disable=no-value-for-parameter
+ node._explicit_inference = _inference_tip_cached(infer_function)
+ return node
+
+ return transform
+
+
+def register_module_extender(manager, module_name, get_extension_mod):
+ def transform(node):
+ extension_module = get_extension_mod()
+ for name, objs in extension_module.locals.items():
+ node.locals[name] = objs
+ for obj in objs:
+ if obj.parent is extension_module:
+ obj.parent = node
+
+ manager.register_transform(Module, transform, lambda n: n.name == module_name)
+
+
+# load brain plugins
+BRAIN_MODULES_DIR = os.path.join(os.path.dirname(__file__), "brain")
+if BRAIN_MODULES_DIR not in sys.path:
+ # add it to the end of the list so user path take precedence
+ sys.path.append(BRAIN_MODULES_DIR)
+# load modules in this directory
+for module in os.listdir(BRAIN_MODULES_DIR):
+ if module.endswith(".py"):
+ __import__(module[:-3])
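`inference_tip` and `MANAGER.register_transform` above are easiest to read with a concrete registration in hand. The sketch below is illustrative only: the `answer()` convention is made up and is not an actual astroid brain plugin::

    import astroid
    from astroid import MANAGER, inference_tip, nodes

    def _is_answer_call(node):
        # Hypothetical predicate: only touch calls spelled `answer()`.
        return isinstance(node.func, nodes.Name) and node.func.name == "answer"

    def _infer_answer_call(node, context=None):
        # Pretend every such call evaluates to the constant 42.
        return iter([nodes.Const(42)])

    MANAGER.register_transform(
        nodes.Call, inference_tip(_infer_answer_call), _is_answer_call
    )

    call = astroid.extract_node("answer()")
    print(next(call.infer()).value)  # should print 42 via the registered tip
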
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pkginfo__.py b/basic python programmes/venv/Lib/site-packages/astroid/__pkginfo__.py
new file mode 100644
index 0000000..4ab6756
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/__pkginfo__.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2014-2018 Claudiu Popa
+# Copyright (c) 2014 Google, Inc.
+# Copyright (c) 2015-2017 Ceridwen
+# Copyright (c) 2015 Florian Bruhin
+# Copyright (c) 2015 Radosław Ganczarek
+# Copyright (c) 2016 Moises Lopez
+# Copyright (c) 2017 Hugo
+# Copyright (c) 2017 Łukasz Rogalski
+# Copyright (c) 2017 Calen Pennington
+# Copyright (c) 2018 Ashley Whetter
+# Copyright (c) 2018 Bryce Guinta
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""astroid packaging information"""
+
+distname = "astroid"
+
+modname = "astroid"
+
+version = "2.1.0"
+numversion = tuple(int(elem) for elem in version.split(".") if elem.isdigit())
+
+extras_require = {}
+install_requires = [
+ "lazy_object_proxy",
+ "six",
+ "wrapt",
+ 'typing;python_version<"3.5"',
+ 'typed_ast;python_version<"3.7" and implementation_name== "cpython"',
+]
+
+# pylint: disable=redefined-builtin; why license is a builtin anyway?
+license = "LGPL"
+
+author = "Python Code Quality Authority"
+author_email = "code-quality@python.org"
+mailinglist = "mailto://%s" % author_email
+web = "https://github.com/PyCQA/astroid"
+
+description = "An abstract syntax tree for Python with inference support."
+
+classifiers = [
+ "Topic :: Software Development :: Libraries :: Python Modules",
+ "Topic :: Software Development :: Quality Assurance",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.4",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: Implementation :: CPython",
+ "Programming Language :: Python :: Implementation :: PyPy",
+]
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/__init__.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..17566c3
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/__init__.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/__pkginfo__.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/__pkginfo__.cpython-37.pyc
new file mode 100644
index 0000000..2271b05
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/__pkginfo__.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/_ast.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/_ast.cpython-37.pyc
new file mode 100644
index 0000000..24556b6
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/_ast.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/arguments.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/arguments.cpython-37.pyc
new file mode 100644
index 0000000..f25f4f6
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/arguments.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/as_string.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/as_string.cpython-37.pyc
new file mode 100644
index 0000000..90b6d4a
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/as_string.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/bases.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/bases.cpython-37.pyc
new file mode 100644
index 0000000..09eed81
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/bases.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/builder.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/builder.cpython-37.pyc
new file mode 100644
index 0000000..6a7ad0e
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/builder.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/context.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/context.cpython-37.pyc
new file mode 100644
index 0000000..6c4b788
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/context.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/decorators.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/decorators.cpython-37.pyc
new file mode 100644
index 0000000..91cc363
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/decorators.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/exceptions.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/exceptions.cpython-37.pyc
new file mode 100644
index 0000000..2e95c17
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/exceptions.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/helpers.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/helpers.cpython-37.pyc
new file mode 100644
index 0000000..4ef3f7e
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/helpers.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/inference.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/inference.cpython-37.pyc
new file mode 100644
index 0000000..45d1da5
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/inference.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/manager.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/manager.cpython-37.pyc
new file mode 100644
index 0000000..8f3eb7f
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/manager.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/mixins.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/mixins.cpython-37.pyc
new file mode 100644
index 0000000..c27209e
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/mixins.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/modutils.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/modutils.cpython-37.pyc
new file mode 100644
index 0000000..832cf2c
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/modutils.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/node_classes.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/node_classes.cpython-37.pyc
new file mode 100644
index 0000000..545780b
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/node_classes.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/nodes.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/nodes.cpython-37.pyc
new file mode 100644
index 0000000..b0fff01
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/nodes.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/objects.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/objects.cpython-37.pyc
new file mode 100644
index 0000000..a918856
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/objects.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/protocols.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/protocols.cpython-37.pyc
new file mode 100644
index 0000000..c746fbb
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/protocols.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/raw_building.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/raw_building.cpython-37.pyc
new file mode 100644
index 0000000..1bb1902
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/raw_building.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/rebuilder.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/rebuilder.cpython-37.pyc
new file mode 100644
index 0000000..27199bb
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/rebuilder.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/scoped_nodes.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/scoped_nodes.cpython-37.pyc
new file mode 100644
index 0000000..d35a250
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/scoped_nodes.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/test_utils.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/test_utils.cpython-37.pyc
new file mode 100644
index 0000000..1c55476
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/test_utils.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/transforms.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/transforms.cpython-37.pyc
new file mode 100644
index 0000000..0fa5a22
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/transforms.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/util.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/util.cpython-37.pyc
new file mode 100644
index 0000000..3ac74f2
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/__pycache__/util.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/_ast.py b/basic python programmes/venv/Lib/site-packages/astroid/_ast.py
new file mode 100644
index 0000000..7868a7f
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/_ast.py
@@ -0,0 +1,39 @@
+import ast
+import sys
+from collections import namedtuple
+from typing import Optional
+
+_ast_py2 = _ast_py3 = None
+try:
+ import typed_ast.ast3 as _ast_py3
+ import typed_ast.ast27 as _ast_py2
+except ImportError:
+ pass
+
+
+FunctionType = namedtuple("FunctionType", ["argtypes", "returns"])
+
+
+def _get_parser_module(parse_python_two: bool = False):
+ if parse_python_two:
+ parser_module = _ast_py2
+ elif sys.version_info[:2] >= (3, 7):
+ # The typed_ast module doesn't support the full 3.7 syntax yet.
+ # Remove once typed_ast is updated.
+ parser_module = ast
+ else:
+ parser_module = _ast_py3
+ return parser_module or ast
+
+
+def _parse(string: str, parse_python_two: bool = False):
+ return _get_parser_module(parse_python_two=parse_python_two).parse(string)
+
+
+def parse_function_type_comment(type_comment: str) -> Optional[FunctionType]:
+ """Given a correct type comment, obtain a FunctionType object"""
+ if _ast_py3 is None:
+ return None
+
+ func_type = _ast_py3.parse(type_comment, "", "func_type")
+ return FunctionType(argtypes=func_type.argtypes, returns=func_type.returns)
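A small usage sketch for `parse_function_type_comment`, assuming `typed_ast` is installed (otherwise the function returns None, as the code above shows)::

    from astroid._ast import parse_function_type_comment

    func_type = parse_function_type_comment("(int, str) -> bool")
    if func_type is not None:
        # argtypes is a list of annotation nodes, returns is a single node
        print(func_type.argtypes, func_type.returns)
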
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/arguments.py b/basic python programmes/venv/Lib/site-packages/astroid/arguments.py
new file mode 100644
index 0000000..c4bdc6d
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/arguments.py
@@ -0,0 +1,285 @@
+# Copyright (c) 2015-2016, 2018 Claudiu Popa
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2018 Bryce Guinta
+# Copyright (c) 2018 Nick Drozd
+# Copyright (c) 2018 Anthony Sottile
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+
+from astroid import bases
+from astroid import context as contextmod
+from astroid import exceptions
+from astroid import nodes
+from astroid import util
+
+
+class CallSite:
+ """Class for understanding arguments passed into a call site
+
+ It needs a call context, which contains the arguments and the
+ keyword arguments that were passed into a given call site.
+ In order to infer what an argument represents, call
+ :meth:`infer_argument` with the corresponding function node
+ and the argument name.
+ """
+
+ def __init__(self, callcontext, argument_context_map=None):
+ if argument_context_map is None:
+ argument_context_map = {}
+ self.argument_context_map = argument_context_map
+ args = callcontext.args
+ keywords = callcontext.keywords
+ self.duplicated_keywords = set()
+ self._unpacked_args = self._unpack_args(args)
+ self._unpacked_kwargs = self._unpack_keywords(keywords)
+
+ self.positional_arguments = [
+ arg for arg in self._unpacked_args if arg is not util.Uninferable
+ ]
+ self.keyword_arguments = {
+ key: value
+ for key, value in self._unpacked_kwargs.items()
+ if value is not util.Uninferable
+ }
+
+ @classmethod
+ def from_call(cls, call_node):
+ """Get a CallSite object from the given Call node."""
+ callcontext = contextmod.CallContext(call_node.args, call_node.keywords)
+ return cls(callcontext)
+
+ def has_invalid_arguments(self):
+ """Check if in the current CallSite were passed *invalid* arguments
+
+ This can mean multiple things. For instance, if an unpacking
+ of an invalid object was passed, then this method will return True.
+ Other cases can be when the arguments can't be inferred by astroid,
+ for example, by passing objects which aren't known statically.
+ """
+ return len(self.positional_arguments) != len(self._unpacked_args)
+
+ def has_invalid_keywords(self):
+ """Check if in the current CallSite were passed *invalid* keyword arguments
+
+ For instance, unpacking a dictionary with integer keys is invalid
+ (**{1:2}), because the keys must be strings, which will make this
+ method to return True. Other cases where this might return True if
+ objects which can't be inferred were passed.
+ """
+ return len(self.keyword_arguments) != len(self._unpacked_kwargs)
+
+ def _unpack_keywords(self, keywords):
+ values = {}
+ context = contextmod.InferenceContext()
+ context.extra_context = self.argument_context_map
+ for name, value in keywords:
+ if name is None:
+ # Then it's an unpacking operation (**)
+ try:
+ inferred = next(value.infer(context=context))
+ except exceptions.InferenceError:
+ values[name] = util.Uninferable
+ continue
+
+ if not isinstance(inferred, nodes.Dict):
+ # Not something we can work with.
+ values[name] = util.Uninferable
+ continue
+
+ for dict_key, dict_value in inferred.items:
+ try:
+ dict_key = next(dict_key.infer(context=context))
+ except exceptions.InferenceError:
+ values[name] = util.Uninferable
+ continue
+ if not isinstance(dict_key, nodes.Const):
+ values[name] = util.Uninferable
+ continue
+ if not isinstance(dict_key.value, str):
+ values[name] = util.Uninferable
+ continue
+ if dict_key.value in values:
+ # The name is already in the dictionary
+ values[dict_key.value] = util.Uninferable
+ self.duplicated_keywords.add(dict_key.value)
+ continue
+ values[dict_key.value] = dict_value
+ else:
+ values[name] = value
+ return values
+
+ def _unpack_args(self, args):
+ values = []
+ context = contextmod.InferenceContext()
+ context.extra_context = self.argument_context_map
+ for arg in args:
+ if isinstance(arg, nodes.Starred):
+ try:
+ inferred = next(arg.value.infer(context=context))
+ except exceptions.InferenceError:
+ values.append(util.Uninferable)
+ continue
+
+ if inferred is util.Uninferable:
+ values.append(util.Uninferable)
+ continue
+ if not hasattr(inferred, "elts"):
+ values.append(util.Uninferable)
+ continue
+ values.extend(inferred.elts)
+ else:
+ values.append(arg)
+ return values
+
+ def infer_argument(self, funcnode, name, context):
+ """infer a function argument value according to the call context
+
+ Arguments:
+ funcnode: The function being called.
+ name: The name of the argument whose value is being inferred.
+ context: Inference context object
+ """
+ if name in self.duplicated_keywords:
+ raise exceptions.InferenceError(
+ "The arguments passed to {func!r} " " have duplicate keywords.",
+ call_site=self,
+ func=funcnode,
+ arg=name,
+ context=context,
+ )
+
+ # Look into the keywords first, maybe it's already there.
+ try:
+ return self.keyword_arguments[name].infer(context)
+ except KeyError:
+ pass
+
+ # Too many arguments given and no variable arguments.
+ if len(self.positional_arguments) > len(funcnode.args.args):
+ if not funcnode.args.vararg:
+ raise exceptions.InferenceError(
+ "Too many positional arguments "
+ "passed to {func!r} that does "
+ "not have *args.",
+ call_site=self,
+ func=funcnode,
+ arg=name,
+ context=context,
+ )
+
+ positional = self.positional_arguments[: len(funcnode.args.args)]
+ vararg = self.positional_arguments[len(funcnode.args.args) :]
+ argindex = funcnode.args.find_argname(name)[0]
+ kwonlyargs = {arg.name for arg in funcnode.args.kwonlyargs}
+ kwargs = {
+ key: value
+ for key, value in self.keyword_arguments.items()
+ if key not in kwonlyargs
+ }
+ # If there are too few positionals compared to
+ # what the function expects to receive, check to see
+ # if the missing positional arguments were passed
+ # as keyword arguments and if so, place them into the
+ # positional args list.
+ if len(positional) < len(funcnode.args.args):
+ for func_arg in funcnode.args.args:
+ if func_arg.name in kwargs:
+ arg = kwargs.pop(func_arg.name)
+ positional.append(arg)
+
+ if argindex is not None:
+ # 2. first argument of instance/class method
+ if argindex == 0 and funcnode.type in ("method", "classmethod"):
+ if context.boundnode is not None:
+ boundnode = context.boundnode
+ else:
+ # XXX can do better ?
+ boundnode = funcnode.parent.frame()
+
+ if isinstance(boundnode, nodes.ClassDef):
+ # Verify that we're accessing a method
+ # of the metaclass through a class, as in
+ # `cls.metaclass_method`. In this case, the
+ # first argument is always the class.
+ method_scope = funcnode.parent.scope()
+ if method_scope is boundnode.metaclass():
+ return iter((boundnode,))
+
+ if funcnode.type == "method":
+ if not isinstance(boundnode, bases.Instance):
+ boundnode = bases.Instance(boundnode)
+ return iter((boundnode,))
+ if funcnode.type == "classmethod":
+ return iter((boundnode,))
+ # if we have a method, extract one position
+ # from the index, so we'll take in account
+ # the extra parameter represented by `self` or `cls`
+ if funcnode.type in ("method", "classmethod"):
+ argindex -= 1
+ # 2. search arg index
+ try:
+ return self.positional_arguments[argindex].infer(context)
+ except IndexError:
+ pass
+
+ if funcnode.args.kwarg == name:
+ # It wants all the keywords that were passed into
+ # the call site.
+ if self.has_invalid_keywords():
+ raise exceptions.InferenceError(
+ "Inference failed to find values for all keyword arguments "
+ "to {func!r}: {unpacked_kwargs!r} doesn't correspond to "
+ "{keyword_arguments!r}.",
+ keyword_arguments=self.keyword_arguments,
+ unpacked_kwargs=self._unpacked_kwargs,
+ call_site=self,
+ func=funcnode,
+ arg=name,
+ context=context,
+ )
+ kwarg = nodes.Dict(
+ lineno=funcnode.args.lineno,
+ col_offset=funcnode.args.col_offset,
+ parent=funcnode.args,
+ )
+ kwarg.postinit(
+ [(nodes.const_factory(key), value) for key, value in kwargs.items()]
+ )
+ return iter((kwarg,))
+ if funcnode.args.vararg == name:
+ # It wants all the args that were passed into
+ # the call site.
+ if self.has_invalid_arguments():
+ raise exceptions.InferenceError(
+ "Inference failed to find values for all positional "
+ "arguments to {func!r}: {unpacked_args!r} doesn't "
+ "correspond to {positional_arguments!r}.",
+ positional_arguments=self.positional_arguments,
+ unpacked_args=self._unpacked_args,
+ call_site=self,
+ func=funcnode,
+ arg=name,
+ context=context,
+ )
+ args = nodes.Tuple(
+ lineno=funcnode.args.lineno,
+ col_offset=funcnode.args.col_offset,
+ parent=funcnode.args,
+ )
+ args.postinit(vararg)
+ return iter((args,))
+
+ # Check if it's a default parameter.
+ try:
+ return funcnode.args.default_value(name).infer(context)
+ except exceptions.NoDefault:
+ pass
+ raise exceptions.InferenceError(
+ "No value found for argument {name} to " "{func!r}",
+ call_site=self,
+ func=funcnode,
+ arg=name,
+ context=context,
+ )
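To make `CallSite.infer_argument` above less abstract, here is a hedged usage sketch; the `greet` snippet is invented for illustration, and `extract_node` and `InferenceContext` come from the astroid modules included elsewhere in this diff::

    import astroid
    from astroid import context as contextmod
    from astroid.arguments import CallSite

    call = astroid.extract_node('''
    def greet(name, punctuation="!"):
        return "hello " + name + punctuation
    greet("world")  #@
    ''')
    funcnode = next(call.func.infer())   # the FunctionDef for `greet`
    site = CallSite.from_call(call)
    inferred = next(site.infer_argument(funcnode, "name", contextmod.InferenceContext()))
    print(inferred.value)                # should print 'world'
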
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/as_string.py b/basic python programmes/venv/Lib/site-packages/astroid/as_string.py
new file mode 100644
index 0000000..7042272
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/as_string.py
@@ -0,0 +1,630 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2010 Daniel Harding
+# Copyright (c) 2013-2016, 2018 Claudiu Popa
+# Copyright (c) 2013-2014 Google, Inc.
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2016 Jared Garst
+# Copyright (c) 2016 Jakub Wilk
+# Copyright (c) 2017 Łukasz Rogalski
+# Copyright (c) 2017 rr-
+# Copyright (c) 2018 brendanator
+# Copyright (c) 2018 Nick Drozd
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""This module renders Astroid nodes as string:
+
+* :func:`to_code` function return equivalent (hopefully valid) python string
+
+* :func:`dump` function return an internal representation of nodes found
+ in the tree, useful for debugging or understanding the tree structure
+"""
+import sys
+
+
+# pylint: disable=unused-argument
+
+DOC_NEWLINE = "\0"
+
+
+class AsStringVisitor:
+ """Visitor to render an Astroid node as a valid python code string"""
+
+ def __init__(self, indent):
+ self.indent = indent
+
+ def __call__(self, node):
+ """Makes this visitor behave as a simple function"""
+ return node.accept(self).replace(DOC_NEWLINE, "\n")
+
+ def _docs_dedent(self, doc):
+ """Stop newlines in docs being indented by self._stmt_list"""
+ return '\n%s"""%s"""' % (self.indent, doc.replace("\n", DOC_NEWLINE))
+
+ def _stmt_list(self, stmts, indent=True):
+ """return a list of nodes to string"""
+ stmts = "\n".join(nstr for nstr in [n.accept(self) for n in stmts] if nstr)
+ if indent:
+ return self.indent + stmts.replace("\n", "\n" + self.indent)
+
+ return stmts
+
+ def _precedence_parens(self, node, child, is_left=True):
+ """Wrap child in parens only if required to keep same semantics"""
+ if self._should_wrap(node, child, is_left):
+ return "(%s)" % child.accept(self)
+
+ return child.accept(self)
+
+ def _should_wrap(self, node, child, is_left):
+ """Wrap child if:
+ - it has lower precedence
+ - same precedence with position opposite to associativity direction
+ """
+ node_precedence = node.op_precedence()
+ child_precedence = child.op_precedence()
+
+ if node_precedence > child_precedence:
+ # 3 * (4 + 5)
+ return True
+
+ if (
+ node_precedence == child_precedence
+ and is_left != node.op_left_associative()
+ ):
+ # 3 - (4 - 5)
+ # (2**3)**4
+ return True
+
+ return False
+
+ ## visit_ methods ###########################################
+
+ def visit_arguments(self, node):
+ """return an astroid.Function node as string"""
+ return node.format_args()
+
+ def visit_assignattr(self, node):
+ """return an astroid.AssAttr node as string"""
+ return self.visit_attribute(node)
+
+ def visit_assert(self, node):
+ """return an astroid.Assert node as string"""
+ if node.fail:
+ return "assert %s, %s" % (node.test.accept(self), node.fail.accept(self))
+ return "assert %s" % node.test.accept(self)
+
+ def visit_assignname(self, node):
+ """return an astroid.AssName node as string"""
+ return node.name
+
+ def visit_assign(self, node):
+ """return an astroid.Assign node as string"""
+ lhs = " = ".join(n.accept(self) for n in node.targets)
+ return "%s = %s" % (lhs, node.value.accept(self))
+
+ def visit_augassign(self, node):
+ """return an astroid.AugAssign node as string"""
+ return "%s %s %s" % (node.target.accept(self), node.op, node.value.accept(self))
+
+ def visit_annassign(self, node):
+ """Return an astroid.AugAssign node as string"""
+
+ target = node.target.accept(self)
+ annotation = node.annotation.accept(self)
+ if node.value is None:
+ return "%s: %s" % (target, annotation)
+ return "%s: %s = %s" % (target, annotation, node.value.accept(self))
+
+ def visit_repr(self, node):
+ """return an astroid.Repr node as string"""
+ return "`%s`" % node.value.accept(self)
+
+ def visit_binop(self, node):
+ """return an astroid.BinOp node as string"""
+ left = self._precedence_parens(node, node.left)
+ right = self._precedence_parens(node, node.right, is_left=False)
+ if node.op == "**":
+ return "%s%s%s" % (left, node.op, right)
+
+ return "%s %s %s" % (left, node.op, right)
+
+ def visit_boolop(self, node):
+ """return an astroid.BoolOp node as string"""
+ values = ["%s" % self._precedence_parens(node, n) for n in node.values]
+ return (" %s " % node.op).join(values)
+
+ def visit_break(self, node):
+ """return an astroid.Break node as string"""
+ return "break"
+
+ def visit_call(self, node):
+ """return an astroid.Call node as string"""
+ expr_str = self._precedence_parens(node, node.func)
+ args = [arg.accept(self) for arg in node.args]
+ if node.keywords:
+ keywords = [kwarg.accept(self) for kwarg in node.keywords]
+ else:
+ keywords = []
+
+ args.extend(keywords)
+ return "%s(%s)" % (expr_str, ", ".join(args))
+
+ def visit_classdef(self, node):
+ """return an astroid.ClassDef node as string"""
+ decorate = node.decorators.accept(self) if node.decorators else ""
+ bases = ", ".join(n.accept(self) for n in node.bases)
+ metaclass = node.metaclass()
+ if metaclass and not node.has_metaclass_hack():
+ if bases:
+ bases = "(%s, metaclass=%s)" % (bases, metaclass.name)
+ else:
+ bases = "(metaclass=%s)" % metaclass.name
+ else:
+ bases = "(%s)" % bases if bases else ""
+ docs = self._docs_dedent(node.doc) if node.doc else ""
+ return "\n\n%sclass %s%s:%s\n%s\n" % (
+ decorate,
+ node.name,
+ bases,
+ docs,
+ self._stmt_list(node.body),
+ )
+
+ def visit_compare(self, node):
+ """return an astroid.Compare node as string"""
+ rhs_str = " ".join(
+ [
+ "%s %s" % (op, self._precedence_parens(node, expr, is_left=False))
+ for op, expr in node.ops
+ ]
+ )
+ return "%s %s" % (self._precedence_parens(node, node.left), rhs_str)
+
+ def visit_comprehension(self, node):
+ """return an astroid.Comprehension node as string"""
+ ifs = "".join(" if %s" % n.accept(self) for n in node.ifs)
+ return "for %s in %s%s" % (
+ node.target.accept(self),
+ node.iter.accept(self),
+ ifs,
+ )
+
+ def visit_const(self, node):
+ """return an astroid.Const node as string"""
+ if node.value is Ellipsis:
+ return "..."
+ return repr(node.value)
+
+ def visit_continue(self, node):
+ """return an astroid.Continue node as string"""
+ return "continue"
+
+ def visit_delete(self, node): # XXX check if correct
+ """return an astroid.Delete node as string"""
+ return "del %s" % ", ".join(child.accept(self) for child in node.targets)
+
+ def visit_delattr(self, node):
+ """return an astroid.DelAttr node as string"""
+ return self.visit_attribute(node)
+
+ def visit_delname(self, node):
+ """return an astroid.DelName node as string"""
+ return node.name
+
+ def visit_decorators(self, node):
+ """return an astroid.Decorators node as string"""
+ return "@%s\n" % "\n@".join(item.accept(self) for item in node.nodes)
+
+ def visit_dict(self, node):
+ """return an astroid.Dict node as string"""
+ return "{%s}" % ", ".join(self._visit_dict(node))
+
+ def _visit_dict(self, node):
+ for key, value in node.items:
+ key = key.accept(self)
+ value = value.accept(self)
+ if key == "**":
+ # It can only be a DictUnpack node.
+ yield key + value
+ else:
+ yield "%s: %s" % (key, value)
+
+ def visit_dictunpack(self, node):
+ return "**"
+
+ def visit_dictcomp(self, node):
+ """return an astroid.DictComp node as string"""
+ return "{%s: %s %s}" % (
+ node.key.accept(self),
+ node.value.accept(self),
+ " ".join(n.accept(self) for n in node.generators),
+ )
+
+ def visit_expr(self, node):
+ """return an astroid.Discard node as string"""
+ return node.value.accept(self)
+
+ def visit_emptynode(self, node):
+ """dummy method for visiting an Empty node"""
+ return ""
+
+ def visit_excepthandler(self, node):
+ if node.type:
+ if node.name:
+ excs = "except %s, %s" % (
+ node.type.accept(self),
+ node.name.accept(self),
+ )
+ else:
+ excs = "except %s" % node.type.accept(self)
+ else:
+ excs = "except"
+ return "%s:\n%s" % (excs, self._stmt_list(node.body))
+
+ def visit_ellipsis(self, node):
+ """return an astroid.Ellipsis node as string"""
+ return "..."
+
+ def visit_empty(self, node):
+ """return an Empty node as string"""
+ return ""
+
+ def visit_exec(self, node):
+ """return an astroid.Exec node as string"""
+ if node.locals:
+ return "exec %s in %s, %s" % (
+ node.expr.accept(self),
+ node.locals.accept(self),
+ node.globals.accept(self),
+ )
+ if node.globals:
+ return "exec %s in %s" % (node.expr.accept(self), node.globals.accept(self))
+ return "exec %s" % node.expr.accept(self)
+
+ def visit_extslice(self, node):
+ """return an astroid.ExtSlice node as string"""
+ return ", ".join(dim.accept(self) for dim in node.dims)
+
+ def visit_for(self, node):
+ """return an astroid.For node as string"""
+ fors = "for %s in %s:\n%s" % (
+ node.target.accept(self),
+ node.iter.accept(self),
+ self._stmt_list(node.body),
+ )
+ if node.orelse:
+ fors = "%s\nelse:\n%s" % (fors, self._stmt_list(node.orelse))
+ return fors
+
+ def visit_importfrom(self, node):
+ """return an astroid.ImportFrom node as string"""
+ return "from %s import %s" % (
+ "." * (node.level or 0) + node.modname,
+ _import_string(node.names),
+ )
+
+ def visit_functiondef(self, node):
+ """return an astroid.Function node as string"""
+ decorate = node.decorators.accept(self) if node.decorators else ""
+ docs = self._docs_dedent(node.doc) if node.doc else ""
+ trailer = ":"
+ if node.returns:
+ return_annotation = "->" + node.returns.as_string()
+ trailer = return_annotation + ":"
+ def_format = "\n%sdef %s(%s)%s%s\n%s"
+ return def_format % (
+ decorate,
+ node.name,
+ node.args.accept(self),
+ trailer,
+ docs,
+ self._stmt_list(node.body),
+ )
+
+ def visit_generatorexp(self, node):
+ """return an astroid.GeneratorExp node as string"""
+ return "(%s %s)" % (
+ node.elt.accept(self),
+ " ".join(n.accept(self) for n in node.generators),
+ )
+
+ def visit_attribute(self, node):
+ """return an astroid.Getattr node as string"""
+ return "%s.%s" % (self._precedence_parens(node, node.expr), node.attrname)
+
+ def visit_global(self, node):
+ """return an astroid.Global node as string"""
+ return "global %s" % ", ".join(node.names)
+
+ def visit_if(self, node):
+ """return an astroid.If node as string"""
+ ifs = ["if %s:\n%s" % (node.test.accept(self), self._stmt_list(node.body))]
+ if node.has_elif_block():
+ ifs.append("el%s" % self._stmt_list(node.orelse, indent=False))
+ elif node.orelse:
+ ifs.append("else:\n%s" % self._stmt_list(node.orelse))
+ return "\n".join(ifs)
+
+ def visit_ifexp(self, node):
+ """return an astroid.IfExp node as string"""
+ return "%s if %s else %s" % (
+ self._precedence_parens(node, node.body, is_left=True),
+ self._precedence_parens(node, node.test, is_left=True),
+ self._precedence_parens(node, node.orelse, is_left=False),
+ )
+
+ def visit_import(self, node):
+ """return an astroid.Import node as string"""
+ return "import %s" % _import_string(node.names)
+
+ def visit_keyword(self, node):
+ """return an astroid.Keyword node as string"""
+ if node.arg is None:
+ return "**%s" % node.value.accept(self)
+ return "%s=%s" % (node.arg, node.value.accept(self))
+
+ def visit_lambda(self, node):
+ """return an astroid.Lambda node as string"""
+ args = node.args.accept(self)
+ body = node.body.accept(self)
+ if args:
+ return "lambda %s: %s" % (args, body)
+
+ return "lambda: %s" % body
+
+ def visit_list(self, node):
+ """return an astroid.List node as string"""
+ return "[%s]" % ", ".join(child.accept(self) for child in node.elts)
+
+ def visit_listcomp(self, node):
+ """return an astroid.ListComp node as string"""
+ return "[%s %s]" % (
+ node.elt.accept(self),
+ " ".join(n.accept(self) for n in node.generators),
+ )
+
+ def visit_module(self, node):
+ """return an astroid.Module node as string"""
+ docs = '"""%s"""\n\n' % node.doc if node.doc else ""
+ return docs + "\n".join(n.accept(self) for n in node.body) + "\n\n"
+
+ def visit_name(self, node):
+ """return an astroid.Name node as string"""
+ return node.name
+
+ def visit_pass(self, node):
+ """return an astroid.Pass node as string"""
+ return "pass"
+
+ def visit_print(self, node):
+ """return an astroid.Print node as string"""
+ nodes = ", ".join(n.accept(self) for n in node.values)
+ if not node.nl:
+ nodes = "%s," % nodes
+ if node.dest:
+ return "print >> %s, %s" % (node.dest.accept(self), nodes)
+ return "print %s" % nodes
+
+ def visit_raise(self, node):
+ """return an astroid.Raise node as string"""
+ if node.exc:
+ if node.inst:
+ if node.tback:
+ return "raise %s, %s, %s" % (
+ node.exc.accept(self),
+ node.inst.accept(self),
+ node.tback.accept(self),
+ )
+ return "raise %s, %s" % (node.exc.accept(self), node.inst.accept(self))
+ return "raise %s" % node.exc.accept(self)
+ return "raise"
+
+ def visit_return(self, node):
+ """return an astroid.Return node as string"""
+ if node.is_tuple_return() and len(node.value.elts) > 1:
+ elts = [child.accept(self) for child in node.value.elts]
+ return "return %s" % ", ".join(elts)
+
+ if node.value:
+ return "return %s" % node.value.accept(self)
+
+ return "return"
+
+ def visit_index(self, node):
+ """return an astroid.Index node as string"""
+ return node.value.accept(self)
+
+ def visit_set(self, node):
+ """return an astroid.Set node as string"""
+ return "{%s}" % ", ".join(child.accept(self) for child in node.elts)
+
+ def visit_setcomp(self, node):
+ """return an astroid.SetComp node as string"""
+ return "{%s %s}" % (
+ node.elt.accept(self),
+ " ".join(n.accept(self) for n in node.generators),
+ )
+
+ def visit_slice(self, node):
+ """return an astroid.Slice node as string"""
+ lower = node.lower.accept(self) if node.lower else ""
+ upper = node.upper.accept(self) if node.upper else ""
+ step = node.step.accept(self) if node.step else ""
+ if step:
+ return "%s:%s:%s" % (lower, upper, step)
+ return "%s:%s" % (lower, upper)
+
+ def visit_subscript(self, node):
+ """return an astroid.Subscript node as string"""
+ idx = node.slice
+ if idx.__class__.__name__.lower() == "index":
+ idx = idx.value
+ idxstr = idx.accept(self)
+ if idx.__class__.__name__.lower() == "tuple" and idx.elts:
+ # Remove parenthesis in tuple and extended slice.
+ # a[(::1, 1:)] is not valid syntax.
+ idxstr = idxstr[1:-1]
+ return "%s[%s]" % (self._precedence_parens(node, node.value), idxstr)
+
+ def visit_tryexcept(self, node):
+ """return an astroid.TryExcept node as string"""
+ trys = ["try:\n%s" % self._stmt_list(node.body)]
+ for handler in node.handlers:
+ trys.append(handler.accept(self))
+ if node.orelse:
+ trys.append("else:\n%s" % self._stmt_list(node.orelse))
+ return "\n".join(trys)
+
+ def visit_tryfinally(self, node):
+ """return an astroid.TryFinally node as string"""
+ return "try:\n%s\nfinally:\n%s" % (
+ self._stmt_list(node.body),
+ self._stmt_list(node.finalbody),
+ )
+
+ def visit_tuple(self, node):
+ """return an astroid.Tuple node as string"""
+ if len(node.elts) == 1:
+ return "(%s, )" % node.elts[0].accept(self)
+ return "(%s)" % ", ".join(child.accept(self) for child in node.elts)
+
+ def visit_unaryop(self, node):
+ """return an astroid.UnaryOp node as string"""
+ if node.op == "not":
+ operator = "not "
+ else:
+ operator = node.op
+ return "%s%s" % (operator, self._precedence_parens(node, node.operand))
+
+ def visit_while(self, node):
+ """return an astroid.While node as string"""
+ whiles = "while %s:\n%s" % (node.test.accept(self), self._stmt_list(node.body))
+ if node.orelse:
+ whiles = "%s\nelse:\n%s" % (whiles, self._stmt_list(node.orelse))
+ return whiles
+
+ def visit_with(self, node): # 'with' without 'as' is possible
+ """return an astroid.With node as string"""
+ items = ", ".join(
+ ("%s" % expr.accept(self)) + (vars and " as %s" % (vars.accept(self)) or "")
+ for expr, vars in node.items
+ )
+ return "with %s:\n%s" % (items, self._stmt_list(node.body))
+
+ def visit_yield(self, node):
+ """yield an ast.Yield node as string"""
+ yi_val = (" " + node.value.accept(self)) if node.value else ""
+ expr = "yield" + yi_val
+ if node.parent.is_statement:
+ return expr
+
+ return "(%s)" % (expr,)
+
+ def visit_starred(self, node):
+ """return Starred node as string"""
+ return "*" + node.value.accept(self)
+
+ # These aren't for real AST nodes, but for inference objects.
+
+ def visit_frozenset(self, node):
+ return node.parent.accept(self)
+
+ def visit_super(self, node):
+ return node.parent.accept(self)
+
+ def visit_uninferable(self, node):
+ return str(node)
+
+
+class AsStringVisitor3(AsStringVisitor):
+ """AsStringVisitor3 overwrites some AsStringVisitor methods"""
+
+ def visit_excepthandler(self, node):
+ if node.type:
+ if node.name:
+ excs = "except %s as %s" % (
+ node.type.accept(self),
+ node.name.accept(self),
+ )
+ else:
+ excs = "except %s" % node.type.accept(self)
+ else:
+ excs = "except"
+ return "%s:\n%s" % (excs, self._stmt_list(node.body))
+
+ def visit_nonlocal(self, node):
+ """return an astroid.Nonlocal node as string"""
+ return "nonlocal %s" % ", ".join(node.names)
+
+ def visit_raise(self, node):
+ """return an astroid.Raise node as string"""
+ if node.exc:
+ if node.cause:
+ return "raise %s from %s" % (
+ node.exc.accept(self),
+ node.cause.accept(self),
+ )
+ return "raise %s" % node.exc.accept(self)
+ return "raise"
+
+ def visit_yieldfrom(self, node):
+ """ Return an astroid.YieldFrom node as string. """
+ yi_val = (" " + node.value.accept(self)) if node.value else ""
+ expr = "yield from" + yi_val
+ if node.parent.is_statement:
+ return expr
+
+ return "(%s)" % (expr,)
+
+ def visit_asyncfunctiondef(self, node):
+ function = super(AsStringVisitor3, self).visit_functiondef(node)
+ return "async " + function.strip()
+
+ def visit_await(self, node):
+ return "await %s" % node.value.accept(self)
+
+ def visit_asyncwith(self, node):
+ return "async %s" % self.visit_with(node)
+
+ def visit_asyncfor(self, node):
+ return "async %s" % self.visit_for(node)
+
+ def visit_joinedstr(self, node):
+ # Special treatment for constants,
+ # as we want to join literals not reprs
+ string = "".join(
+ value.value if type(value).__name__ == "Const" else value.accept(self)
+ for value in node.values
+ )
+ return "f'%s'" % string
+
+ def visit_formattedvalue(self, node):
+ return "{%s}" % node.value.accept(self)
+
+ def visit_comprehension(self, node):
+ """return an astroid.Comprehension node as string"""
+ return "%s%s" % (
+ "async " if node.is_async else "",
+ super(AsStringVisitor3, self).visit_comprehension(node),
+ )
+
+
+def _import_string(names):
+ """return a list of (name, asname) formatted as a string"""
+ _names = []
+ for name, asname in names:
+ if asname is not None:
+ _names.append("%s as %s" % (name, asname))
+ else:
+ _names.append(name)
+ return ", ".join(_names)
+
+
+if sys.version_info >= (3, 0):
+ AsStringVisitor = AsStringVisitor3
+
+# This sets the default indent to 4 spaces.
+to_code = AsStringVisitor(" ")
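as_string.py backs the `as_string()` method available on astroid nodes (and the module-level `to_code` helper instantiated just above). A rough usage sketch, assuming only the public `astroid.parse` entry point (not part of the patch):

    import astroid

    # Parse a snippet and render it back to source. Parentheses are
    # re-inserted only where _should_wrap() says precedence requires them.
    module = astroid.parse("x=1+2*3\nif x: print(x)")
    print(module.as_string())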
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/bases.py b/basic python programmes/venv/Lib/site-packages/astroid/bases.py
new file mode 100644
index 0000000..318ddb5
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/bases.py
@@ -0,0 +1,545 @@
+# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2012 FELD Boris
+# Copyright (c) 2014-2018 Claudiu Popa
+# Copyright (c) 2014 Google, Inc.
+# Copyright (c) 2014 Eevee (Alex Munroe)
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2015 Florian Bruhin
+# Copyright (c) 2016-2017 Derek Gustafson
+# Copyright (c) 2017 Calen Pennington
+# Copyright (c) 2018 Bryce Guinta
+# Copyright (c) 2018 Nick Drozd
+# Copyright (c) 2018 Daniel Colascione
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""This module contains base classes and functions for the nodes and some
+inference utils.
+"""
+
+import builtins
+import collections
+import sys
+
+from astroid import context as contextmod
+from astroid import exceptions
+from astroid import util
+
+objectmodel = util.lazy_import("interpreter.objectmodel")
+helpers = util.lazy_import("helpers")
+BUILTINS = builtins.__name__
+manager = util.lazy_import("manager")
+MANAGER = manager.AstroidManager()
+
+if sys.version_info >= (3, 0):
+ # TODO: check if needs special treatment
+ BUILTINS = "builtins"
+ BOOL_SPECIAL_METHOD = "__bool__"
+else:
+ BUILTINS = "__builtin__"
+ BOOL_SPECIAL_METHOD = "__nonzero__"
+PROPERTIES = {BUILTINS + ".property", "abc.abstractproperty"}
+# List of possible property names. We use this list in order
+# to see if a method is a property or not. This should be
+# pretty reliable and fast, the alternative being to check each
+# decorator to see if it's a real property-like descriptor, which
+# can be too complicated.
+# Also, these aren't qualified, because each project can
+# define them, we shouldn't expect to know every possible
+# property-like decorator!
+POSSIBLE_PROPERTIES = {
+ "cached_property",
+ "cachedproperty",
+ "lazyproperty",
+ "lazy_property",
+ "reify",
+ "lazyattribute",
+ "lazy_attribute",
+ "LazyProperty",
+ "lazy",
+ "cache_readonly",
+}
+
+
+def _is_property(meth):
+ if PROPERTIES.intersection(meth.decoratornames()):
+ return True
+ stripped = {
+ name.split(".")[-1]
+ for name in meth.decoratornames()
+ if name is not util.Uninferable
+ }
+ if any(name in stripped for name in POSSIBLE_PROPERTIES):
+ return True
+
+ # Lookup for subclasses of *property*
+ if not meth.decorators:
+ return False
+ for decorator in meth.decorators.nodes or ():
+ inferred = helpers.safe_infer(decorator)
+ if inferred is None or inferred is util.Uninferable:
+ continue
+ if inferred.__class__.__name__ == "ClassDef":
+ for base_class in inferred.bases:
+ module, _ = base_class.lookup(base_class.name)
+ if module.name == BUILTINS and base_class.name == "property":
+ return True
+
+ return False
+
+
+class Proxy:
+ """a simple proxy object
+
+ Note:
+
+ Subclasses of this object will need a custom __getattr__
+ if new instance attributes are created. See the Const class
+ """
+
+ _proxied = None # proxied object may be set by class or by instance
+
+ def __init__(self, proxied=None):
+ if proxied is not None:
+ self._proxied = proxied
+
+ def __getattr__(self, name):
+ if name == "_proxied":
+ return getattr(self.__class__, "_proxied")
+ if name in self.__dict__:
+ return self.__dict__[name]
+ return getattr(self._proxied, name)
+
+ def infer(self, context=None):
+ yield self
+
+
+def _infer_stmts(stmts, context, frame=None):
+ """Return an iterator on statements inferred by each statement in *stmts*."""
+ inferred = False
+ if context is not None:
+ name = context.lookupname
+ context = context.clone()
+ else:
+ name = None
+ context = contextmod.InferenceContext()
+
+ for stmt in stmts:
+ if stmt is util.Uninferable:
+ yield stmt
+ inferred = True
+ continue
+ context.lookupname = stmt._infer_name(frame, name)
+ try:
+ for inferred in stmt.infer(context=context):
+ yield inferred
+ inferred = True
+ except exceptions.NameInferenceError:
+ continue
+ except exceptions.InferenceError:
+ yield util.Uninferable
+ inferred = True
+ if not inferred:
+ raise exceptions.InferenceError(
+ "Inference failed for all members of {stmts!r}.",
+ stmts=stmts,
+ frame=frame,
+ context=context,
+ )
+
+
+def _infer_method_result_truth(instance, method_name, context):
+ # Get the method from the instance and try to infer
+ # its return's truth value.
+ meth = next(instance.igetattr(method_name, context=context), None)
+ if meth and hasattr(meth, "infer_call_result"):
+ if not meth.callable():
+ return util.Uninferable
+ try:
+ for value in meth.infer_call_result(instance, context=context):
+ if value is util.Uninferable:
+ return value
+
+ inferred = next(value.infer(context=context))
+ return inferred.bool_value()
+ except exceptions.InferenceError:
+ pass
+ return util.Uninferable
+
+
+class BaseInstance(Proxy):
+ """An instance base class, which provides lookup methods for potential instances."""
+
+ special_attributes = None
+
+ def display_type(self):
+ return "Instance of"
+
+ def getattr(self, name, context=None, lookupclass=True):
+ try:
+ values = self._proxied.instance_attr(name, context)
+ except exceptions.AttributeInferenceError as exc:
+ if self.special_attributes and name in self.special_attributes:
+ return [self.special_attributes.lookup(name)]
+
+ if lookupclass:
+ # Class attributes not available through the instance
+ # unless they are explicitly defined.
+ return self._proxied.getattr(name, context, class_context=False)
+
+ raise exceptions.AttributeInferenceError(
+ target=self, attribute=name, context=context
+ ) from exc
+ # since we've no context information, return matching class members as
+ # well
+ if lookupclass:
+ try:
+ return values + self._proxied.getattr(
+ name, context, class_context=False
+ )
+ except exceptions.AttributeInferenceError:
+ pass
+ return values
+
+ def igetattr(self, name, context=None):
+ """inferred getattr"""
+ if not context:
+ context = contextmod.InferenceContext()
+ try:
+ # avoid recursively inferring the same attr on the same class
+ if context.push((self._proxied, name)):
+ return
+
+ # XXX frame should be self._proxied, or not ?
+ get_attr = self.getattr(name, context, lookupclass=False)
+ yield from _infer_stmts(
+ self._wrap_attr(get_attr, context), context, frame=self
+ )
+ except exceptions.AttributeInferenceError as error:
+ try:
+ # fallback to class.igetattr since it has some logic to handle
+ # descriptors
+ # But only if the _proxied is the Class.
+ if self._proxied.__class__.__name__ != "ClassDef":
+ raise exceptions.InferenceError(**vars(error)) from error
+ attrs = self._proxied.igetattr(name, context, class_context=False)
+ yield from self._wrap_attr(attrs, context)
+ except exceptions.AttributeInferenceError as error:
+ raise exceptions.InferenceError(**vars(error)) from error
+
+ def _wrap_attr(self, attrs, context=None):
+ """wrap bound methods of attrs in a InstanceMethod proxies"""
+ for attr in attrs:
+ if isinstance(attr, UnboundMethod):
+ if _is_property(attr):
+ yield from attr.infer_call_result(self, context)
+ else:
+ yield BoundMethod(attr, self)
+            elif hasattr(attr, "name") and attr.name == "<lambda>":
+ # This is a lambda function defined at class level,
+ # since its scope is the underlying _proxied class.
+ # Unfortunately, we can't do an isinstance check here,
+ # because of the circular dependency between astroid.bases
+ # and astroid.scoped_nodes.
+ if attr.statement().scope() == self._proxied:
+ if attr.args.args and attr.args.args[0].name == "self":
+ yield BoundMethod(attr, self)
+ continue
+ yield attr
+ else:
+ yield attr
+
+ def infer_call_result(self, caller, context=None):
+ """infer what a class instance is returning when called"""
+ context = contextmod.bind_context_to_node(context, self)
+ inferred = False
+ for node in self._proxied.igetattr("__call__", context):
+ if node is util.Uninferable or not node.callable():
+ continue
+ for res in node.infer_call_result(caller, context):
+ inferred = True
+ yield res
+ if not inferred:
+ raise exceptions.InferenceError(node=self, caller=caller, context=context)
+
+
+class Instance(BaseInstance):
+ """A special node representing a class instance."""
+
+ # pylint: disable=unnecessary-lambda
+ special_attributes = util.lazy_descriptor(lambda: objectmodel.InstanceModel())
+
+ def __repr__(self):
+ return "" % (
+ self._proxied.root().name,
+ self._proxied.name,
+ id(self),
+ )
+
+ def __str__(self):
+ return "Instance of %s.%s" % (self._proxied.root().name, self._proxied.name)
+
+ def callable(self):
+ try:
+ self._proxied.getattr("__call__", class_context=False)
+ return True
+ except exceptions.AttributeInferenceError:
+ return False
+
+ def pytype(self):
+ return self._proxied.qname()
+
+ def display_type(self):
+ return "Instance of"
+
+ def bool_value(self):
+ """Infer the truth value for an Instance
+
+ The truth value of an instance is determined by these conditions:
+
+ * if it implements __bool__ on Python 3 or __nonzero__
+ on Python 2, then its bool value will be determined by
+ calling this special method and checking its result.
+ * when this method is not defined, __len__() is called, if it
+ is defined, and the object is considered true if its result is
+ nonzero. If a class defines neither __len__() nor __bool__(),
+ all its instances are considered true.
+ """
+ context = contextmod.InferenceContext()
+ context.callcontext = contextmod.CallContext(args=[])
+ context.boundnode = self
+
+ try:
+ result = _infer_method_result_truth(self, BOOL_SPECIAL_METHOD, context)
+ except (exceptions.InferenceError, exceptions.AttributeInferenceError):
+ # Fallback to __len__.
+ try:
+ result = _infer_method_result_truth(self, "__len__", context)
+ except (exceptions.AttributeInferenceError, exceptions.InferenceError):
+ return True
+ return result
+
+ # This is set in inference.py.
+ def getitem(self, index, context=None):
+ pass
+
+
+class UnboundMethod(Proxy):
+ """a special node representing a method not bound to an instance"""
+
+ # pylint: disable=unnecessary-lambda
+ special_attributes = util.lazy_descriptor(lambda: objectmodel.UnboundMethodModel())
+
+ def __repr__(self):
+ frame = self._proxied.parent.frame()
+ return "<%s %s of %s at 0x%s" % (
+ self.__class__.__name__,
+ self._proxied.name,
+ frame.qname(),
+ id(self),
+ )
+
+ def implicit_parameters(self):
+ return 0
+
+ def is_bound(self):
+ return False
+
+ def getattr(self, name, context=None):
+ if name in self.special_attributes:
+ return [self.special_attributes.lookup(name)]
+ return self._proxied.getattr(name, context)
+
+ def igetattr(self, name, context=None):
+ if name in self.special_attributes:
+ return iter((self.special_attributes.lookup(name),))
+ return self._proxied.igetattr(name, context)
+
+ def infer_call_result(self, caller, context):
+ """
+ The boundnode of the regular context with a function called
+ on ``object.__new__`` will be of type ``object``,
+ which is incorrect for the argument in general.
+        If no context is given, the ``object.__new__`` call argument will be
+        correctly inferred, except when inside a call that requires
+ the additional context (such as a classmethod) of the boundnode
+ to determine which class the method was called from
+ """
+
+ # If we're unbound method __new__ of builtin object, the result is an
+ # instance of the class given as first argument.
+ if (
+ self._proxied.name == "__new__"
+ and self._proxied.parent.frame().qname() == "%s.object" % BUILTINS
+ ):
+ if caller.args:
+ node_context = context.extra_context.get(caller.args[0])
+ infer = caller.args[0].infer(context=node_context)
+ else:
+ infer = []
+ return (Instance(x) if x is not util.Uninferable else x for x in infer)
+ return self._proxied.infer_call_result(caller, context)
+
+ def bool_value(self):
+ return True
+
+
+class BoundMethod(UnboundMethod):
+ """a special node representing a method bound to an instance"""
+
+ # pylint: disable=unnecessary-lambda
+ special_attributes = util.lazy_descriptor(lambda: objectmodel.BoundMethodModel())
+
+ def __init__(self, proxy, bound):
+ UnboundMethod.__init__(self, proxy)
+ self.bound = bound
+
+ def implicit_parameters(self):
+ return 1
+
+ def is_bound(self):
+ return True
+
+ def _infer_type_new_call(self, caller, context):
+ """Try to infer what type.__new__(mcs, name, bases, attrs) returns.
+
+        For such a call to be valid, the metaclass needs to be
+        a subtype of ``type``, the name needs to be a string, and the bases
+        need to be a tuple of classes
+ """
+ from astroid import node_classes
+
+ # Verify the metaclass
+ mcs = next(caller.args[0].infer(context=context))
+ if mcs.__class__.__name__ != "ClassDef":
+ # Not a valid first argument.
+ return None
+ if not mcs.is_subtype_of("%s.type" % BUILTINS):
+ # Not a valid metaclass.
+ return None
+
+ # Verify the name
+ name = next(caller.args[1].infer(context=context))
+ if name.__class__.__name__ != "Const":
+ # Not a valid name, needs to be a const.
+ return None
+ if not isinstance(name.value, str):
+ # Needs to be a string.
+ return None
+
+ # Verify the bases
+ bases = next(caller.args[2].infer(context=context))
+ if bases.__class__.__name__ != "Tuple":
+ # Needs to be a tuple.
+ return None
+ inferred_bases = [next(elt.infer(context=context)) for elt in bases.elts]
+ if any(base.__class__.__name__ != "ClassDef" for base in inferred_bases):
+        # All the bases need to be Classes
+ return None
+
+ # Verify the attributes.
+ attrs = next(caller.args[3].infer(context=context))
+ if attrs.__class__.__name__ != "Dict":
+ # Needs to be a dictionary.
+ return None
+ cls_locals = collections.defaultdict(list)
+ for key, value in attrs.items:
+ key = next(key.infer(context=context))
+ value = next(value.infer(context=context))
+ # Ignore non string keys
+ if key.__class__.__name__ == "Const" and isinstance(key.value, str):
+ cls_locals[key.value].append(value)
+
+ # Build the class from now.
+ cls = mcs.__class__(
+ name=name.value,
+ lineno=caller.lineno,
+ col_offset=caller.col_offset,
+ parent=caller,
+ )
+ empty = node_classes.Pass()
+ cls.postinit(
+ bases=bases.elts,
+ body=[empty],
+ decorators=[],
+ newstyle=True,
+ metaclass=mcs,
+ keywords=[],
+ )
+ cls.locals = cls_locals
+ return cls
+
+ def infer_call_result(self, caller, context=None):
+ context = contextmod.bind_context_to_node(context, self.bound)
+ if (
+ self.bound.__class__.__name__ == "ClassDef"
+ and self.bound.name == "type"
+ and self.name == "__new__"
+ and len(caller.args) == 4
+ ):
+ # Check if we have a ``type.__new__(mcs, name, bases, attrs)`` call.
+ new_cls = self._infer_type_new_call(caller, context)
+ if new_cls:
+ return iter((new_cls,))
+
+ return super(BoundMethod, self).infer_call_result(caller, context)
+
+ def bool_value(self):
+ return True
+
+
+class Generator(BaseInstance):
+ """a special node representing a generator.
+
+ Proxied class is set once for all in raw_building.
+ """
+
+ # pylint: disable=unnecessary-lambda
+ special_attributes = util.lazy_descriptor(lambda: objectmodel.GeneratorModel())
+
+ # pylint: disable=super-init-not-called
+ def __init__(self, parent=None):
+ self.parent = parent
+
+ def callable(self):
+ return False
+
+ def pytype(self):
+ return "%s.generator" % BUILTINS
+
+ def display_type(self):
+ return "Generator"
+
+ def bool_value(self):
+ return True
+
+ def __repr__(self):
+ return "" % (
+ self._proxied.name,
+ self.lineno,
+ id(self),
+ )
+
+ def __str__(self):
+ return "Generator(%s)" % (self._proxied.name)
+
+
+class AsyncGenerator(Generator):
+ """Special node representing an async generator"""
+
+ def pytype(self):
+ return "%s.async_generator" % BUILTINS
+
+ def display_type(self):
+ return "AsyncGenerator"
+
+ def __repr__(self):
+ return "" % (
+ self._proxied.name,
+ self.lineno,
+ id(self),
+ )
+
+ def __str__(self):
+ return "AsyncGenerator(%s)" % (self._proxied.name)
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_argparse.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_argparse.cpython-37.pyc
new file mode 100644
index 0000000..473b2ec
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_argparse.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_attrs.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_attrs.cpython-37.pyc
new file mode 100644
index 0000000..8a5e3e2
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_attrs.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_builtin_inference.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_builtin_inference.cpython-37.pyc
new file mode 100644
index 0000000..f7110e3
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_builtin_inference.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_collections.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_collections.cpython-37.pyc
new file mode 100644
index 0000000..9f6475c
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_collections.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_curses.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_curses.cpython-37.pyc
new file mode 100644
index 0000000..89f1579
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_curses.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_dateutil.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_dateutil.cpython-37.pyc
new file mode 100644
index 0000000..e305bc0
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_dateutil.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_fstrings.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_fstrings.cpython-37.pyc
new file mode 100644
index 0000000..de0b1c9
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_fstrings.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_functools.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_functools.cpython-37.pyc
new file mode 100644
index 0000000..fbb970c
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_functools.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_gi.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_gi.cpython-37.pyc
new file mode 100644
index 0000000..a198b13
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_gi.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_hashlib.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_hashlib.cpython-37.pyc
new file mode 100644
index 0000000..de9ad58
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_hashlib.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_io.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_io.cpython-37.pyc
new file mode 100644
index 0000000..e879d1b
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_io.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_mechanize.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_mechanize.cpython-37.pyc
new file mode 100644
index 0000000..235bea9
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_mechanize.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_multiprocessing.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_multiprocessing.cpython-37.pyc
new file mode 100644
index 0000000..5f68a42
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_multiprocessing.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_namedtuple_enum.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_namedtuple_enum.cpython-37.pyc
new file mode 100644
index 0000000..7eddd1a
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_namedtuple_enum.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_nose.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_nose.cpython-37.pyc
new file mode 100644
index 0000000..91dc680
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_nose.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy.cpython-37.pyc
new file mode 100644
index 0000000..a708632
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_pkg_resources.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_pkg_resources.cpython-37.pyc
new file mode 100644
index 0000000..dde3db2
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_pkg_resources.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_pytest.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_pytest.cpython-37.pyc
new file mode 100644
index 0000000..1207cf3
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_pytest.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_qt.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_qt.cpython-37.pyc
new file mode 100644
index 0000000..131753f
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_qt.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_random.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_random.cpython-37.pyc
new file mode 100644
index 0000000..9f35767
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_random.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_re.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_re.cpython-37.pyc
new file mode 100644
index 0000000..604246f
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_re.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_six.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_six.cpython-37.pyc
new file mode 100644
index 0000000..29daef7
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_six.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_ssl.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_ssl.cpython-37.pyc
new file mode 100644
index 0000000..2947d8a
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_ssl.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_subprocess.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_subprocess.cpython-37.pyc
new file mode 100644
index 0000000..cd7031a
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_subprocess.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_threading.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_threading.cpython-37.pyc
new file mode 100644
index 0000000..3a55768
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_threading.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_typing.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_typing.cpython-37.pyc
new file mode 100644
index 0000000..3401a84
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_typing.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_uuid.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_uuid.cpython-37.pyc
new file mode 100644
index 0000000..d93ed26
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/brain/__pycache__/brain_uuid.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_argparse.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_argparse.py
new file mode 100644
index 0000000..d489911
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_argparse.py
@@ -0,0 +1,33 @@
+from astroid import MANAGER, arguments, nodes, inference_tip, UseInferenceDefault
+
+
+def infer_namespace(node, context=None):
+ callsite = arguments.CallSite.from_call(node)
+ if not callsite.keyword_arguments:
+ # Cannot make sense of it.
+ raise UseInferenceDefault()
+
+ class_node = nodes.ClassDef("Namespace", "docstring")
+ class_node.parent = node.parent
+ for attr in set(callsite.keyword_arguments):
+ fake_node = nodes.EmptyNode()
+ fake_node.parent = class_node
+ fake_node.attrname = attr
+ class_node.instance_attrs[attr] = [fake_node]
+ return iter((class_node.instantiate_class(),))
+
+
+def _looks_like_namespace(node):
+ func = node.func
+ if isinstance(func, nodes.Attribute):
+ return (
+ func.attrname == "Namespace"
+ and isinstance(func.expr, nodes.Name)
+ and func.expr.name == "argparse"
+ )
+ return False
+
+
+MANAGER.register_transform(
+ nodes.Call, inference_tip(infer_namespace), _looks_like_namespace
+)
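brain_argparse.py teaches inference that keyword arguments passed to `argparse.Namespace(...)` become attributes of the resulting object. A hedged sketch of the effect (not part of the patch; it assumes astroid's brain plugins are loaded on import, which is the default):

    import astroid

    node = astroid.extract_node("""
    import argparse
    argparse.Namespace(answer=42)  #@
    """)
    inferred = next(node.infer())
    # With the transform in place, this lookup should succeed instead of
    # raising AttributeInferenceError.
    print(inferred.getattr("answer"))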
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_attrs.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_attrs.py
new file mode 100644
index 0000000..74d1aa3
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_attrs.py
@@ -0,0 +1,60 @@
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+"""
+Astroid hook for the attrs library
+
+Without this hook, pylint reports unsupported-assignment-operation
+for attrs classes
+"""
+
+import astroid
+from astroid import MANAGER
+
+
+ATTR_IB = "attr.ib"
+
+
+def is_decorated_with_attrs(
+ node, decorator_names=("attr.s", "attr.attrs", "attr.attributes")
+):
+ """Return True if a decorated node has
+ an attr decorator applied."""
+ if not node.decorators:
+ return False
+ for decorator_attribute in node.decorators.nodes:
+ if isinstance(decorator_attribute, astroid.Call): # decorator with arguments
+ decorator_attribute = decorator_attribute.func
+ if decorator_attribute.as_string() in decorator_names:
+ return True
+ return False
+
+
+def attr_attributes_transform(node):
+ """Given that the ClassNode has an attr decorator,
+ rewrite class attributes as instance attributes
+ """
+ # Astroid can't infer this attribute properly
+ # Prevents https://github.com/PyCQA/pylint/issues/1884
+ node.locals["__attrs_attrs__"] = [astroid.Unknown(parent=node)]
+
+ for cdefbodynode in node.body:
+ if not isinstance(cdefbodynode, astroid.Assign):
+ continue
+ if isinstance(cdefbodynode.value, astroid.Call):
+ if cdefbodynode.value.func.as_string() != ATTR_IB:
+ continue
+ else:
+ continue
+ for target in cdefbodynode.targets:
+
+ rhs_node = astroid.Unknown(
+ lineno=cdefbodynode.lineno,
+ col_offset=cdefbodynode.col_offset,
+ parent=cdefbodynode,
+ )
+ node.locals[target.name] = [rhs_node]
+
+
+MANAGER.register_transform(
+ astroid.ClassDef, attr_attributes_transform, is_decorated_with_attrs
+)
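brain_attrs.py rewrites `attr.ib()` class attributes as instance attributes so that assignments on instances are not flagged. For reference, the kind of class the transform targets (illustration only, not part of the patch; it assumes the attrs package is installed):

    import attr

    @attr.s
    class Point:
        x = attr.ib(default=0)
        y = attr.ib(default=0)

    p = Point()
    p.x = 10  # without the hook, pylint may report unsupported-assignment-operation here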
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_builtin_inference.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_builtin_inference.py
new file mode 100644
index 0000000..2655095
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_builtin_inference.py
@@ -0,0 +1,823 @@
+# Copyright (c) 2014-2018 Claudiu Popa
+# Copyright (c) 2014-2015 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2015 Rene Zhang
+# Copyright (c) 2018 Bryce Guinta
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Astroid hooks for various builtins."""
+
+from functools import partial
+from textwrap import dedent
+
+import six
+from astroid import (
+ MANAGER,
+ Instance,
+ UseInferenceDefault,
+ AttributeInferenceError,
+ inference_tip,
+ InferenceError,
+ NameInferenceError,
+ AstroidTypeError,
+ MroError,
+)
+from astroid import arguments
+from astroid.builder import AstroidBuilder
+from astroid import helpers
+from astroid import nodes
+from astroid import objects
+from astroid import scoped_nodes
+from astroid import util
+
+
+OBJECT_DUNDER_NEW = "object.__new__"
+
+
+def _extend_str(class_node, rvalue):
+ """function to extend builtin str/unicode class"""
+ code = dedent(
+ """
+ class whatever(object):
+ def join(self, iterable):
+ return {rvalue}
+ def replace(self, old, new, count=None):
+ return {rvalue}
+ def format(self, *args, **kwargs):
+ return {rvalue}
+ def encode(self, encoding='ascii', errors=None):
+ return ''
+ def decode(self, encoding='ascii', errors=None):
+ return u''
+ def capitalize(self):
+ return {rvalue}
+ def title(self):
+ return {rvalue}
+ def lower(self):
+ return {rvalue}
+ def upper(self):
+ return {rvalue}
+ def swapcase(self):
+ return {rvalue}
+ def index(self, sub, start=None, end=None):
+ return 0
+ def find(self, sub, start=None, end=None):
+ return 0
+ def count(self, sub, start=None, end=None):
+ return 0
+ def strip(self, chars=None):
+ return {rvalue}
+ def lstrip(self, chars=None):
+ return {rvalue}
+ def rstrip(self, chars=None):
+ return {rvalue}
+ def rjust(self, width, fillchar=None):
+ return {rvalue}
+ def center(self, width, fillchar=None):
+ return {rvalue}
+ def ljust(self, width, fillchar=None):
+ return {rvalue}
+ """
+ )
+ code = code.format(rvalue=rvalue)
+ fake = AstroidBuilder(MANAGER).string_build(code)["whatever"]
+ for method in fake.mymethods():
+ method.parent = class_node
+ method.lineno = None
+ method.col_offset = None
+ if "__class__" in method.locals:
+ method.locals["__class__"] = [class_node]
+ class_node.locals[method.name] = [method]
+ method.parent = class_node
+
+
+def _extend_builtins(class_transforms):
+ from astroid.bases import BUILTINS
+
+ builtin_ast = MANAGER.astroid_cache[BUILTINS]
+ for class_name, transform in class_transforms.items():
+ transform(builtin_ast[class_name])
+
+
+_extend_builtins(
+ {
+ "bytes": partial(_extend_str, rvalue="b''"),
+ "str": partial(_extend_str, rvalue="''"),
+ }
+)
+
+
+def _builtin_filter_predicate(node, builtin_name):
+ if isinstance(node.func, nodes.Name) and node.func.name == builtin_name:
+ return True
+ if isinstance(node.func, nodes.Attribute):
+ return (
+ node.func.attrname == "fromkeys"
+ and isinstance(node.func.expr, nodes.Name)
+ and node.func.expr.name == "dict"
+ )
+ return False
+
+
+def register_builtin_transform(transform, builtin_name):
+ """Register a new transform function for the given *builtin_name*.
+
+ The transform function must accept two parameters, a node and
+ an optional context.
+ """
+
+ def _transform_wrapper(node, context=None):
+ result = transform(node, context=context)
+ if result:
+ if not result.parent:
+ # Let the transformation function determine
+ # the parent for its result. Otherwise,
+ # we set it to be the node we transformed from.
+ result.parent = node
+
+ if result.lineno is None:
+ result.lineno = node.lineno
+ if result.col_offset is None:
+ result.col_offset = node.col_offset
+ return iter([result])
+
+ MANAGER.register_transform(
+ nodes.Call,
+ inference_tip(_transform_wrapper),
+ partial(_builtin_filter_predicate, builtin_name=builtin_name),
+ )
+
+
+def _generic_inference(node, context, node_type, transform):
+ args = node.args
+ if not args:
+ return node_type()
+ if len(node.args) > 1:
+ raise UseInferenceDefault()
+
+ arg, = args
+ transformed = transform(arg)
+ if not transformed:
+ try:
+ inferred = next(arg.infer(context=context))
+ except (InferenceError, StopIteration):
+ raise UseInferenceDefault()
+ if inferred is util.Uninferable:
+ raise UseInferenceDefault()
+ transformed = transform(inferred)
+ if not transformed or transformed is util.Uninferable:
+ raise UseInferenceDefault()
+ return transformed
+
+
+def _generic_transform(arg, klass, iterables, build_elts):
+ if isinstance(arg, klass):
+ return arg
+ elif isinstance(arg, iterables):
+ if not all(isinstance(elt, nodes.Const) for elt in arg.elts):
+ raise UseInferenceDefault()
+ elts = [elt.value for elt in arg.elts]
+ elif isinstance(arg, nodes.Dict):
+ if not all(isinstance(elt[0], nodes.Const) for elt in arg.items):
+ raise UseInferenceDefault()
+ elts = [item[0].value for item in arg.items]
+ elif isinstance(arg, nodes.Const) and isinstance(
+ arg.value, (six.string_types, six.binary_type)
+ ):
+ elts = arg.value
+ else:
+ return
+ return klass.from_constants(elts=build_elts(elts))
+
+
+def _infer_builtin(node, context, klass=None, iterables=None, build_elts=None):
+ transform_func = partial(
+ _generic_transform, klass=klass, iterables=iterables, build_elts=build_elts
+ )
+
+ return _generic_inference(node, context, klass, transform_func)
+
+
+# pylint: disable=invalid-name
+infer_tuple = partial(
+ _infer_builtin,
+ klass=nodes.Tuple,
+ iterables=(
+ nodes.List,
+ nodes.Set,
+ objects.FrozenSet,
+ objects.DictItems,
+ objects.DictKeys,
+ objects.DictValues,
+ ),
+ build_elts=tuple,
+)
+
+infer_list = partial(
+ _infer_builtin,
+ klass=nodes.List,
+ iterables=(
+ nodes.Tuple,
+ nodes.Set,
+ objects.FrozenSet,
+ objects.DictItems,
+ objects.DictKeys,
+ objects.DictValues,
+ ),
+ build_elts=list,
+)
+
+infer_set = partial(
+ _infer_builtin,
+ klass=nodes.Set,
+ iterables=(nodes.List, nodes.Tuple, objects.FrozenSet, objects.DictKeys),
+ build_elts=set,
+)
+
+infer_frozenset = partial(
+ _infer_builtin,
+ klass=objects.FrozenSet,
+ iterables=(nodes.List, nodes.Tuple, nodes.Set, objects.FrozenSet, objects.DictKeys),
+ build_elts=frozenset,
+)
+
+
+def _get_elts(arg, context):
+ is_iterable = lambda n: isinstance(n, (nodes.List, nodes.Tuple, nodes.Set))
+ try:
+ inferred = next(arg.infer(context))
+ except (InferenceError, NameInferenceError):
+ raise UseInferenceDefault()
+ if isinstance(inferred, nodes.Dict):
+ items = inferred.items
+ elif is_iterable(inferred):
+ items = []
+ for elt in inferred.elts:
+            # If an item is not a pair of two items,
+            # then fall back to the default inference.
+            # Also, take into consideration only hashable items:
+            # tuples and consts. Names are accepted as well.
+ if not is_iterable(elt):
+ raise UseInferenceDefault()
+ if len(elt.elts) != 2:
+ raise UseInferenceDefault()
+ if not isinstance(elt.elts[0], (nodes.Tuple, nodes.Const, nodes.Name)):
+ raise UseInferenceDefault()
+ items.append(tuple(elt.elts))
+ else:
+ raise UseInferenceDefault()
+ return items
+
+
+def infer_dict(node, context=None):
+ """Try to infer a dict call to a Dict node.
+
+ The function treats the following cases:
+
+ * dict()
+ * dict(mapping)
+ * dict(iterable)
+ * dict(iterable, **kwargs)
+ * dict(mapping, **kwargs)
+ * dict(**kwargs)
+
+    If a case can't be inferred, we'll fall back to the default inference.
+ """
+ call = arguments.CallSite.from_call(node)
+ if call.has_invalid_arguments() or call.has_invalid_keywords():
+ raise UseInferenceDefault
+
+ args = call.positional_arguments
+ kwargs = list(call.keyword_arguments.items())
+
+ if not args and not kwargs:
+ # dict()
+ return nodes.Dict()
+ elif kwargs and not args:
+ # dict(a=1, b=2, c=4)
+ items = [(nodes.Const(key), value) for key, value in kwargs]
+ elif len(args) == 1 and kwargs:
+ # dict(some_iterable, b=2, c=4)
+ elts = _get_elts(args[0], context)
+ keys = [(nodes.Const(key), value) for key, value in kwargs]
+ items = elts + keys
+ elif len(args) == 1:
+ items = _get_elts(args[0], context)
+ else:
+ raise UseInferenceDefault()
+
+ value = nodes.Dict(
+ col_offset=node.col_offset, lineno=node.lineno, parent=node.parent
+ )
+ value.postinit(items)
+ return value
+
+
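With `infer_dict` registered for the `dict` builtin (the registration happens further down in this module), a `dict(...)` call infers to a concrete Dict node. A hedged sketch (not part of the patch):

    import astroid

    node = astroid.extract_node("dict(a=1, b=2)")
    inferred = next(node.infer())
    # Expected: a Dict node whose as_string() is roughly {'a': 1, 'b': 2}
    print(type(inferred).__name__, inferred.as_string())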
+def infer_super(node, context=None):
+ """Understand super calls.
+
+ There are some restrictions for what can be understood:
+
+ * unbounded super (one argument form) is not understood.
+
+ * if the super call is not inside a function (classmethod or method),
+ then the default inference will be used.
+
+ * if the super arguments can't be inferred, the default inference
+ will be used.
+ """
+ if len(node.args) == 1:
+ # Ignore unbounded super.
+ raise UseInferenceDefault
+
+ scope = node.scope()
+ if not isinstance(scope, nodes.FunctionDef):
+ # Ignore non-method uses of super.
+ raise UseInferenceDefault
+ if scope.type not in ("classmethod", "method"):
+ # Not interested in staticmethods.
+ raise UseInferenceDefault
+
+ cls = scoped_nodes.get_wrapping_class(scope)
+ if not len(node.args):
+ mro_pointer = cls
+ # If we are in a classmethod, the interpreter will automatically
+ # fill in the class as the second argument, not an instance.
+ if scope.type == "classmethod":
+ mro_type = cls
+ else:
+ mro_type = cls.instantiate_class()
+ else:
+ try:
+ mro_pointer = next(node.args[0].infer(context=context))
+ except InferenceError:
+ raise UseInferenceDefault
+ try:
+ mro_type = next(node.args[1].infer(context=context))
+ except InferenceError:
+ raise UseInferenceDefault
+
+ if mro_pointer is util.Uninferable or mro_type is util.Uninferable:
+ # No way we could understand this.
+ raise UseInferenceDefault
+
+ super_obj = objects.Super(
+ mro_pointer=mro_pointer, mro_type=mro_type, self_class=cls, scope=scope
+ )
+ super_obj.parent = node
+ return super_obj
+
+
+def _infer_getattr_args(node, context):
+ if len(node.args) not in (2, 3):
+ # Not a valid getattr call.
+ raise UseInferenceDefault
+
+ try:
+ obj = next(node.args[0].infer(context=context))
+ attr = next(node.args[1].infer(context=context))
+ except InferenceError:
+ raise UseInferenceDefault
+
+ if obj is util.Uninferable or attr is util.Uninferable:
+ # If one of the arguments is something we can't infer,
+ # then also make the result of the getattr call something
+ # which is unknown.
+ return util.Uninferable, util.Uninferable
+
+ is_string = isinstance(attr, nodes.Const) and isinstance(
+ attr.value, six.string_types
+ )
+ if not is_string:
+ raise UseInferenceDefault
+
+ return obj, attr.value
+
+
+def infer_getattr(node, context=None):
+ """Understand getattr calls
+
+ If one of the arguments is an Uninferable object, then the
+ result will be an Uninferable object. Otherwise, the normal attribute
+ lookup will be done.
+ """
+ obj, attr = _infer_getattr_args(node, context)
+ if (
+ obj is util.Uninferable
+ or attr is util.Uninferable
+ or not hasattr(obj, "igetattr")
+ ):
+ return util.Uninferable
+
+ try:
+ return next(obj.igetattr(attr, context=context))
+ except (StopIteration, InferenceError, AttributeInferenceError):
+ if len(node.args) == 3:
+ # Try to infer the default and return it instead.
+ try:
+ return next(node.args[2].infer(context=context))
+ except InferenceError:
+ raise UseInferenceDefault
+
+ raise UseInferenceDefault
+
+
+def infer_hasattr(node, context=None):
+ """Understand hasattr calls
+
+ There are three possible outcomes when calling hasattr:
+ Const(False) when we are sure that the object doesn't have the
+ intended attribute, Const(True) when we know that it does, and
+ Uninferable when we are unsure of the outcome of the call.
+ """
+ try:
+ obj, attr = _infer_getattr_args(node, context)
+ if (
+ obj is util.Uninferable
+ or attr is util.Uninferable
+ or not hasattr(obj, "getattr")
+ ):
+ return util.Uninferable
+ obj.getattr(attr, context=context)
+ except UseInferenceDefault:
+ # Can't infer something from this function call.
+ return util.Uninferable
+ except AttributeInferenceError:
+ # Doesn't have it.
+ return nodes.Const(False)
+ return nodes.Const(True)
+
+
+def infer_callable(node, context=None):
+ """Understand callable calls
+
+ This follows Python's semantics, where an object
+ is callable if it provides an attribute __call__,
+ even though that attribute is something which can't be
+ called.
+ """
+ if len(node.args) != 1:
+ # Invalid callable call.
+ raise UseInferenceDefault
+
+ argument = node.args[0]
+ try:
+ inferred = next(argument.infer(context=context))
+ except InferenceError:
+ return util.Uninferable
+ if inferred is util.Uninferable:
+ return util.Uninferable
+ return nodes.Const(inferred.callable())
+
+
+def infer_bool(node, context=None):
+ """Understand bool calls."""
+ if len(node.args) > 1:
+ # Invalid bool call.
+ raise UseInferenceDefault
+
+ if not node.args:
+ return nodes.Const(False)
+
+ argument = node.args[0]
+ try:
+ inferred = next(argument.infer(context=context))
+ except InferenceError:
+ return util.Uninferable
+ if inferred is util.Uninferable:
+ return util.Uninferable
+
+ bool_value = inferred.bool_value()
+ if bool_value is util.Uninferable:
+ return util.Uninferable
+ return nodes.Const(bool_value)
+
+
+def infer_type(node, context=None):
+ """Understand the one-argument form of *type*."""
+ if len(node.args) != 1:
+ raise UseInferenceDefault
+
+ return helpers.object_type(node.args[0], context)
+
+
+def infer_slice(node, context=None):
+ """Understand `slice` calls."""
+ args = node.args
+ if not 0 < len(args) <= 3:
+ raise UseInferenceDefault
+
+ args = list(map(helpers.safe_infer, args))
+ for arg in args:
+ if not arg or arg is util.Uninferable:
+ raise UseInferenceDefault
+ if not isinstance(arg, nodes.Const):
+ raise UseInferenceDefault
+ if not isinstance(arg.value, (type(None), int)):
+ raise UseInferenceDefault
+
+ if len(args) < 3:
+ # Make sure we have 3 arguments.
+ args.extend([None] * (3 - len(args)))
+
+ slice_node = nodes.Slice(
+ lineno=node.lineno, col_offset=node.col_offset, parent=node.parent
+ )
+ slice_node.postinit(*args)
+ return slice_node
+
+
+def _infer_object__new__decorator(node, context=None):
+ # Instantiate class immediately
+ # since that's what @object.__new__ does
+ return iter((node.instantiate_class(),))
+
+
+def _infer_object__new__decorator_check(node):
+ """Predicate before inference_tip
+
+ Check if the given ClassDef has an @object.__new__ decorator
+ """
+ if not node.decorators:
+ return False
+
+ for decorator in node.decorators.nodes:
+ if isinstance(decorator, nodes.Attribute):
+ if decorator.as_string() == OBJECT_DUNDER_NEW:
+ return True
+ return False
+
+
+def infer_issubclass(callnode, context=None):
+ """Infer issubclass() calls
+
+ :param nodes.Call callnode: an `issubclass` call
+ :param InferenceContext: the context for the inference
+ :rtype nodes.Const: Boolean Const value of the `issubclass` call
+ :raises UseInferenceDefault: If the node cannot be inferred
+ """
+ call = arguments.CallSite.from_call(callnode)
+ if call.keyword_arguments:
+ # issubclass doesn't support keyword arguments
+ raise UseInferenceDefault("TypeError: issubclass() takes no keyword arguments")
+ if len(call.positional_arguments) != 2:
+ raise UseInferenceDefault(
+ "Expected two arguments, got {count}".format(
+ count=len(call.positional_arguments)
+ )
+ )
+ # The left hand argument is the obj to be checked
+ obj_node, class_or_tuple_node = call.positional_arguments
+
+ try:
+ obj_type = next(obj_node.infer(context=context))
+ except InferenceError as exc:
+ raise UseInferenceDefault from exc
+ if not isinstance(obj_type, nodes.ClassDef):
+ raise UseInferenceDefault("TypeError: arg 1 must be class")
+
+ # The right hand argument is the class(es) that the given
+ # object is to be checked against.
+ try:
+ class_container = _class_or_tuple_to_container(
+ class_or_tuple_node, context=context
+ )
+ except InferenceError as exc:
+ raise UseInferenceDefault from exc
+ try:
+ issubclass_bool = helpers.object_issubclass(obj_type, class_container, context)
+ except AstroidTypeError as exc:
+ raise UseInferenceDefault("TypeError: " + str(exc)) from exc
+ except MroError as exc:
+ raise UseInferenceDefault from exc
+ return nodes.Const(issubclass_bool)
+
+
+def infer_isinstance(callnode, context=None):
+ """Infer isinstance calls
+
+ :param nodes.Call callnode: an isinstance call
+ :param InferenceContext: context for call
+ (currently unused but is a common interface for inference)
+ :rtype nodes.Const: Boolean Const value of isinstance call
+
+ :raises UseInferenceDefault: If the node cannot be inferred
+ """
+ call = arguments.CallSite.from_call(callnode)
+ if call.keyword_arguments:
+ # isinstance doesn't support keyword arguments
+ raise UseInferenceDefault("TypeError: isinstance() takes no keyword arguments")
+ if len(call.positional_arguments) != 2:
+ raise UseInferenceDefault(
+ "Expected two arguments, got {count}".format(
+ count=len(call.positional_arguments)
+ )
+ )
+ # The left hand argument is the obj to be checked
+ obj_node, class_or_tuple_node = call.positional_arguments
+ # The right hand argument is the class(es) that the given
+ # obj is to be checked against.
+ try:
+ class_container = _class_or_tuple_to_container(
+ class_or_tuple_node, context=context
+ )
+ except InferenceError:
+ raise UseInferenceDefault
+ try:
+ isinstance_bool = helpers.object_isinstance(obj_node, class_container, context)
+ except AstroidTypeError as exc:
+ raise UseInferenceDefault("TypeError: " + str(exc))
+ except MroError as exc:
+ raise UseInferenceDefault from exc
+ if isinstance_bool is util.Uninferable:
+ raise UseInferenceDefault
+ return nodes.Const(isinstance_bool)
+
+
+def _class_or_tuple_to_container(node, context=None):
+ # Move inferences results into container
+ # to simplify later logic
+ # raises InferenceError if any of the inferences fall through
+ node_infer = next(node.infer(context=context))
+ # arg2 MUST be a type or a TUPLE of types
+ # for isinstance
+ if isinstance(node_infer, nodes.Tuple):
+ class_container = [
+ next(node.infer(context=context)) for node in node_infer.elts
+ ]
+ class_container = [
+ klass_node for klass_node in class_container if klass_node is not None
+ ]
+ else:
+ class_container = [node_infer]
+ return class_container
+
+
+def infer_len(node, context=None):
+ """Infer length calls
+
+ :param nodes.Call node: len call to infer
+ :param context.InferenceContext: node context
+ :rtype nodes.Const: a Const node with the inferred length, if possible
+ """
+ call = arguments.CallSite.from_call(node)
+ if call.keyword_arguments:
+ raise UseInferenceDefault("TypeError: len() must take no keyword arguments")
+ if len(call.positional_arguments) != 1:
+ raise UseInferenceDefault(
+ "TypeError: len() must take exactly one argument "
+ "({len}) given".format(len=len(call.positional_arguments))
+ )
+ [argument_node] = call.positional_arguments
+ try:
+ return nodes.Const(helpers.object_len(argument_node))
+ except (AstroidTypeError, InferenceError) as exc:
+ raise UseInferenceDefault(str(exc)) from exc
+
+
+def infer_str(node, context=None):
+ """Infer str() calls
+
+ :param nodes.Call node: str() call to infer
+ :param context.InferenceContext: node context
+ :rtype nodes.Const: a Const containing an empty string
+ """
+ call = arguments.CallSite.from_call(node)
+ if call.keyword_arguments:
+ raise UseInferenceDefault("TypeError: str() must take no keyword arguments")
+ try:
+ return nodes.Const("")
+ except (AstroidTypeError, InferenceError) as exc:
+ raise UseInferenceDefault(str(exc)) from exc
+
+
+def infer_int(node, context=None):
+ """Infer int() calls
+
+ :param nodes.Call node: int() call to infer
+ :param context.InferenceContext: node context
+ :rtype nodes.Const: a Const containing the integer value of the int() call
+ """
+ call = arguments.CallSite.from_call(node)
+ if call.keyword_arguments:
+ raise UseInferenceDefault("TypeError: int() must take no keyword arguments")
+
+ if call.positional_arguments:
+ try:
+ first_value = next(call.positional_arguments[0].infer(context=context))
+ except InferenceError as exc:
+ raise UseInferenceDefault(str(exc)) from exc
+
+ if first_value is util.Uninferable:
+ raise UseInferenceDefault
+
+ if isinstance(first_value, nodes.Const) and isinstance(
+ first_value.value, (int, str)
+ ):
+ try:
+ actual_value = int(first_value.value)
+ except ValueError:
+ return nodes.Const(0)
+ return nodes.Const(actual_value)
+
+ return nodes.Const(0)
+
+
+def infer_dict_fromkeys(node, context=None):
+ """Infer dict.fromkeys
+
+ :param nodes.Call node: dict.fromkeys() call to infer
+ :param context.InferenceContext: node context
+ :rtype nodes.Dict:
+ a Dictionary containing the values that astroid was able to infer.
+ In case the inference failed for any reason, an empty dictionary
+ will be inferred instead.
+ """
+
+ def _build_dict_with_elements(elements):
+ new_node = nodes.Dict(
+ col_offset=node.col_offset, lineno=node.lineno, parent=node.parent
+ )
+ new_node.postinit(elements)
+ return new_node
+
+ call = arguments.CallSite.from_call(node)
+ if call.keyword_arguments:
+ raise UseInferenceDefault("TypeError: int() must take no keyword arguments")
+ if len(call.positional_arguments) not in {1, 2}:
+ raise UseInferenceDefault(
+ "TypeError: Needs between 1 and 2 positional arguments"
+ )
+
+ default = nodes.Const(None)
+ values = call.positional_arguments[0]
+ try:
+ inferred_values = next(values.infer(context=context))
+ except InferenceError:
+ return _build_dict_with_elements([])
+ if inferred_values is util.Uninferable:
+ return _build_dict_with_elements([])
+
+ # Limit to a couple of potential values, as this can become pretty complicated
+ accepted_iterable_elements = (nodes.Const,)
+ if isinstance(inferred_values, (nodes.List, nodes.Set, nodes.Tuple)):
+ elements = inferred_values.elts
+ for element in elements:
+ if not isinstance(element, accepted_iterable_elements):
+ # Fallback to an empty dict
+ return _build_dict_with_elements([])
+
+ elements_with_value = [(element, default) for element in elements]
+ return _build_dict_with_elements(elements_with_value)
+
+ elif isinstance(inferred_values, nodes.Const) and isinstance(
+ inferred_values.value, (str, bytes)
+ ):
+ elements = [
+ (nodes.Const(element), default) for element in inferred_values.value
+ ]
+ return _build_dict_with_elements(elements)
+ elif isinstance(inferred_values, nodes.Dict):
+ keys = inferred_values.itered()
+ for key in keys:
+ if not isinstance(key, accepted_iterable_elements):
+ # Fallback to an empty dict
+ return _build_dict_with_elements([])
+
+ elements_with_value = [(element, default) for element in keys]
+ return _build_dict_with_elements(elements_with_value)
+
+ # Fallback to an empty dictionary
+ return _build_dict_with_elements([])
+
+
+# Builtins inference
+register_builtin_transform(infer_bool, "bool")
+register_builtin_transform(infer_super, "super")
+register_builtin_transform(infer_callable, "callable")
+register_builtin_transform(infer_getattr, "getattr")
+register_builtin_transform(infer_hasattr, "hasattr")
+register_builtin_transform(infer_tuple, "tuple")
+register_builtin_transform(infer_set, "set")
+register_builtin_transform(infer_list, "list")
+register_builtin_transform(infer_dict, "dict")
+register_builtin_transform(infer_frozenset, "frozenset")
+register_builtin_transform(infer_type, "type")
+register_builtin_transform(infer_slice, "slice")
+register_builtin_transform(infer_isinstance, "isinstance")
+register_builtin_transform(infer_issubclass, "issubclass")
+register_builtin_transform(infer_len, "len")
+register_builtin_transform(infer_str, "str")
+register_builtin_transform(infer_int, "int")
+register_builtin_transform(infer_dict_fromkeys, "dict.fromkeys")
+
+
+# Infer object.__new__ calls
+MANAGER.register_transform(
+ nodes.ClassDef,
+ inference_tip(_infer_object__new__decorator),
+ _infer_object__new__decorator_check,
+)
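The register_builtin_transform calls above are what let astroid fold constant builtin calls into concrete nodes during inference. A minimal sketch of the observable effect, assuming an astroid release that ships this module (the commented values are the intended results, not guaranteed output on every version):

    import astroid

    # len() over a literal container is folded into a Const by infer_len.
    length_call = astroid.extract_node("len([1, 2, 3])")
    print(next(length_call.infer()).value)        # 3

    # isinstance()/issubclass() with inferable arguments become Const booleans.
    check = astroid.extract_node("isinstance(1, int)")
    print(next(check.infer()).value)              # True

    # dict(a=1, b=2) is rebuilt as a Dict node by infer_dict.
    mapping = astroid.extract_node("dict(a=1, b=2)")
    print(type(next(mapping.infer())).__name__)   # Dict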
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_collections.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_collections.py
new file mode 100644
index 0000000..b9eb9b9
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_collections.py
@@ -0,0 +1,82 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2016, 2018 Claudiu Popa
+# Copyright (c) 2016-2017 Łukasz Rogalski
+# Copyright (c) 2017 Derek Gustafson
+# Copyright (c) 2018 Ioana Tagirta
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+import sys
+
+import astroid
+
+PY34 = sys.version_info >= (3, 4)
+PY35 = sys.version_info >= (3, 5)
+
+
+def _collections_transform():
+ return astroid.parse(
+ """
+ class defaultdict(dict):
+ default_factory = None
+ def __missing__(self, key): pass
+ def __getitem__(self, key): return default_factory
+
+ """
+ + _deque_mock()
+ + _ordered_dict_mock()
+ )
+
+
+def _deque_mock():
+ base_deque_class = """
+ class deque(object):
+ maxlen = 0
+ def __init__(self, iterable=None, maxlen=None):
+ self.iterable = iterable or []
+ def append(self, x): pass
+ def appendleft(self, x): pass
+ def clear(self): pass
+ def count(self, x): return 0
+ def extend(self, iterable): pass
+ def extendleft(self, iterable): pass
+ def pop(self): return self.iterable[0]
+ def popleft(self): return self.iterable[0]
+ def remove(self, value): pass
+ def reverse(self): return reversed(self.iterable)
+ def rotate(self, n=1): return self
+ def __iter__(self): return self
+ def __reversed__(self): return self.iterable[::-1]
+ def __getitem__(self, index): return self.iterable[index]
+ def __setitem__(self, index, value): pass
+ def __delitem__(self, index): pass
+ def __bool__(self): return bool(self.iterable)
+ def __nonzero__(self): return bool(self.iterable)
+ def __contains__(self, o): return o in self.iterable
+ def __len__(self): return len(self.iterable)
+ def __copy__(self): return deque(self.iterable)"""
+ if PY35:
+ base_deque_class += """
+ def copy(self): return deque(self.iterable)
+ def index(self, x, start=0, end=0): return 0
+ def insert(self, x, i): pass
+ def __add__(self, other): pass
+ def __iadd__(self, other): pass
+ def __mul__(self, other): pass
+ def __imul__(self, other): pass
+ def __rmul__(self, other): pass"""
+ return base_deque_class
+
+
+def _ordered_dict_mock():
+ base_ordered_dict_class = """
+ class OrderedDict(dict):
+ def __reversed__(self): return self[::-1]
+ """
+ if PY34:
+ base_ordered_dict_class += """
+ def move_to_end(self, key, last=False): pass"""
+ return base_ordered_dict_class
+
+
+astroid.register_module_extender(astroid.MANAGER, "collections", _collections_transform)
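brain_collections backs the C-implemented collections classes with plain-Python stubs so that attribute lookup on them keeps working. A rough usage sketch, assuming astroid is importable (values are indicative):

    import astroid

    node = astroid.extract_node("import collections\ncollections.deque()")
    instance = next(node.infer())
    print(instance.name)                      # 'deque' -- the stub class above
    print(bool(instance.getattr("append")))   # True: resolved from the stub body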
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_curses.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_curses.py
new file mode 100644
index 0000000..68e88b9
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_curses.py
@@ -0,0 +1,179 @@
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+import astroid
+
+
+def _curses_transform():
+ return astroid.parse(
+ """
+ A_ALTCHARSET = 1
+ A_BLINK = 1
+ A_BOLD = 1
+ A_DIM = 1
+ A_INVIS = 1
+ A_ITALIC = 1
+ A_NORMAL = 1
+ A_PROTECT = 1
+ A_REVERSE = 1
+ A_STANDOUT = 1
+ A_UNDERLINE = 1
+ A_HORIZONTAL = 1
+ A_LEFT = 1
+ A_LOW = 1
+ A_RIGHT = 1
+ A_TOP = 1
+ A_VERTICAL = 1
+ A_CHARTEXT = 1
+ A_ATTRIBUTES = 1
+ A_CHARTEXT = 1
+ A_COLOR = 1
+ KEY_MIN = 1
+ KEY_BREAK = 1
+ KEY_DOWN = 1
+ KEY_UP = 1
+ KEY_LEFT = 1
+ KEY_RIGHT = 1
+ KEY_HOME = 1
+ KEY_BACKSPACE = 1
+ KEY_F0 = 1
+ KEY_Fn = 1
+ KEY_DL = 1
+ KEY_IL = 1
+ KEY_DC = 1
+ KEY_IC = 1
+ KEY_EIC = 1
+ KEY_CLEAR = 1
+ KEY_EOS = 1
+ KEY_EOL = 1
+ KEY_SF = 1
+ KEY_SR = 1
+ KEY_NPAGE = 1
+ KEY_PPAGE = 1
+ KEY_STAB = 1
+ KEY_CTAB = 1
+ KEY_CATAB = 1
+ KEY_ENTER = 1
+ KEY_SRESET = 1
+ KEY_RESET = 1
+ KEY_PRINT = 1
+ KEY_LL = 1
+ KEY_A1 = 1
+ KEY_A3 = 1
+ KEY_B2 = 1
+ KEY_C1 = 1
+ KEY_C3 = 1
+ KEY_BTAB = 1
+ KEY_BEG = 1
+ KEY_CANCEL = 1
+ KEY_CLOSE = 1
+ KEY_COMMAND = 1
+ KEY_COPY = 1
+ KEY_CREATE = 1
+ KEY_END = 1
+ KEY_EXIT = 1
+ KEY_FIND = 1
+ KEY_HELP = 1
+ KEY_MARK = 1
+ KEY_MESSAGE = 1
+ KEY_MOVE = 1
+ KEY_NEXT = 1
+ KEY_OPEN = 1
+ KEY_OPTIONS = 1
+ KEY_PREVIOUS = 1
+ KEY_REDO = 1
+ KEY_REFERENCE = 1
+ KEY_REFRESH = 1
+ KEY_REPLACE = 1
+ KEY_RESTART = 1
+ KEY_RESUME = 1
+ KEY_SAVE = 1
+ KEY_SBEG = 1
+ KEY_SCANCEL = 1
+ KEY_SCOMMAND = 1
+ KEY_SCOPY = 1
+ KEY_SCREATE = 1
+ KEY_SDC = 1
+ KEY_SDL = 1
+ KEY_SELECT = 1
+ KEY_SEND = 1
+ KEY_SEOL = 1
+ KEY_SEXIT = 1
+ KEY_SFIND = 1
+ KEY_SHELP = 1
+ KEY_SHOME = 1
+ KEY_SIC = 1
+ KEY_SLEFT = 1
+ KEY_SMESSAGE = 1
+ KEY_SMOVE = 1
+ KEY_SNEXT = 1
+ KEY_SOPTIONS = 1
+ KEY_SPREVIOUS = 1
+ KEY_SPRINT = 1
+ KEY_SREDO = 1
+ KEY_SREPLACE = 1
+ KEY_SRIGHT = 1
+ KEY_SRSUME = 1
+ KEY_SSAVE = 1
+ KEY_SSUSPEND = 1
+ KEY_SUNDO = 1
+ KEY_SUSPEND = 1
+ KEY_UNDO = 1
+ KEY_MOUSE = 1
+ KEY_RESIZE = 1
+ KEY_MAX = 1
+ ACS_BBSS = 1
+ ACS_BLOCK = 1
+ ACS_BOARD = 1
+ ACS_BSBS = 1
+ ACS_BSSB = 1
+ ACS_BSSS = 1
+ ACS_BTEE = 1
+ ACS_BULLET = 1
+ ACS_CKBOARD = 1
+ ACS_DARROW = 1
+ ACS_DEGREE = 1
+ ACS_DIAMOND = 1
+ ACS_GEQUAL = 1
+ ACS_HLINE = 1
+ ACS_LANTERN = 1
+ ACS_LARROW = 1
+ ACS_LEQUAL = 1
+ ACS_LLCORNER = 1
+ ACS_LRCORNER = 1
+ ACS_LTEE = 1
+ ACS_NEQUAL = 1
+ ACS_PI = 1
+ ACS_PLMINUS = 1
+ ACS_PLUS = 1
+ ACS_RARROW = 1
+ ACS_RTEE = 1
+ ACS_S1 = 1
+ ACS_S3 = 1
+ ACS_S7 = 1
+ ACS_S9 = 1
+ ACS_SBBS = 1
+ ACS_SBSB = 1
+ ACS_SBSS = 1
+ ACS_SSBB = 1
+ ACS_SSBS = 1
+ ACS_SSSB = 1
+ ACS_SSSS = 1
+ ACS_STERLING = 1
+ ACS_TTEE = 1
+ ACS_UARROW = 1
+ ACS_ULCORNER = 1
+ ACS_URCORNER = 1
+ ACS_VLINE = 1
+ COLOR_BLACK = 1
+ COLOR_BLUE = 1
+ COLOR_CYAN = 1
+ COLOR_GREEN = 1
+ COLOR_MAGENTA = 1
+ COLOR_RED = 1
+ COLOR_WHITE = 1
+ COLOR_YELLOW = 1
+ """
+ )
+
+
+astroid.register_module_extender(astroid.MANAGER, "curses", _curses_transform)
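The curses hook only needs to make the module-level constants resolvable; each one is stubbed with the placeholder value 1. A sketch, assuming a platform where the curses module can be introspected:

    import astroid

    node = astroid.extract_node("import curses\ncurses.COLOR_BLUE")
    print(next(node.infer()).value)   # 1 -- the stub's placeholder, not the real value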
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_dateutil.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_dateutil.py
new file mode 100644
index 0000000..a1c270f
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_dateutil.py
@@ -0,0 +1,28 @@
+# Copyright (c) 2015-2016 Claudiu Popa
+# Copyright (c) 2015 raylu
+# Copyright (c) 2016 Ceridwen
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Astroid hooks for dateutil"""
+
+import textwrap
+
+from astroid import MANAGER, register_module_extender
+from astroid.builder import AstroidBuilder
+
+
+def dateutil_transform():
+ return AstroidBuilder(MANAGER).string_build(
+ textwrap.dedent(
+ """
+ import datetime
+ def parse(timestr, parserinfo=None, **kwargs):
+ return datetime.datetime()
+ """
+ )
+ )
+
+
+register_module_extender(MANAGER, "dateutil.parser", dateutil_transform)
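With the extender above, dateutil.parser.parse() is understood to return a datetime.datetime, which is usually all a static checker needs. A sketch, assuming python-dateutil is installed so astroid has a module to extend:

    import astroid

    call = astroid.extract_node("from dateutil import parser\nparser.parse('2019-01-01')")
    print(next(call.infer()).pytype())   # 'datetime.datetime', via the stub's return value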
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_fstrings.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_fstrings.py
new file mode 100644
index 0000000..7d8c7b6
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_fstrings.py
@@ -0,0 +1,51 @@
+# Copyright (c) 2017 Claudiu Popa
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+import collections
+import sys
+
+import astroid
+
+
+def _clone_node_with_lineno(node, parent, lineno):
+ cls = node.__class__
+ other_fields = node._other_fields
+ _astroid_fields = node._astroid_fields
+ init_params = {"lineno": lineno, "col_offset": node.col_offset, "parent": parent}
+ postinit_params = {param: getattr(node, param) for param in _astroid_fields}
+ if other_fields:
+ init_params.update({param: getattr(node, param) for param in other_fields})
+ new_node = cls(**init_params)
+ if hasattr(node, "postinit") and _astroid_fields:
+ for param, child in postinit_params.items():
+ if child and not isinstance(child, collections.Sequence):
+ cloned_child = _clone_node_with_lineno(
+ node=child, lineno=new_node.lineno, parent=new_node
+ )
+ postinit_params[param] = cloned_child
+ new_node.postinit(**postinit_params)
+ return new_node
+
+
+def _transform_formatted_value(node):
+ if node.value and node.value.lineno == 1:
+ if node.lineno != node.value.lineno:
+ new_node = astroid.FormattedValue(
+ lineno=node.lineno, col_offset=node.col_offset, parent=node.parent
+ )
+ new_value = _clone_node_with_lineno(
+ node=node.value, lineno=node.lineno, parent=new_node
+ )
+ new_node.postinit(value=new_value, format_spec=node.format_spec)
+ return new_node
+
+
+if sys.version_info[:2] >= (3, 6):
+ # TODO: this fix tries to *patch* http://bugs.python.org/issue29051
+ # The problem is that FormattedValue.value, which is a Name node,
+ # has wrong line numbers, usually 1. This creates problems for pylint,
+ # which expects correct line numbers for things such as message control.
+ astroid.MANAGER.register_transform(
+ astroid.FormattedValue, _transform_formatted_value
+ )
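A rough way to see what the transform is guarding against, assuming Python >= 3.6 and an astroid build that applies this brain (on interpreters where the upstream bug is already fixed, the transform is simply a no-op):

    import astroid

    # An f-string placed on line 3 of the source: after parsing (and, where
    # needed, the transform above) the inner Name reports the same line.
    joined = astroid.extract_node("\n\nf'{value}'")
    formatted = joined.values[0]
    print(joined.lineno, formatted.value.lineno)   # 3 3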
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_functools.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_functools.py
new file mode 100644
index 0000000..93bc9d3
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_functools.py
@@ -0,0 +1,179 @@
+# Copyright (c) 2016, 2018 Claudiu Popa
+# Copyright (c) 2018 Bryce Guinta
+
+"""Astroid hooks for understanding functools library module."""
+from functools import partial
+from itertools import chain
+
+import astroid
+from astroid import arguments
+from astroid import BoundMethod
+from astroid import extract_node
+from astroid import helpers
+from astroid.interpreter import objectmodel
+from astroid import MANAGER
+
+
+LRU_CACHE = "functools.lru_cache"
+
+
+class LruWrappedModel(objectmodel.FunctionModel):
+ """Special attribute model for functions decorated with functools.lru_cache.
+
+ The decorator patches some functions onto the decorated function
+ at decoration time.
+ """
+
+ @property
+ def py__wrapped__(self):
+ return self._instance
+
+ @property
+ def pycache_info(self):
+ cache_info = extract_node(
+ """
+ from functools import _CacheInfo
+ _CacheInfo(0, 0, 0, 0)
+ """
+ )
+
+ class CacheInfoBoundMethod(BoundMethod):
+ def infer_call_result(self, caller, context=None):
+ yield helpers.safe_infer(cache_info)
+
+ return CacheInfoBoundMethod(proxy=self._instance, bound=self._instance)
+
+ @property
+ def pycache_clear(self):
+ node = extract_node("""def cache_clear(self): pass""")
+ return BoundMethod(proxy=node, bound=self._instance.parent.scope())
+
+
+def _transform_lru_cache(node, context=None):
+ # TODO: this is not ideal, since the node should be immutable,
+ # but due to https://github.com/PyCQA/astroid/issues/354,
+ # there's not much we can do now.
+ # Replacing the node would work partially, because,
+ # in pylint, the old node would still be available, leading
+ # to spurious false positives.
+ node.special_attributes = LruWrappedModel()(node)
+ return
+
+
+def _functools_partial_inference(node, context=None):
+ call = arguments.CallSite.from_call(node)
+ number_of_positional = len(call.positional_arguments)
+ if number_of_positional < 1:
+ raise astroid.UseInferenceDefault(
+ "functools.partial takes at least one argument"
+ )
+ if number_of_positional == 1 and not call.keyword_arguments:
+ raise astroid.UseInferenceDefault(
+ "functools.partial needs at least to have some filled arguments"
+ )
+
+ partial_function = call.positional_arguments[0]
+ try:
+ inferred_wrapped_function = next(partial_function.infer(context=context))
+ except astroid.InferenceError as exc:
+ raise astroid.UseInferenceDefault from exc
+ if inferred_wrapped_function is astroid.Uninferable:
+ raise astroid.UseInferenceDefault("Cannot infer the wrapped function")
+ if not isinstance(inferred_wrapped_function, astroid.FunctionDef):
+ raise astroid.UseInferenceDefault("The wrapped function is not a function")
+
+ # Determine if the passed keywords into the callsite are supported
+ # by the wrapped function.
+ function_parameters = chain(
+ inferred_wrapped_function.args.args or (),
+ inferred_wrapped_function.args.kwonlyargs or (),
+ )
+ parameter_names = set(
+ param.name
+ for param in function_parameters
+ if isinstance(param, astroid.AssignName)
+ )
+ if set(call.keyword_arguments) - parameter_names:
+ raise astroid.UseInferenceDefault(
+ "wrapped function received unknown parameters"
+ )
+
+ # Return a wrapped() object that can be used further for inference
+ class PartialFunction(astroid.FunctionDef):
+
+ filled_positionals = len(call.positional_arguments[1:])
+ filled_keywords = list(call.keyword_arguments)
+
+ def infer_call_result(self, caller=None, context=None):
+ nonlocal call
+ filled_args = call.positional_arguments[1:]
+ filled_keywords = call.keyword_arguments
+
+ if context:
+ current_passed_keywords = {
+ keyword for (keyword, _) in context.callcontext.keywords
+ }
+ for keyword, value in filled_keywords.items():
+ if keyword not in current_passed_keywords:
+ context.callcontext.keywords.append((keyword, value))
+
+ call_context_args = context.callcontext.args or []
+ context.callcontext.args = filled_args + call_context_args
+
+ return super().infer_call_result(caller=caller, context=context)
+
+ partial_function = PartialFunction(
+ name=inferred_wrapped_function.name,
+ doc=inferred_wrapped_function.doc,
+ lineno=inferred_wrapped_function.lineno,
+ col_offset=inferred_wrapped_function.col_offset,
+ parent=inferred_wrapped_function.parent,
+ )
+ partial_function.postinit(
+ args=inferred_wrapped_function.args,
+ body=inferred_wrapped_function.body,
+ decorators=inferred_wrapped_function.decorators,
+ returns=inferred_wrapped_function.returns,
+ type_comment_returns=inferred_wrapped_function.type_comment_returns,
+ type_comment_args=inferred_wrapped_function.type_comment_args,
+ )
+ return iter((partial_function,))
+
+
+def _looks_like_lru_cache(node):
+ """Check if the given function node is decorated with lru_cache."""
+ if not node.decorators:
+ return False
+ for decorator in node.decorators.nodes:
+ if not isinstance(decorator, astroid.Call):
+ continue
+ if _looks_like_functools_member(decorator, "lru_cache"):
+ return True
+ return False
+
+
+def _looks_like_functools_member(node, member):
+ """Check if the given Call node is a functools.partial call"""
+ if isinstance(node.func, astroid.Name):
+ return node.func.name == member
+ elif isinstance(node.func, astroid.Attribute):
+ return (
+ node.func.attrname == member
+ and isinstance(node.func.expr, astroid.Name)
+ and node.func.expr.name == "functools"
+ )
+
+
+_looks_like_partial = partial(_looks_like_functools_member, member="partial")
+
+
+MANAGER.register_transform(
+ astroid.FunctionDef, _transform_lru_cache, _looks_like_lru_cache
+)
+
+
+MANAGER.register_transform(
+ astroid.Call,
+ astroid.inference_tip(_functools_partial_inference),
+ _looks_like_partial,
+)
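The inference tip above turns a functools.partial(...) call into a PartialFunction node that remembers which arguments were pre-filled. A small sketch of what that exposes (assuming an astroid version that ships this module):

    import astroid

    partial_call = astroid.extract_node(
        "import functools\n"
        "def add(a, b):\n"
        "    return a + b\n"
        "functools.partial(add, 1)"
    )
    inferred = next(partial_call.infer())
    print(inferred.name)                 # 'add' -- the wrapped function
    print(inferred.filled_positionals)   # 1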
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_gi.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_gi.py
new file mode 100644
index 0000000..0970610
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_gi.py
@@ -0,0 +1,220 @@
+# Copyright (c) 2013-2014 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2014 Google, Inc.
+# Copyright (c) 2014 Cole Robinson
+# Copyright (c) 2015-2016 Claudiu Popa
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2015 David Shea
+# Copyright (c) 2016 Jakub Wilk
+# Copyright (c) 2016 Giuseppe Scrivano
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Astroid hooks for the Python 2 GObject introspection bindings.
+
+Helps with understanding everything imported from 'gi.repository'
+"""
+
+import inspect
+import itertools
+import sys
+import re
+import warnings
+
+from astroid import MANAGER, AstroidBuildingError, nodes
+from astroid.builder import AstroidBuilder
+
+
+_inspected_modules = {}
+
+_identifier_re = r"^[A-Za-z_]\w*$"
+
+
+def _gi_build_stub(parent):
+ """
+ Inspect the passed module recursively and build stubs for functions,
+ classes, etc.
+ """
+ classes = {}
+ functions = {}
+ constants = {}
+ methods = {}
+ for name in dir(parent):
+ if name.startswith("__"):
+ continue
+
+ # Check if this is a valid name in python
+ if not re.match(_identifier_re, name):
+ continue
+
+ try:
+ obj = getattr(parent, name)
+ except:
+ continue
+
+ if inspect.isclass(obj):
+ classes[name] = obj
+ elif inspect.isfunction(obj) or inspect.isbuiltin(obj):
+ functions[name] = obj
+ elif inspect.ismethod(obj) or inspect.ismethoddescriptor(obj):
+ methods[name] = obj
+ elif (
+ str(obj).startswith("<flags")
+ or str(obj).startswith("<enum ")
+ or str(obj).startswith("<GType ")
+ or inspect.isdatadescriptor(obj)
+ ):
+ constants[name] = 0
+
+
+def _looks_like_require_version(node):
+ # Only accept function calls with two constant arguments
+ if len(node.args) != 2:
+ return False
+
+ if not all(isinstance(arg, nodes.Const) for arg in node.args):
+ return False
+
+ func = node.func
+ if isinstance(func, nodes.Attribute):
+ if func.attrname != "require_version":
+ return False
+ if isinstance(func.expr, nodes.Name) and func.expr.name == "gi":
+ return True
+
+ return False
+
+ if isinstance(func, nodes.Name):
+ return func.name == "require_version"
+
+ return False
+
+
+def _register_require_version(node):
+ # Load the gi.require_version locally
+ try:
+ import gi
+
+ gi.require_version(node.args[0].value, node.args[1].value)
+ except Exception:
+ pass
+
+ return node
+
+
+MANAGER.register_failed_import_hook(_import_gi_module)
+MANAGER.register_transform(
+ nodes.Call, _register_require_version, _looks_like_require_version
+)
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_hashlib.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_hashlib.py
new file mode 100644
index 0000000..98ae774
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_hashlib.py
@@ -0,0 +1,67 @@
+# Copyright (c) 2016, 2018 Claudiu Popa
+# Copyright (c) 2018 Ioana Tagirta
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+import sys
+
+import six
+
+import astroid
+
+PY36 = sys.version_info >= (3, 6)
+
+
+def _hashlib_transform():
+ signature = "value=''"
+ template = """
+ class %(name)s(object):
+ def __init__(self, %(signature)s): pass
+ def digest(self):
+ return %(digest)s
+ def copy(self):
+ return self
+ def update(self, value): pass
+ def hexdigest(self):
+ return ''
+ @property
+ def name(self):
+ return %(name)r
+ @property
+ def block_size(self):
+ return 1
+ @property
+ def digest_size(self):
+ return 1
+ """
+ algorithms_with_signature = dict.fromkeys(
+ ["md5", "sha1", "sha224", "sha256", "sha384", "sha512"], signature
+ )
+ if PY36:
+ blake2b_signature = "data=b'', *, digest_size=64, key=b'', salt=b'', \
+ person=b'', fanout=1, depth=1, leaf_size=0, node_offset=0, \
+ node_depth=0, inner_size=0, last_node=False"
+ blake2s_signature = "data=b'', *, digest_size=32, key=b'', salt=b'', \
+ person=b'', fanout=1, depth=1, leaf_size=0, node_offset=0, \
+ node_depth=0, inner_size=0, last_node=False"
+ new_algorithms = dict.fromkeys(
+ ["sha3_224", "sha3_256", "sha3_384", "sha3_512", "shake_128", "shake_256"],
+ signature,
+ )
+ algorithms_with_signature.update(new_algorithms)
+ algorithms_with_signature.update(
+ {"blake2b": blake2b_signature, "blake2s": blake2s_signature}
+ )
+ classes = "".join(
+ template
+ % {
+ "name": hashfunc,
+ "digest": 'b""' if six.PY3 else '""',
+ "signature": signature,
+ }
+ for hashfunc, signature in algorithms_with_signature.items()
+ )
+ return astroid.parse(classes)
+
+
+astroid.register_module_extender(astroid.MANAGER, "hashlib", _hashlib_transform)
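Since the hashlib constructors are implemented in C, the template above synthesizes one Python class per algorithm with the common digest API. Sketch (astroid assumed importable; values indicative):

    import astroid

    node = astroid.extract_node("import hashlib\nhashlib.sha256()")
    digest_obj = next(node.infer())
    print(digest_obj.name)                         # 'sha256' -- the synthesized class
    print(bool(digest_obj.getattr("hexdigest")))   # True: method comes from the template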
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_io.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_io.py
new file mode 100644
index 0000000..4c68922
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_io.py
@@ -0,0 +1,45 @@
+# Copyright (c) 2016 Claudiu Popa
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Astroid brain hints for some of the _io C objects."""
+
+import astroid
+
+
+BUFFERED = {"BufferedWriter", "BufferedReader"}
+TextIOWrapper = "TextIOWrapper"
+FileIO = "FileIO"
+BufferedWriter = "BufferedWriter"
+
+
+def _generic_io_transform(node, name, cls):
+ """Transform the given name, by adding the given *class* as a member of the node."""
+
+ io_module = astroid.MANAGER.ast_from_module_name("_io")
+ attribute_object = io_module[cls]
+ instance = attribute_object.instantiate_class()
+ node.locals[name] = [instance]
+
+
+def _transform_text_io_wrapper(node):
+ # This is not always correct, since it can vary with the type of the descriptor,
+ # being stdout, stderr or stdin. But we cannot get access to the name of the
+ # stream, which is why we are using the BufferedWriter class as a default
+ # value
+ return _generic_io_transform(node, name="buffer", cls=BufferedWriter)
+
+
+def _transform_buffered(node):
+ return _generic_io_transform(node, name="raw", cls=FileIO)
+
+
+astroid.MANAGER.register_transform(
+ astroid.ClassDef, _transform_buffered, lambda node: node.name in BUFFERED
+)
+astroid.MANAGER.register_transform(
+ astroid.ClassDef,
+ _transform_text_io_wrapper,
+ lambda node: node.name == TextIOWrapper,
+)
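The two transforms above plug concrete _io instances into attributes that are otherwise opaque C-level slots. Sketch (assuming astroid can introspect the sys module on the current interpreter):

    import astroid

    node = astroid.extract_node("import sys\nsys.stdout.buffer")
    buffer_attr = next(node.infer())
    print(buffer_attr.name)   # 'BufferedWriter' -- the default the transform picks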
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_mechanize.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_mechanize.py
new file mode 100644
index 0000000..93f282e
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_mechanize.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2012-2013 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2014 Google, Inc.
+# Copyright (c) 2015-2016 Claudiu Popa
+# Copyright (c) 2016 Ceridwen
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+from astroid import MANAGER, register_module_extender
+from astroid.builder import AstroidBuilder
+
+
+def mechanize_transform():
+ return AstroidBuilder(MANAGER).string_build(
+ """
+
+class Browser(object):
+ def open(self, url, data=None, timeout=None):
+ return None
+ def open_novisit(self, url, data=None, timeout=None):
+ return None
+ def open_local_file(self, filename):
+ return None
+
+"""
+ )
+
+
+register_module_extender(MANAGER, "mechanize", mechanize_transform)
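A sketch of the effect, assuming the mechanize package itself is installed so the extender has a module to attach to:

    import astroid

    node = astroid.extract_node("import mechanize\nmechanize.Browser()")
    browser = next(node.infer())
    print(browser.name)                    # 'Browser'
    print(bool(browser.getattr("open")))   # True: stubbed above to return None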
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_multiprocessing.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_multiprocessing.py
new file mode 100644
index 0000000..be575e9
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_multiprocessing.py
@@ -0,0 +1,112 @@
+# Copyright (c) 2016 Claudiu Popa
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+import sys
+
+import astroid
+from astroid import exceptions
+
+
+PY34 = sys.version_info >= (3, 4)
+
+
+def _multiprocessing_transform():
+ module = astroid.parse(
+ """
+ from multiprocessing.managers import SyncManager
+ def Manager():
+ return SyncManager()
+ """
+ )
+ if not PY34:
+ return module
+
+ # On Python 3.4, multiprocessing uses a getattr lookup inside contexts,
+ # in order to get the attributes it needs. Since it's extremely
+ # dynamic, we use this approach to fake it.
+ node = astroid.parse(
+ """
+ from multiprocessing.context import DefaultContext, BaseContext
+ default = DefaultContext()
+ base = BaseContext()
+ """
+ )
+ try:
+ context = next(node["default"].infer())
+ base = next(node["base"].infer())
+ except exceptions.InferenceError:
+ return module
+
+ for node in (context, base):
+ for key, value in node.locals.items():
+ if key.startswith("_"):
+ continue
+
+ value = value[0]
+ if isinstance(value, astroid.FunctionDef):
+ # We need to rebound this, since otherwise
+ # it will have an extra argument (self).
+ value = astroid.BoundMethod(value, node)
+ module[key] = value
+ return module
+
+
+def _multiprocessing_managers_transform():
+ return astroid.parse(
+ """
+ import array
+ import threading
+ import multiprocessing.pool as pool
+
+ import six
+
+ class Namespace(object):
+ pass
+
+ class Value(object):
+ def __init__(self, typecode, value, lock=True):
+ self._typecode = typecode
+ self._value = value
+ def get(self):
+ return self._value
+ def set(self, value):
+ self._value = value
+ def __repr__(self):
+ return '%s(%r, %r)'%(type(self).__name__, self._typecode, self._value)
+ value = property(get, set)
+
+ def Array(typecode, sequence, lock=True):
+ return array.array(typecode, sequence)
+
+ class SyncManager(object):
+ Queue = JoinableQueue = six.moves.queue.Queue
+ Event = threading.Event
+ RLock = threading.RLock
+ BoundedSemaphore = threading.BoundedSemaphore
+ Condition = threading.Condition
+ Barrier = threading.Barrier
+ Pool = pool.Pool
+ list = list
+ dict = dict
+ Value = Value
+ Array = Array
+ Namespace = Namespace
+ __enter__ = lambda self: self
+ __exit__ = lambda *args: args
+
+ def start(self, initializer=None, initargs=None):
+ pass
+ def shutdown(self):
+ pass
+ """
+ )
+
+
+astroid.register_module_extender(
+ astroid.MANAGER, "multiprocessing.managers", _multiprocessing_managers_transform
+)
+astroid.register_module_extender(
+ astroid.MANAGER, "multiprocessing", _multiprocessing_transform
+)
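The stubs above make multiprocessing.Manager() look like a SyncManager instance with the usual proxy factories. Sketch (astroid assumed importable; six is referenced by the stub body):

    import astroid

    node = astroid.extract_node("import multiprocessing\nmultiprocessing.Manager()")
    manager = next(node.infer())
    print(manager.name)                     # 'SyncManager'
    print(bool(manager.getattr("Queue")))   # True: exposed by the stub above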
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_namedtuple_enum.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_namedtuple_enum.py
new file mode 100644
index 0000000..85390a8
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_namedtuple_enum.py
@@ -0,0 +1,398 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2012-2015 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2013-2014 Google, Inc.
+# Copyright (c) 2014-2018 Claudiu Popa
+# Copyright (c) 2014 Eevee (Alex Munroe)
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2015 Dmitry Pribysh
+# Copyright (c) 2015 David Shea
+# Copyright (c) 2015 Philip Lorenz
+# Copyright (c) 2016 Jakub Wilk
+# Copyright (c) 2016 Mateusz Bysiek
+# Copyright (c) 2017 Hugo
+# Copyright (c) 2017 Łukasz Rogalski
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Astroid hooks for the Python standard library."""
+
+import functools
+import keyword
+from textwrap import dedent
+
+from astroid import MANAGER, UseInferenceDefault, inference_tip, InferenceError
+from astroid import arguments
+from astroid import exceptions
+from astroid import nodes
+from astroid.builder import AstroidBuilder, extract_node
+from astroid import util
+
+
+TYPING_NAMEDTUPLE_BASENAMES = {"NamedTuple", "typing.NamedTuple"}
+ENUM_BASE_NAMES = {"Enum", "IntEnum", "enum.Enum", "enum.IntEnum"}
+
+
+def _infer_first(node, context):
+ if node is util.Uninferable:
+ raise UseInferenceDefault
+ try:
+ value = next(node.infer(context=context))
+ if value is util.Uninferable:
+ raise UseInferenceDefault()
+ else:
+ return value
+ except StopIteration:
+ raise InferenceError()
+
+
+def _find_func_form_arguments(node, context):
+ def _extract_namedtuple_arg_or_keyword(position, key_name=None):
+
+ if len(args) > position:
+ return _infer_first(args[position], context)
+ if key_name and key_name in found_keywords:
+ return _infer_first(found_keywords[key_name], context)
+
+ args = node.args
+ keywords = node.keywords
+ found_keywords = (
+ {keyword.arg: keyword.value for keyword in keywords} if keywords else {}
+ )
+
+ name = _extract_namedtuple_arg_or_keyword(position=0, key_name="typename")
+ names = _extract_namedtuple_arg_or_keyword(position=1, key_name="field_names")
+ if name and names:
+ return name.value, names
+
+ raise UseInferenceDefault()
+
+
+def infer_func_form(node, base_type, context=None, enum=False):
+ """Specific inference function for namedtuple or Python 3 enum. """
+ # node is a Call node, class name as first argument and generated class
+ # attributes as second argument
+
+ # The namedtuple or enum list of attributes can be a list of strings
+ # or a whitespace-separated string.
+ try:
+ name, names = _find_func_form_arguments(node, context)
+ try:
+ attributes = names.value.replace(",", " ").split()
+ except AttributeError:
+ if not enum:
+ attributes = [
+ _infer_first(const, context).value for const in names.elts
+ ]
+ else:
+ # Enum supports either an iterator of (name, value) pairs
+ # or a mapping.
+ if hasattr(names, "items") and isinstance(names.items, list):
+ attributes = [
+ _infer_first(const[0], context).value
+ for const in names.items
+ if isinstance(const[0], nodes.Const)
+ ]
+ elif hasattr(names, "elts"):
+ # Enums can support either ["a", "b", "c"]
+ # or [("a", 1), ("b", 2), ...], but they can't
+ # be mixed.
+ if all(isinstance(const, nodes.Tuple) for const in names.elts):
+ attributes = [
+ _infer_first(const.elts[0], context).value
+ for const in names.elts
+ if isinstance(const, nodes.Tuple)
+ ]
+ else:
+ attributes = [
+ _infer_first(const, context).value for const in names.elts
+ ]
+ else:
+ raise AttributeError
+ if not attributes:
+ raise AttributeError
+ except (AttributeError, exceptions.InferenceError):
+ raise UseInferenceDefault()
+
+ # If we can't infer the name of the class, don't crash, up to this point
+ # we know it is a namedtuple anyway.
+ name = name or "Uninferable"
+ # we want to return a Class node instance with proper attributes set
+ class_node = nodes.ClassDef(name, "docstring")
+ class_node.parent = node.parent
+ # set base class=tuple
+ class_node.bases.append(base_type)
+ # XXX add __init__(*attributes) method
+ for attr in attributes:
+ fake_node = nodes.EmptyNode()
+ fake_node.parent = class_node
+ fake_node.attrname = attr
+ class_node.instance_attrs[attr] = [fake_node]
+ return class_node, name, attributes
+
+
+def _has_namedtuple_base(node):
+ """Predicate for class inference tip
+
+ :type node: ClassDef
+ :rtype: bool
+ """
+ return set(node.basenames) & TYPING_NAMEDTUPLE_BASENAMES
+
+
+def _looks_like(node, name):
+ func = node.func
+ if isinstance(func, nodes.Attribute):
+ return func.attrname == name
+ if isinstance(func, nodes.Name):
+ return func.name == name
+ return False
+
+
+_looks_like_namedtuple = functools.partial(_looks_like, name="namedtuple")
+_looks_like_enum = functools.partial(_looks_like, name="Enum")
+_looks_like_typing_namedtuple = functools.partial(_looks_like, name="NamedTuple")
+
+
+def infer_named_tuple(node, context=None):
+ """Specific inference function for namedtuple Call node"""
+ tuple_base_name = nodes.Name(name="tuple", parent=node.root())
+ class_node, name, attributes = infer_func_form(
+ node, tuple_base_name, context=context
+ )
+ call_site = arguments.CallSite.from_call(node)
+ func = next(extract_node("import collections; collections.namedtuple").infer())
+ try:
+ rename = next(call_site.infer_argument(func, "rename", context)).bool_value()
+ except InferenceError:
+ rename = False
+
+ if rename:
+ attributes = _get_renamed_namedtuple_attributes(attributes)
+
+ replace_args = ", ".join("{arg}=None".format(arg=arg) for arg in attributes)
+
+ field_def = (
+ " {name} = property(lambda self: self[{index:d}], "
+ "doc='Alias for field number {index:d}')"
+ )
+ field_defs = "\n".join(
+ field_def.format(name=name, index=index)
+ for index, name in enumerate(attributes)
+ )
+ fake = AstroidBuilder(MANAGER).string_build(
+ """
+class %(name)s(tuple):
+ __slots__ = ()
+ _fields = %(fields)r
+ def _asdict(self):
+ return self.__dict__
+ @classmethod
+ def _make(cls, iterable, new=tuple.__new__, len=len):
+ return new(cls, iterable)
+ def _replace(self, %(replace_args)s):
+ return self
+ def __getnewargs__(self):
+ return tuple(self)
+%(field_defs)s
+ """
+ % {
+ "name": name,
+ "fields": attributes,
+ "field_defs": field_defs,
+ "replace_args": replace_args,
+ }
+ )
+ class_node.locals["_asdict"] = fake.body[0].locals["_asdict"]
+ class_node.locals["_make"] = fake.body[0].locals["_make"]
+ class_node.locals["_replace"] = fake.body[0].locals["_replace"]
+ class_node.locals["_fields"] = fake.body[0].locals["_fields"]
+ for attr in attributes:
+ class_node.locals[attr] = fake.body[0].locals[attr]
+ # we use UseInferenceDefault, we can't be a generator so return an iterator
+ return iter([class_node])
+
+
+def _get_renamed_namedtuple_attributes(field_names):
+ names = list(field_names)
+ seen = set()
+ for i, name in enumerate(field_names):
+ if (
+ not all(c.isalnum() or c == "_" for c in name)
+ or keyword.iskeyword(name)
+ or not name
+ or name[0].isdigit()
+ or name.startswith("_")
+ or name in seen
+ ):
+ names[i] = "_%d" % i
+ seen.add(name)
+ return tuple(names)
+
+
+def infer_enum(node, context=None):
+ """ Specific inference function for enum Call node. """
+ enum_meta = extract_node(
+ """
+ class EnumMeta(object):
+ 'docstring'
+ def __call__(self, node):
+ class EnumAttribute(object):
+ name = ''
+ value = 0
+ return EnumAttribute()
+ def __iter__(self):
+ class EnumAttribute(object):
+ name = ''
+ value = 0
+ return [EnumAttribute()]
+ def __next__(self):
+ return next(iter(self))
+ def __getitem__(self, attr):
+ class Value(object):
+ @property
+ def name(self):
+ return ''
+ @property
+ def value(self):
+ return attr
+
+ return Value()
+ __members__ = ['']
+ """
+ )
+ class_node = infer_func_form(node, enum_meta, context=context, enum=True)[0]
+ return iter([class_node.instantiate_class()])
+
+
+def infer_enum_class(node):
+ """ Specific inference for enums. """
+ for basename in node.basenames:
+ # TODO: doesn't handle subclasses yet. This implementation
+ # is a hack to support enums.
+ if basename not in ENUM_BASE_NAMES:
+ continue
+ if node.root().name == "enum":
+ # Skip if the class is directly from enum module.
+ break
+ for local, values in node.locals.items():
+ if any(not isinstance(value, nodes.AssignName) for value in values):
+ continue
+
+ targets = []
+ stmt = values[0].statement()
+ if isinstance(stmt, nodes.Assign):
+ if isinstance(stmt.targets[0], nodes.Tuple):
+ targets = stmt.targets[0].itered()
+ else:
+ targets = stmt.targets
+ elif isinstance(stmt, nodes.AnnAssign):
+ targets = [stmt.target]
+
+ inferred_return_value = None
+ if isinstance(stmt.value, nodes.Const):
+ if isinstance(stmt.value.value, str):
+ inferred_return_value = '"{}"'.format(stmt.value.value)
+ else:
+ inferred_return_value = stmt.value.value
+
+ new_targets = []
+ for target in targets:
+ # Replace all the assignments with our mocked class.
+ classdef = dedent(
+ """
+ class {name}({types}):
+ @property
+ def value(self):
+ return {return_value}
+ @property
+ def name(self):
+ return {name}
+ """.format(
+ name=target.name,
+ types=", ".join(node.basenames),
+ return_value=inferred_return_value,
+ )
+ )
+ fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name]
+ fake.parent = target.parent
+ for method in node.mymethods():
+ fake.locals[method.name] = [method]
+ new_targets.append(fake.instantiate_class())
+ node.locals[local] = new_targets
+ break
+ return node
+
+
+def infer_typing_namedtuple_class(class_node, context=None):
+ """Infer a subclass of typing.NamedTuple"""
+ # Check if it has the corresponding bases
+ annassigns_fields = [
+ annassign.target.name
+ for annassign in class_node.body
+ if isinstance(annassign, nodes.AnnAssign)
+ ]
+ code = dedent(
+ """
+ from collections import namedtuple
+ namedtuple({typename!r}, {fields!r})
+ """
+ ).format(typename=class_node.name, fields=",".join(annassigns_fields))
+ node = extract_node(code)
+ generated_class_node = next(infer_named_tuple(node, context))
+ for method in class_node.mymethods():
+ generated_class_node.locals[method.name] = [method]
+ return iter((generated_class_node,))
+
+
+def infer_typing_namedtuple(node, context=None):
+ """Infer a typing.NamedTuple(...) call."""
+ # This is essentially a namedtuple with different arguments
+ # so we extract the args and infer a named tuple.
+ try:
+ func = next(node.func.infer())
+ except InferenceError:
+ raise UseInferenceDefault
+
+ if func.qname() != "typing.NamedTuple":
+ raise UseInferenceDefault
+
+ if len(node.args) != 2:
+ raise UseInferenceDefault
+
+ if not isinstance(node.args[1], (nodes.List, nodes.Tuple)):
+ raise UseInferenceDefault
+
+ names = []
+ for elt in node.args[1].elts:
+ if not isinstance(elt, (nodes.List, nodes.Tuple)):
+ raise UseInferenceDefault
+ if len(elt.elts) != 2:
+ raise UseInferenceDefault
+ names.append(elt.elts[0].as_string())
+
+ typename = node.args[0].as_string()
+ node = extract_node(
+ "namedtuple(%(typename)s, (%(fields)s,)) "
+ % {"typename": typename, "fields": ",".join(names)}
+ )
+ return infer_named_tuple(node, context)
+
+
+MANAGER.register_transform(
+ nodes.Call, inference_tip(infer_named_tuple), _looks_like_namedtuple
+)
+MANAGER.register_transform(nodes.Call, inference_tip(infer_enum), _looks_like_enum)
+MANAGER.register_transform(
+ nodes.ClassDef,
+ infer_enum_class,
+ predicate=lambda cls: any(
+ basename for basename in cls.basenames if basename in ENUM_BASE_NAMES
+ ),
+)
+MANAGER.register_transform(
+ nodes.ClassDef, inference_tip(infer_typing_namedtuple_class), _has_namedtuple_base
+)
+MANAGER.register_transform(
+ nodes.Call, inference_tip(infer_typing_namedtuple), _looks_like_typing_namedtuple
+)
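infer_named_tuple rebuilds the class that collections.namedtuple would create, so the field names become inferable attributes. Sketch (assuming an astroid release with this module):

    import astroid

    node = astroid.extract_node(
        "import collections\n"
        "collections.namedtuple('Point', 'x y')"
    )
    point_cls = next(node.infer())
    print(point_cls.name)                     # 'Point'
    print(sorted(point_cls.instance_attrs))   # ['x', 'y']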
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_nose.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_nose.py
new file mode 100644
index 0000000..7b12d76
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_nose.py
@@ -0,0 +1,77 @@
+# Copyright (c) 2015-2016 Claudiu Popa
+# Copyright (c) 2016 Ceridwen
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+
+"""Hooks for nose library."""
+
+import re
+import textwrap
+
+import astroid
+import astroid.builder
+
+_BUILDER = astroid.builder.AstroidBuilder(astroid.MANAGER)
+
+
+def _pep8(name, caps=re.compile("([A-Z])")):
+ return caps.sub(lambda m: "_" + m.groups()[0].lower(), name)
+
+
+def _nose_tools_functions():
+ """Get an iterator of names and bound methods."""
+ module = _BUILDER.string_build(
+ textwrap.dedent(
+ """
+ import unittest
+
+ class Test(unittest.TestCase):
+ pass
+ a = Test()
+ """
+ )
+ )
+ try:
+ case = next(module["a"].infer())
+ except astroid.InferenceError:
+ return
+ for method in case.methods():
+ if method.name.startswith("assert") and "_" not in method.name:
+ pep8_name = _pep8(method.name)
+ yield pep8_name, astroid.BoundMethod(method, case)
+ if method.name == "assertEqual":
+ # nose also exports assert_equals.
+ yield "assert_equals", astroid.BoundMethod(method, case)
+
+
+def _nose_tools_transform(node):
+ for method_name, method in _nose_tools_functions():
+ node.locals[method_name] = [method]
+
+
+def _nose_tools_trivial_transform():
+ """Custom transform for the nose.tools module."""
+ stub = _BUILDER.string_build("""__all__ = []""")
+ all_entries = ["ok_", "eq_"]
+
+ for pep8_name, method in _nose_tools_functions():
+ all_entries.append(pep8_name)
+ stub[pep8_name] = method
+
+ # Update the __all__ variable, since nose.tools
+ # does this manually with .append.
+ all_assign = stub["__all__"].parent
+ all_object = astroid.List(all_entries)
+ all_object.parent = all_assign
+ all_assign.value = all_object
+ return stub
+
+
+astroid.register_module_extender(
+ astroid.MANAGER, "nose.tools.trivial", _nose_tools_trivial_transform
+)
+astroid.MANAGER.register_transform(
+ astroid.Module, _nose_tools_transform, lambda n: n.name == "nose.tools"
+)
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_numpy.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_numpy.py
new file mode 100644
index 0000000..9d31a47
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_numpy.py
@@ -0,0 +1,488 @@
+# Copyright (c) 2015-2016, 2018 Claudiu Popa
+# Copyright (c) 2016 Ceridwen
+# Copyright (c) 2017-2018 hippo91
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+
+"""Astroid hooks for numpy."""
+
+import astroid
+
+
+def numpy_random_mtrand_transform():
+ return astroid.parse(
+ """
+ def beta(a, b, size=None): pass
+ def binomial(n, p, size=None): pass
+ def bytes(length): pass
+ def chisquare(df, size=None): pass
+ def choice(a, size=None, replace=True, p=None): pass
+ def dirichlet(alpha, size=None): pass
+ def exponential(scale=1.0, size=None): pass
+ def f(dfnum, dfden, size=None): pass
+ def gamma(shape, scale=1.0, size=None): pass
+ def geometric(p, size=None): pass
+ def get_state(): pass
+ def gumbel(loc=0.0, scale=1.0, size=None): pass
+ def hypergeometric(ngood, nbad, nsample, size=None): pass
+ def laplace(loc=0.0, scale=1.0, size=None): pass
+ def logistic(loc=0.0, scale=1.0, size=None): pass
+ def lognormal(mean=0.0, sigma=1.0, size=None): pass
+ def logseries(p, size=None): pass
+ def multinomial(n, pvals, size=None): pass
+ def multivariate_normal(mean, cov, size=None): pass
+ def negative_binomial(n, p, size=None): pass
+ def noncentral_chisquare(df, nonc, size=None): pass
+ def noncentral_f(dfnum, dfden, nonc, size=None): pass
+ def normal(loc=0.0, scale=1.0, size=None): pass
+ def pareto(a, size=None): pass
+ def permutation(x): pass
+ def poisson(lam=1.0, size=None): pass
+ def power(a, size=None): pass
+ def rand(*args): pass
+ def randint(low, high=None, size=None, dtype='l'): pass
+ def randn(*args): pass
+ def random_integers(low, high=None, size=None): pass
+ def random_sample(size=None): pass
+ def rayleigh(scale=1.0, size=None): pass
+ def seed(seed=None): pass
+ def set_state(state): pass
+ def shuffle(x): pass
+ def standard_cauchy(size=None): pass
+ def standard_exponential(size=None): pass
+ def standard_gamma(shape, size=None): pass
+ def standard_normal(size=None): pass
+ def standard_t(df, size=None): pass
+ def triangular(left, mode, right, size=None): pass
+ def uniform(low=0.0, high=1.0, size=None): pass
+ def vonmises(mu, kappa, size=None): pass
+ def wald(mean, scale, size=None): pass
+ def weibull(a, size=None): pass
+ def zipf(a, size=None): pass
+ """
+ )
+
+
+def numpy_core_umath_transform():
+ ufunc_optional_keyword_arguments = (
+ """out=None, where=True, casting='same_kind', order='K', """
+ """dtype=None, subok=True"""
+ )
+ return astroid.parse(
+ """
+ # Constants
+ e = 2.718281828459045
+ euler_gamma = 0.5772156649015329
+
+ # No arg functions
+ def geterrobj(): pass
+
+ # One arg functions
+ def seterrobj(errobj): pass
+
+ # One arg functions with optional kwargs
+ def arccos(x, {opt_args:s}): pass
+ def arccosh(x, {opt_args:s}): pass
+ def arcsin(x, {opt_args:s}): pass
+ def arcsinh(x, {opt_args:s}): pass
+ def arctan(x, {opt_args:s}): pass
+ def arctanh(x, {opt_args:s}): pass
+ def cbrt(x, {opt_args:s}): pass
+ def conj(x, {opt_args:s}): pass
+ def conjugate(x, {opt_args:s}): pass
+ def cosh(x, {opt_args:s}): pass
+ def deg2rad(x, {opt_args:s}): pass
+ def degrees(x, {opt_args:s}): pass
+ def exp2(x, {opt_args:s}): pass
+ def expm1(x, {opt_args:s}): pass
+ def fabs(x, {opt_args:s}): pass
+ def frexp(x, {opt_args:s}): pass
+ def isfinite(x, {opt_args:s}): pass
+ def isinf(x, {opt_args:s}): pass
+ def log(x, {opt_args:s}): pass
+ def log1p(x, {opt_args:s}): pass
+ def log2(x, {opt_args:s}): pass
+ def logical_not(x, {opt_args:s}): pass
+ def modf(x, {opt_args:s}): pass
+ def negative(x, {opt_args:s}): pass
+ def rad2deg(x, {opt_args:s}): pass
+ def radians(x, {opt_args:s}): pass
+ def reciprocal(x, {opt_args:s}): pass
+ def rint(x, {opt_args:s}): pass
+ def sign(x, {opt_args:s}): pass
+ def signbit(x, {opt_args:s}): pass
+ def sinh(x, {opt_args:s}): pass
+ def spacing(x, {opt_args:s}): pass
+ def square(x, {opt_args:s}): pass
+ def tan(x, {opt_args:s}): pass
+ def tanh(x, {opt_args:s}): pass
+ def trunc(x, {opt_args:s}): pass
+
+ # Two args functions with optional kwargs
+ def bitwise_and(x1, x2, {opt_args:s}): pass
+ def bitwise_or(x1, x2, {opt_args:s}): pass
+ def bitwise_xor(x1, x2, {opt_args:s}): pass
+ def copysign(x1, x2, {opt_args:s}): pass
+ def divide(x1, x2, {opt_args:s}): pass
+ def equal(x1, x2, {opt_args:s}): pass
+ def float_power(x1, x2, {opt_args:s}): pass
+ def floor_divide(x1, x2, {opt_args:s}): pass
+ def fmax(x1, x2, {opt_args:s}): pass
+ def fmin(x1, x2, {opt_args:s}): pass
+ def fmod(x1, x2, {opt_args:s}): pass
+ def greater(x1, x2, {opt_args:s}): pass
+ def hypot(x1, x2, {opt_args:s}): pass
+ def ldexp(x1, x2, {opt_args:s}): pass
+ def left_shift(x1, x2, {opt_args:s}): pass
+ def less(x1, x2, {opt_args:s}): pass
+ def logaddexp(x1, x2, {opt_args:s}): pass
+ def logaddexp2(x1, x2, {opt_args:s}): pass
+ def logical_and(x1, x2, {opt_args:s}): pass
+ def logical_or(x1, x2, {opt_args:s}): pass
+ def logical_xor(x1, x2, {opt_args:s}): pass
+ def maximum(x1, x2, {opt_args:s}): pass
+ def minimum(x1, x2, {opt_args:s}): pass
+ def nextafter(x1, x2, {opt_args:s}): pass
+ def not_equal(x1, x2, {opt_args:s}): pass
+ def power(x1, x2, {opt_args:s}): pass
+ def remainder(x1, x2, {opt_args:s}): pass
+ def right_shift(x1, x2, {opt_args:s}): pass
+ def subtract(x1, x2, {opt_args:s}): pass
+ def true_divide(x1, x2, {opt_args:s}): pass
+ """.format(
+ opt_args=ufunc_optional_keyword_arguments
+ )
+ )
+
+
+def numpy_core_numerictypes_transform():
+ return astroid.parse(
+ """
+ # different types defined in numerictypes.py
+ class generic(object):
+ def __init__(self, value):
+ self.T = None
+ self.base = None
+ self.data = None
+ self.dtype = None
+ self.flags = None
+ self.flat = None
+ self.imag = None
+ self.itemsize = None
+ self.nbytes = None
+ self.ndim = None
+ self.real = None
+ self.size = None
+ self.strides = None
+
+ def all(self): pass
+ def any(self): pass
+ def argmax(self): pass
+ def argmin(self): pass
+ def argsort(self): pass
+ def astype(self): pass
+ def base(self): pass
+ def byteswap(self): pass
+ def choose(self): pass
+ def clip(self): pass
+ def compress(self): pass
+ def conj(self): pass
+ def conjugate(self): pass
+ def copy(self): pass
+ def cumprod(self): pass
+ def cumsum(self): pass
+ def data(self): pass
+ def diagonal(self): pass
+ def dtype(self): pass
+ def dump(self): pass
+ def dumps(self): pass
+ def fill(self): pass
+ def flags(self): pass
+ def flat(self): pass
+ def flatten(self): pass
+ def getfield(self): pass
+ def imag(self): pass
+ def item(self): pass
+ def itemset(self): pass
+ def itemsize(self): pass
+ def max(self): pass
+ def mean(self): pass
+ def min(self): pass
+ def nbytes(self): pass
+ def ndim(self): pass
+ def newbyteorder(self): pass
+ def nonzero(self): pass
+ def prod(self): pass
+ def ptp(self): pass
+ def put(self): pass
+ def ravel(self): pass
+ def real(self): pass
+ def repeat(self): pass
+ def reshape(self): pass
+ def resize(self): pass
+ def round(self): pass
+ def searchsorted(self): pass
+ def setfield(self): pass
+ def setflags(self): pass
+ def shape(self): pass
+ def size(self): pass
+ def sort(self): pass
+ def squeeze(self): pass
+ def std(self): pass
+ def strides(self): pass
+ def sum(self): pass
+ def swapaxes(self): pass
+ def take(self): pass
+ def tobytes(self): pass
+ def tofile(self): pass
+ def tolist(self): pass
+ def tostring(self): pass
+ def trace(self): pass
+ def transpose(self): pass
+ def var(self): pass
+ def view(self): pass
+
+
+ class dtype(object):
+ def __init__(self, obj, align=False, copy=False):
+ self.alignment = None
+ self.base = None
+ self.byteorder = None
+ self.char = None
+ self.descr = None
+ self.fields = None
+ self.flags = None
+ self.hasobject = None
+ self.isalignedstruct = None
+ self.isbuiltin = None
+ self.isnative = None
+ self.itemsize = None
+ self.kind = None
+ self.metadata = None
+ self.name = None
+ self.names = None
+ self.num = None
+ self.shape = None
+ self.str = None
+ self.subdtype = None
+ self.type = None
+
+ def newbyteorder(self, new_order='S'): pass
+
+
+ class ndarray(object):
+ def __init__(self, shape, dtype=float, buffer=None, offset=0,
+ strides=None, order=None):
+ self.T = None
+ self.base = None
+ self.ctypes = None
+ self.data = None
+ self.dtype = None
+ self.flags = None
+ self.flat = None
+ self.imag = None
+ self.itemsize = None
+ self.nbytes = None
+ self.ndim = None
+ self.real = None
+ self.shape = None
+ self.size = None
+ self.strides = None
+
+ def all(self): pass
+ def any(self): pass
+ def argmax(self): pass
+ def argmin(self): pass
+ def argpartition(self): pass
+ def argsort(self): pass
+ def astype(self): pass
+ def byteswap(self): pass
+ def choose(self): pass
+ def clip(self): pass
+ def compress(self): pass
+ def conj(self): pass
+ def conjugate(self): pass
+ def copy(self): pass
+ def cumprod(self): pass
+ def cumsum(self): pass
+ def diagonal(self): pass
+ def dot(self): pass
+ def dump(self): pass
+ def dumps(self): pass
+ def fill(self): pass
+ def flatten(self): pass
+ def getfield(self): pass
+ def item(self): pass
+ def itemset(self): pass
+ def max(self): pass
+ def mean(self): pass
+ def min(self): pass
+ def newbyteorder(self): pass
+ def nonzero(self): pass
+ def partition(self): pass
+ def prod(self): pass
+ def ptp(self): pass
+ def put(self): pass
+ def ravel(self): pass
+ def repeat(self): pass
+ def reshape(self): pass
+ def resize(self): pass
+ def round(self): pass
+ def searchsorted(self): pass
+ def setfield(self): pass
+ def setflags(self): pass
+ def sort(self): pass
+ def squeeze(self): pass
+ def std(self): pass
+ def sum(self): pass
+ def swapaxes(self): pass
+ def take(self): pass
+ def tobytes(self): pass
+ def tofile(self): pass
+ def tolist(self): pass
+ def tostring(self): pass
+ def trace(self): pass
+ def transpose(self): pass
+ def var(self): pass
+ def view(self): pass
+
+
+ class busdaycalendar(object):
+ def __init__(self, weekmask='1111100', holidays=None):
+ self.holidays = None
+ self.weekmask = None
+
+ class flexible(generic): pass
+ class bool_(generic): pass
+ class number(generic): pass
+ class datetime64(generic): pass
+
+
+ class void(flexible):
+ def __init__(self, *args, **kwargs):
+ self.base = None
+ self.dtype = None
+ self.flags = None
+ def getfield(self): pass
+ def setfield(self): pass
+
+
+ class character(flexible): pass
+
+
+ class integer(number):
+ def __init__(self, value):
+ self.denominator = None
+ self.numerator = None
+
+
+ class inexact(number): pass
+
+
+ class str_(str, character):
+ def maketrans(self, x, y=None, z=None): pass
+
+
+ class bytes_(bytes, character):
+ def fromhex(self, string): pass
+ def maketrans(self, frm, to): pass
+
+
+ class signedinteger(integer): pass
+
+
+ class unsignedinteger(integer): pass
+
+
+ class complexfloating(inexact): pass
+
+
+ class floating(inexact): pass
+
+
+ class float64(floating, float):
+ def fromhex(self, string): pass
+
+
+ class uint64(unsignedinteger): pass
+ class complex64(complexfloating): pass
+ class int16(signedinteger): pass
+ class float96(floating): pass
+ class int8(signedinteger): pass
+ class uint32(unsignedinteger): pass
+ class uint8(unsignedinteger): pass
+ class _typedict(dict): pass
+ class complex192(complexfloating): pass
+ class timedelta64(signedinteger): pass
+ class int32(signedinteger): pass
+ class uint16(unsignedinteger): pass
+ class float32(floating): pass
+ class complex128(complexfloating, complex): pass
+ class float16(floating): pass
+ class int64(signedinteger): pass
+
+ buffer_type = memoryview
+ bool8 = bool_
+ byte = int8
+ bytes0 = bytes_
+ cdouble = complex128
+ cfloat = complex128
+ clongdouble = complex192
+ clongfloat = complex192
+ complex_ = complex128
+ csingle = complex64
+ double = float64
+ float_ = float64
+ half = float16
+ int0 = int32
+ int_ = int32
+ intc = int32
+ intp = int32
+ long = int32
+ longcomplex = complex192
+ longdouble = float96
+ longfloat = float96
+ longlong = int64
+ object0 = object_
+ object_ = object_
+ short = int16
+ single = float32
+ singlecomplex = complex64
+ str0 = str_
+ string_ = bytes_
+ ubyte = uint8
+ uint = uint32
+ uint0 = uint32
+ uintc = uint32
+ uintp = uint32
+ ulonglong = uint64
+ unicode = str_
+ unicode_ = str_
+ ushort = uint16
+ void0 = void
+ """
+ )
+
+
+def numpy_funcs():
+ return astroid.parse(
+ """
+ import builtins
+ def sum(a, axis=None, dtype=None, out=None, keepdims=None):
+ return builtins.sum(a)
+ """
+ )
+
+
+astroid.register_module_extender(
+ astroid.MANAGER, "numpy.core.umath", numpy_core_umath_transform
+)
+astroid.register_module_extender(
+ astroid.MANAGER, "numpy.random.mtrand", numpy_random_mtrand_transform
+)
+astroid.register_module_extender(
+ astroid.MANAGER, "numpy.core.numerictypes", numpy_core_numerictypes_transform
+)
+astroid.register_module_extender(astroid.MANAGER, "numpy", numpy_funcs)
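
All of the numpy hooks above rely on `register_module_extender`: the callback returns a stub module built with `astroid.parse`, and its names are merged into the real module's AST whenever astroid builds that module. A minimal sketch of the mechanism, using a hypothetical module name:

```python
import astroid
from astroid import MANAGER, register_module_extender

def _mymodule_transform():
    # The stub is only ever parsed, never executed, so bare signatures suffice.
    return astroid.parse("""
    def helper(x, y=None):
        '''Signature visible to static analysis only.'''
    """)

# 'mymodule' is purely illustrative; the extender is applied lazily, only when
# astroid actually builds a module with that name.
register_module_extender(MANAGER, "mymodule", _mymodule_transform)
```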
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_pkg_resources.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_pkg_resources.py
new file mode 100644
index 0000000..25e7649
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_pkg_resources.py
@@ -0,0 +1,75 @@
+# Copyright (c) 2016, 2018 Claudiu Popa
+# Copyright (c) 2016 Ceridwen
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+
+import astroid
+from astroid import parse
+from astroid import inference_tip
+from astroid import register_module_extender
+from astroid import MANAGER
+
+
+def pkg_resources_transform():
+ return parse(
+ """
+def require(*requirements):
+ return pkg_resources.working_set.require(*requirements)
+
+def run_script(requires, script_name):
+ return pkg_resources.working_set.run_script(requires, script_name)
+
+def iter_entry_points(group, name=None):
+ return pkg_resources.working_set.iter_entry_points(group, name)
+
+def resource_exists(package_or_requirement, resource_name):
+ return get_provider(package_or_requirement).has_resource(resource_name)
+
+def resource_isdir(package_or_requirement, resource_name):
+ return get_provider(package_or_requirement).resource_isdir(
+ resource_name)
+
+def resource_filename(package_or_requirement, resource_name):
+ return get_provider(package_or_requirement).get_resource_filename(
+ self, resource_name)
+
+def resource_stream(package_or_requirement, resource_name):
+ return get_provider(package_or_requirement).get_resource_stream(
+ self, resource_name)
+
+def resource_string(package_or_requirement, resource_name):
+ return get_provider(package_or_requirement).get_resource_string(
+ self, resource_name)
+
+def resource_listdir(package_or_requirement, resource_name):
+ return get_provider(package_or_requirement).resource_listdir(
+ resource_name)
+
+def extraction_error():
+ pass
+
+def get_cache_path(archive_name, names=()):
+ extract_path = self.extraction_path or get_default_cache()
+ target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
+ return target_path
+
+def postprocess(tempname, filename):
+ pass
+
+def set_extraction_path(path):
+ pass
+
+def cleanup_resources(force=False):
+ pass
+
+def get_distribution(dist):
+ return Distribution(dist)
+
+_namespace_packages = {}
+"""
+ )
+
+
+register_module_extender(MANAGER, "pkg_resources", pkg_resources_transform)
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_pytest.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_pytest.py
new file mode 100644
index 0000000..d7e3ac8
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_pytest.py
@@ -0,0 +1,88 @@
+# Copyright (c) 2014-2016 Claudiu Popa
+# Copyright (c) 2014 Jeff Quast
+# Copyright (c) 2014 Google, Inc.
+# Copyright (c) 2016 Florian Bruhin
+# Copyright (c) 2016 Ceridwen
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Astroid hooks for pytest."""
+from __future__ import absolute_import
+from astroid import MANAGER, register_module_extender
+from astroid.builder import AstroidBuilder
+
+
+def pytest_transform():
+ return AstroidBuilder(MANAGER).string_build(
+ """
+
+try:
+ import _pytest.mark
+ import _pytest.recwarn
+ import _pytest.runner
+ import _pytest.python
+ import _pytest.skipping
+ import _pytest.assertion
+except ImportError:
+ pass
+else:
+ deprecated_call = _pytest.recwarn.deprecated_call
+ warns = _pytest.recwarn.warns
+
+ exit = _pytest.runner.exit
+ fail = _pytest.runner.fail
+ skip = _pytest.runner.skip
+ importorskip = _pytest.runner.importorskip
+
+ xfail = _pytest.skipping.xfail
+ mark = _pytest.mark.MarkGenerator()
+ raises = _pytest.python.raises
+
+ # New in pytest 3.0
+ try:
+ approx = _pytest.python.approx
+ register_assert_rewrite = _pytest.assertion.register_assert_rewrite
+ except AttributeError:
+ pass
+
+
+# Moved in pytest 3.0
+
+try:
+ import _pytest.freeze_support
+ freeze_includes = _pytest.freeze_support.freeze_includes
+except ImportError:
+ try:
+ import _pytest.genscript
+ freeze_includes = _pytest.genscript.freeze_includes
+ except ImportError:
+ pass
+
+try:
+ import _pytest.debugging
+ set_trace = _pytest.debugging.pytestPDB().set_trace
+except ImportError:
+ try:
+ import _pytest.pdb
+ set_trace = _pytest.pdb.pytestPDB().set_trace
+ except ImportError:
+ pass
+
+try:
+ import _pytest.fixtures
+ fixture = _pytest.fixtures.fixture
+ yield_fixture = _pytest.fixtures.yield_fixture
+except ImportError:
+ try:
+ import _pytest.python
+ fixture = _pytest.python.fixture
+ yield_fixture = _pytest.python.yield_fixture
+ except ImportError:
+ pass
+"""
+ )
+
+
+register_module_extender(MANAGER, "pytest", pytest_transform)
+register_module_extender(MANAGER, "py.test", pytest_transform)
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_qt.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_qt.py
new file mode 100644
index 0000000..d65f218
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_qt.py
@@ -0,0 +1,82 @@
+# Copyright (c) 2015-2016 Claudiu Popa
+# Copyright (c) 2016 Ceridwen
+# Copyright (c) 2017 Roy Wright
+# Copyright (c) 2018 Ashley Whetter
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Astroid hooks for the PyQT library."""
+
+from astroid import MANAGER, register_module_extender
+from astroid.builder import AstroidBuilder
+from astroid import nodes
+from astroid import parse
+
+
+def _looks_like_signal(node, signal_name="pyqtSignal"):
+ if "__class__" in node.instance_attrs:
+ try:
+ cls = node.instance_attrs["__class__"][0]
+ return cls.name == signal_name
+ except AttributeError:
+ # return False if the cls does not have a name attribute
+ pass
+ return False
+
+
+def transform_pyqt_signal(node):
+ module = parse(
+ """
+ class pyqtSignal(object):
+ def connect(self, slot, type=None, no_receiver_check=False):
+ pass
+ def disconnect(self, slot):
+ pass
+ def emit(self, *args):
+ pass
+ """
+ )
+ signal_cls = module["pyqtSignal"]
+ node.instance_attrs["emit"] = signal_cls["emit"]
+ node.instance_attrs["disconnect"] = signal_cls["disconnect"]
+ node.instance_attrs["connect"] = signal_cls["connect"]
+
+
+def transform_pyside_signal(node):
+ module = parse(
+ """
+ class NotPySideSignal(object):
+ def connect(self, receiver, type=None):
+ pass
+ def disconnect(self, receiver):
+ pass
+ def emit(self, *args):
+ pass
+ """
+ )
+ signal_cls = module["NotPySideSignal"]
+ node.instance_attrs["connect"] = signal_cls["connect"]
+ node.instance_attrs["disconnect"] = signal_cls["disconnect"]
+ node.instance_attrs["emit"] = signal_cls["emit"]
+
+
+def pyqt4_qtcore_transform():
+ return AstroidBuilder(MANAGER).string_build(
+ """
+
+def SIGNAL(signal_name): pass
+
+class QObject(object):
+ def emit(self, signal): pass
+"""
+ )
+
+
+register_module_extender(MANAGER, "PyQt4.QtCore", pyqt4_qtcore_transform)
+MANAGER.register_transform(nodes.FunctionDef, transform_pyqt_signal, _looks_like_signal)
+MANAGER.register_transform(
+ nodes.ClassDef,
+ transform_pyside_signal,
+ lambda node: node.qname() == "PySide.QtCore.Signal",
+)
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_random.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_random.py
new file mode 100644
index 0000000..5ec858a
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_random.py
@@ -0,0 +1,75 @@
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+import random
+
+import astroid
+from astroid import helpers
+from astroid import MANAGER
+
+
+ACCEPTED_ITERABLES_FOR_SAMPLE = (astroid.List, astroid.Set, astroid.Tuple)
+
+
+def _clone_node_with_lineno(node, parent, lineno):
+ cls = node.__class__
+ other_fields = node._other_fields
+ _astroid_fields = node._astroid_fields
+ init_params = {"lineno": lineno, "col_offset": node.col_offset, "parent": parent}
+ postinit_params = {param: getattr(node, param) for param in _astroid_fields}
+ if other_fields:
+ init_params.update({param: getattr(node, param) for param in other_fields})
+ new_node = cls(**init_params)
+ if hasattr(node, "postinit") and _astroid_fields:
+ new_node.postinit(**postinit_params)
+ return new_node
+
+
+def infer_random_sample(node, context=None):
+ if len(node.args) != 2:
+ raise astroid.UseInferenceDefault
+
+ length = node.args[1]
+ if not isinstance(length, astroid.Const):
+ raise astroid.UseInferenceDefault
+ if not isinstance(length.value, int):
+ raise astroid.UseInferenceDefault
+
+ inferred_sequence = helpers.safe_infer(node.args[0], context=context)
+ if not inferred_sequence:
+ raise astroid.UseInferenceDefault
+
+ if not isinstance(inferred_sequence, ACCEPTED_ITERABLES_FOR_SAMPLE):
+ raise astroid.UseInferenceDefault
+
+ if length.value > len(inferred_sequence.elts):
+ # In this case, this will raise a ValueError
+ raise astroid.UseInferenceDefault
+
+ try:
+ elts = random.sample(inferred_sequence.elts, length.value)
+ except ValueError:
+ raise astroid.UseInferenceDefault
+
+ new_node = astroid.List(
+ lineno=node.lineno, col_offset=node.col_offset, parent=node.scope()
+ )
+ new_elts = [
+ _clone_node_with_lineno(elt, parent=new_node, lineno=new_node.lineno)
+ for elt in elts
+ ]
+ new_node.postinit(new_elts)
+ return iter((new_node,))
+
+
+def _looks_like_random_sample(node):
+ func = node.func
+ if isinstance(func, astroid.Attribute):
+ return func.attrname == "sample"
+ if isinstance(func, astroid.Name):
+ return func.name == "sample"
+ return False
+
+
+MANAGER.register_transform(
+ astroid.Call, astroid.inference_tip(infer_random_sample), _looks_like_random_sample
+)
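
A hedged usage sketch: importing astroid loads these brain plugins, so a literal `random.sample` call with constant arguments should be inferred as a list node of the requested length (the exact elements vary, since the transform really calls `random.sample`):

```python
import astroid

call = astroid.extract_node("import random; random.sample([1, 2, 3], 2)")
inferred = next(call.infer())
print(type(inferred).__name__)  # List
print(len(inferred.elts))       # 2
```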
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_re.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_re.py
new file mode 100644
index 0000000..c7ee51a
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_re.py
@@ -0,0 +1,36 @@
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+import sys
+import astroid
+
+PY36 = sys.version_info >= (3, 6)
+
+if PY36:
+ # Since Python 3.6 there is the RegexFlag enum
+ # where every entry will be exposed via updating globals()
+
+ def _re_transform():
+ return astroid.parse(
+ """
+ import sre_compile
+ ASCII = sre_compile.SRE_FLAG_ASCII
+ IGNORECASE = sre_compile.SRE_FLAG_IGNORECASE
+ LOCALE = sre_compile.SRE_FLAG_LOCALE
+ UNICODE = sre_compile.SRE_FLAG_UNICODE
+ MULTILINE = sre_compile.SRE_FLAG_MULTILINE
+ DOTALL = sre_compile.SRE_FLAG_DOTALL
+ VERBOSE = sre_compile.SRE_FLAG_VERBOSE
+ A = ASCII
+ I = IGNORECASE
+ L = LOCALE
+ U = UNICODE
+ M = MULTILINE
+ S = DOTALL
+ X = VERBOSE
+ TEMPLATE = sre_compile.SRE_FLAG_TEMPLATE
+ T = TEMPLATE
+ DEBUG = sre_compile.SRE_FLAG_DEBUG
+ """
+ )
+
+ astroid.register_module_extender(astroid.MANAGER, "re", _re_transform)
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_six.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_six.py
new file mode 100644
index 0000000..b342fbf
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_six.py
@@ -0,0 +1,200 @@
+# Copyright (c) 2014-2016, 2018 Claudiu Popa
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2018 Bryce Guinta
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+
+"""Astroid hooks for six module."""
+
+from textwrap import dedent
+
+from astroid import MANAGER, register_module_extender
+from astroid.builder import AstroidBuilder
+from astroid.exceptions import (
+ AstroidBuildingError,
+ InferenceError,
+ AttributeInferenceError,
+)
+from astroid import nodes
+
+
+SIX_ADD_METACLASS = "six.add_metaclass"
+
+
+def _indent(text, prefix, predicate=None):
+ """Adds 'prefix' to the beginning of selected lines in 'text'.
+
+ If 'predicate' is provided, 'prefix' will only be added to the lines
+ where 'predicate(line)' is True. If 'predicate' is not provided,
+ it will default to adding 'prefix' to all non-empty lines that do not
+ consist solely of whitespace characters.
+ """
+ if predicate is None:
+ predicate = lambda line: line.strip()
+
+ def prefixed_lines():
+ for line in text.splitlines(True):
+ yield prefix + line if predicate(line) else line
+
+ return "".join(prefixed_lines())
+
+
+_IMPORTS = """
+import _io
+cStringIO = _io.StringIO
+filter = filter
+from itertools import filterfalse
+input = input
+from sys import intern
+map = map
+range = range
+from imp import reload as reload_module
+from functools import reduce
+from shlex import quote as shlex_quote
+from io import StringIO
+from collections import UserDict, UserList, UserString
+xrange = range
+zip = zip
+from itertools import zip_longest
+import builtins
+import configparser
+import copyreg
+import _dummy_thread
+import http.cookiejar as http_cookiejar
+import http.cookies as http_cookies
+import html.entities as html_entities
+import html.parser as html_parser
+import http.client as http_client
+import http.server as http_server
+BaseHTTPServer = CGIHTTPServer = SimpleHTTPServer = http.server
+import pickle as cPickle
+import queue
+import reprlib
+import socketserver
+import _thread
+import winreg
+import xmlrpc.server as xmlrpc_server
+import xmlrpc.client as xmlrpc_client
+import urllib.robotparser as urllib_robotparser
+import email.mime.multipart as email_mime_multipart
+import email.mime.nonmultipart as email_mime_nonmultipart
+import email.mime.text as email_mime_text
+import email.mime.base as email_mime_base
+import urllib.parse as urllib_parse
+import urllib.error as urllib_error
+import tkinter
+import tkinter.dialog as tkinter_dialog
+import tkinter.filedialog as tkinter_filedialog
+import tkinter.scrolledtext as tkinter_scrolledtext
+import tkinter.simpledialog as tkinder_simpledialog
+import tkinter.tix as tkinter_tix
+import tkinter.ttk as tkinter_ttk
+import tkinter.constants as tkinter_constants
+import tkinter.dnd as tkinter_dnd
+import tkinter.colorchooser as tkinter_colorchooser
+import tkinter.commondialog as tkinter_commondialog
+import tkinter.filedialog as tkinter_tkfiledialog
+import tkinter.font as tkinter_font
+import tkinter.messagebox as tkinter_messagebox
+import urllib
+import urllib.request as urllib_request
+import urllib.robotparser as urllib_robotparser
+import urllib.parse as urllib_parse
+import urllib.error as urllib_error
+"""
+
+
+def six_moves_transform():
+ code = dedent(
+ """
+ class Moves(object):
+ {}
+ moves = Moves()
+ """
+ ).format(_indent(_IMPORTS, " "))
+ module = AstroidBuilder(MANAGER).string_build(code)
+ module.name = "six.moves"
+ return module
+
+
+def _six_fail_hook(modname):
+ """Fix six.moves imports due to the dynamic nature of this
+ class.
+
+ Construct a pseudo-module which contains all the necessary imports
+ for six
+
+ :param modname: Name of failed module
+ :type modname: str
+
+ :return: An astroid module
+ :rtype: nodes.Module
+ """
+
+ attribute_of = modname != "six.moves" and modname.startswith("six.moves")
+ if modname != "six.moves" and not attribute_of:
+ raise AstroidBuildingError(modname=modname)
+ module = AstroidBuilder(MANAGER).string_build(_IMPORTS)
+ module.name = "six.moves"
+ if attribute_of:
+ # Facilitate import of submodules in Moves
+ start_index = len(module.name)
+ attribute = modname[start_index:].lstrip(".").replace(".", "_")
+ try:
+ import_attr = module.getattr(attribute)[0]
+ except AttributeInferenceError:
+ raise AstroidBuildingError(modname=modname)
+ if isinstance(import_attr, nodes.Import):
+ submodule = MANAGER.ast_from_module_name(import_attr.names[0][0])
+ return submodule
+ # Let dummy submodule imports pass through
+ # This will cause an Uninferable result, which is okay
+ return module
+
+
+def _looks_like_decorated_with_six_add_metaclass(node):
+ if not node.decorators:
+ return False
+
+ for decorator in node.decorators.nodes:
+ if not isinstance(decorator, nodes.Call):
+ continue
+ if decorator.func.as_string() == SIX_ADD_METACLASS:
+ return True
+ return False
+
+
+def transform_six_add_metaclass(node):
+ """Check if the given class node is decorated with *six.add_metaclass*
+
+ If so, inject its argument as the metaclass of the underlying class.
+ """
+ if not node.decorators:
+ return
+
+ for decorator in node.decorators.nodes:
+ if not isinstance(decorator, nodes.Call):
+ continue
+
+ try:
+ func = next(decorator.func.infer())
+ except InferenceError:
+ continue
+ if func.qname() == SIX_ADD_METACLASS and decorator.args:
+ metaclass = decorator.args[0]
+ node._metaclass = metaclass
+ return node
+
+
+register_module_extender(MANAGER, "six", six_moves_transform)
+register_module_extender(
+ MANAGER, "requests.packages.urllib3.packages.six", six_moves_transform
+)
+MANAGER.register_failed_import_hook(_six_fail_hook)
+MANAGER.register_transform(
+ nodes.ClassDef,
+ transform_six_add_metaclass,
+ _looks_like_decorated_with_six_add_metaclass,
+)
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_ssl.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_ssl.py
new file mode 100644
index 0000000..a42e04c
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_ssl.py
@@ -0,0 +1,73 @@
+# Copyright (c) 2016 Claudiu Popa
+# Copyright (c) 2016 Ceridwen
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Astroid hooks for the ssl library."""
+
+from astroid import MANAGER, register_module_extender
+from astroid.builder import AstroidBuilder
+from astroid import nodes
+from astroid import parse
+
+
+def ssl_transform():
+ return parse(
+ """
+ from _ssl import OPENSSL_VERSION_NUMBER, OPENSSL_VERSION_INFO, OPENSSL_VERSION
+ from _ssl import _SSLContext, MemoryBIO
+ from _ssl import (
+ SSLError, SSLZeroReturnError, SSLWantReadError, SSLWantWriteError,
+ SSLSyscallError, SSLEOFError,
+ )
+ from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
+ from _ssl import txt2obj as _txt2obj, nid2obj as _nid2obj
+ from _ssl import RAND_status, RAND_add, RAND_bytes, RAND_pseudo_bytes
+ try:
+ from _ssl import RAND_egd
+ except ImportError:
+ # LibreSSL does not provide RAND_egd
+ pass
+ from _ssl import (OP_ALL, OP_CIPHER_SERVER_PREFERENCE,
+ OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3,
+ OP_NO_TLSv1, OP_NO_TLSv1_1, OP_NO_TLSv1_2,
+ OP_SINGLE_DH_USE, OP_SINGLE_ECDH_USE)
+
+ from _ssl import (ALERT_DESCRIPTION_ACCESS_DENIED, ALERT_DESCRIPTION_BAD_CERTIFICATE,
+ ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE,
+ ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE,
+ ALERT_DESCRIPTION_BAD_RECORD_MAC,
+ ALERT_DESCRIPTION_CERTIFICATE_EXPIRED,
+ ALERT_DESCRIPTION_CERTIFICATE_REVOKED,
+ ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN,
+ ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE,
+ ALERT_DESCRIPTION_CLOSE_NOTIFY, ALERT_DESCRIPTION_DECODE_ERROR,
+ ALERT_DESCRIPTION_DECOMPRESSION_FAILURE,
+ ALERT_DESCRIPTION_DECRYPT_ERROR,
+ ALERT_DESCRIPTION_HANDSHAKE_FAILURE,
+ ALERT_DESCRIPTION_ILLEGAL_PARAMETER,
+ ALERT_DESCRIPTION_INSUFFICIENT_SECURITY,
+ ALERT_DESCRIPTION_INTERNAL_ERROR,
+ ALERT_DESCRIPTION_NO_RENEGOTIATION,
+ ALERT_DESCRIPTION_PROTOCOL_VERSION,
+ ALERT_DESCRIPTION_RECORD_OVERFLOW,
+ ALERT_DESCRIPTION_UNEXPECTED_MESSAGE,
+ ALERT_DESCRIPTION_UNKNOWN_CA,
+ ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY,
+ ALERT_DESCRIPTION_UNRECOGNIZED_NAME,
+ ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE,
+ ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION,
+ ALERT_DESCRIPTION_USER_CANCELLED)
+ from _ssl import (SSL_ERROR_EOF, SSL_ERROR_INVALID_ERROR_CODE, SSL_ERROR_SSL,
+ SSL_ERROR_SYSCALL, SSL_ERROR_WANT_CONNECT, SSL_ERROR_WANT_READ,
+ SSL_ERROR_WANT_WRITE, SSL_ERROR_WANT_X509_LOOKUP, SSL_ERROR_ZERO_RETURN)
+ from _ssl import VERIFY_CRL_CHECK_CHAIN, VERIFY_CRL_CHECK_LEAF, VERIFY_DEFAULT, VERIFY_X509_STRICT
+ from _ssl import HAS_SNI, HAS_ECDH, HAS_NPN, HAS_ALPN
+ from _ssl import _OPENSSL_API_VERSION
+ from _ssl import PROTOCOL_SSLv23, PROTOCOL_TLSv1, PROTOCOL_TLSv1_1, PROTOCOL_TLSv1_2
+ """
+ )
+
+
+register_module_extender(MANAGER, "ssl", ssl_transform)
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_subprocess.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_subprocess.py
new file mode 100644
index 0000000..078b79f
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_subprocess.py
@@ -0,0 +1,107 @@
+# Copyright (c) 2016-2017 Claudiu Popa
+# Copyright (c) 2017 Hugo
+# Copyright (c) 2018 Bryce Guinta
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+import sys
+import textwrap
+
+import six
+
+import astroid
+
+
+PY34 = sys.version_info >= (3, 4)
+PY36 = sys.version_info >= (3, 6)
+PY33 = sys.version_info >= (3, 3)
+
+
+def _subprocess_transform():
+ if six.PY3:
+ communicate = (bytes("string", "ascii"), bytes("string", "ascii"))
+ communicate_signature = "def communicate(self, input=None, timeout=None)"
+ if PY36:
+ init = """
+ def __init__(self, args, bufsize=0, executable=None,
+ stdin=None, stdout=None, stderr=None,
+ preexec_fn=None, close_fds=False, shell=False,
+ cwd=None, env=None, universal_newlines=False,
+ startupinfo=None, creationflags=0, restore_signals=True,
+ start_new_session=False, pass_fds=(), *,
+ encoding=None, errors=None):
+ pass
+ """
+ else:
+ init = """
+ def __init__(self, args, bufsize=0, executable=None,
+ stdin=None, stdout=None, stderr=None,
+ preexec_fn=None, close_fds=False, shell=False,
+ cwd=None, env=None, universal_newlines=False,
+ startupinfo=None, creationflags=0, restore_signals=True,
+ start_new_session=False, pass_fds=()):
+ pass
+ """
+ else:
+ communicate = ("string", "string")
+ communicate_signature = "def communicate(self, input=None)"
+ init = """
+ def __init__(self, args, bufsize=0, executable=None,
+ stdin=None, stdout=None, stderr=None,
+ preexec_fn=None, close_fds=False, shell=False,
+ cwd=None, env=None, universal_newlines=False,
+ startupinfo=None, creationflags=0):
+ pass
+ """
+ if PY34:
+ wait_signature = "def wait(self, timeout=None)"
+ else:
+ wait_signature = "def wait(self)"
+ if six.PY3:
+ ctx_manager = """
+ def __enter__(self): return self
+ def __exit__(self, *args): pass
+ """
+ else:
+ ctx_manager = ""
+ py3_args = ""
+ if PY33:
+ py3_args = "args = []"
+ code = textwrap.dedent(
+ """
+ class Popen(object):
+ returncode = pid = 0
+ stdin = stdout = stderr = file()
+ %(py3_args)s
+
+ %(communicate_signature)s:
+ return %(communicate)r
+ %(wait_signature)s:
+ return self.returncode
+ def poll(self):
+ return self.returncode
+ def send_signal(self, signal):
+ pass
+ def terminate(self):
+ pass
+ def kill(self):
+ pass
+ %(ctx_manager)s
+ """
+ % {
+ "communicate": communicate,
+ "communicate_signature": communicate_signature,
+ "wait_signature": wait_signature,
+ "ctx_manager": ctx_manager,
+ "py3_args": py3_args,
+ }
+ )
+
+ init_lines = textwrap.dedent(init).splitlines()
+ indented_init = "\n".join(" " * 4 + line for line in init_lines)
+ code += indented_init
+ return astroid.parse(code)
+
+
+astroid.register_module_extender(astroid.MANAGER, "subprocess", _subprocess_transform)
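
For orientation, on a recent Python 3 (3.6+) the substitutions above expand the template to roughly the following stub; the class is only parsed by astroid, never executed, so the Python 2-only `file()` call is harmless:

```python
class Popen(object):
    returncode = pid = 0
    stdin = stdout = stderr = file()
    args = []

    def communicate(self, input=None, timeout=None):
        return (b'string', b'string')
    def wait(self, timeout=None):
        return self.returncode
    def poll(self):
        return self.returncode
    def send_signal(self, signal):
        pass
    def terminate(self):
        pass
    def kill(self):
        pass
    def __enter__(self): return self
    def __exit__(self, *args): pass

    def __init__(self, args, bufsize=0, executable=None,
                 stdin=None, stdout=None, stderr=None,
                 preexec_fn=None, close_fds=False, shell=False,
                 cwd=None, env=None, universal_newlines=False,
                 startupinfo=None, creationflags=0, restore_signals=True,
                 start_new_session=False, pass_fds=(), *,
                 encoding=None, errors=None):
        pass
```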
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_threading.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_threading.py
new file mode 100644
index 0000000..77f5bde
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_threading.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2016 Claudiu Popa
+# Copyright (c) 2017 Łukasz Rogalski
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+import astroid
+
+
+def _thread_transform():
+ return astroid.parse(
+ """
+ class lock(object):
+ def acquire(self, blocking=True, timeout=-1):
+ pass
+ def release(self):
+ pass
+ def __enter__(self):
+ return True
+ def __exit__(self, *args):
+ pass
+
+ def Lock():
+ return lock()
+ """
+ )
+
+
+astroid.register_module_extender(astroid.MANAGER, "threading", _thread_transform)
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_typing.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_typing.py
new file mode 100644
index 0000000..cd9a2f4
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_typing.py
@@ -0,0 +1,92 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2017-2018 Claudiu Popa
+# Copyright (c) 2017 Łukasz Rogalski
+# Copyright (c) 2017 David Euresti
+# Copyright (c) 2018 Bryce Guinta
+
+"""Astroid hooks for typing.py support."""
+import typing
+
+from astroid import (
+ MANAGER,
+ UseInferenceDefault,
+ extract_node,
+ inference_tip,
+ nodes,
+ InferenceError,
+)
+
+
+TYPING_NAMEDTUPLE_BASENAMES = {"NamedTuple", "typing.NamedTuple"}
+TYPING_TYPEVARS = {"TypeVar", "NewType"}
+TYPING_TYPEVARS_QUALIFIED = {"typing.TypeVar", "typing.NewType"}
+TYPING_TYPE_TEMPLATE = """
+class Meta(type):
+ def __getitem__(self, item):
+ return self
+
+class {0}(metaclass=Meta):
+ pass
+"""
+TYPING_MEMBERS = set(typing.__all__)
+
+
+def looks_like_typing_typevar_or_newtype(node):
+ func = node.func
+ if isinstance(func, nodes.Attribute):
+ return func.attrname in TYPING_TYPEVARS
+ if isinstance(func, nodes.Name):
+ return func.name in TYPING_TYPEVARS
+ return False
+
+
+def infer_typing_typevar_or_newtype(node, context=None):
+ """Infer a typing.TypeVar(...) or typing.NewType(...) call"""
+ try:
+ func = next(node.func.infer(context=context))
+ except InferenceError as exc:
+ raise UseInferenceDefault from exc
+
+ if func.qname() not in TYPING_TYPEVARS_QUALIFIED:
+ raise UseInferenceDefault
+ if not node.args:
+ raise UseInferenceDefault
+
+ typename = node.args[0].as_string().strip("'")
+ node = extract_node(TYPING_TYPE_TEMPLATE.format(typename))
+ return node.infer(context=context)
+
+
+def _looks_like_typing_subscript(node):
+ """Try to figure out if a Subscript node *might* be a typing-related subscript"""
+ if isinstance(node, nodes.Name):
+ return node.name in TYPING_MEMBERS
+ elif isinstance(node, nodes.Attribute):
+ return node.attrname in TYPING_MEMBERS
+ elif isinstance(node, nodes.Subscript):
+ return _looks_like_typing_subscript(node.value)
+ return False
+
+
+def infer_typing_attr(node, context=None):
+ """Infer a typing.X[...] subscript"""
+ try:
+ value = next(node.value.infer())
+ except InferenceError as exc:
+ raise UseInferenceDefault from exc
+
+ if not value.qname().startswith("typing."):
+ raise UseInferenceDefault
+
+ node = extract_node(TYPING_TYPE_TEMPLATE.format(value.qname().split(".")[-1]))
+ return node.infer(context=context)
+
+
+MANAGER.register_transform(
+ nodes.Call,
+ inference_tip(infer_typing_typevar_or_newtype),
+ looks_like_typing_typevar_or_newtype,
+)
+MANAGER.register_transform(
+ nodes.Subscript, inference_tip(infer_typing_attr), _looks_like_typing_subscript
+)
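
A hedged sketch of the TypeVar tip in action (importing astroid loads the brain plugins); the result is a synthetic class built from `TYPING_TYPE_TEMPLATE` rather than the opaque runtime object:

```python
import astroid

node = astroid.extract_node("from typing import TypeVar; TypeVar('T')")
inferred = next(node.infer())
print(inferred.name)  # 'T' -- the synthetic class generated by the tip above
```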
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_uuid.py b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_uuid.py
new file mode 100644
index 0000000..8bda631
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/brain/brain_uuid.py
@@ -0,0 +1,20 @@
+# Copyright (c) 2017 Claudiu Popa
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Astroid hooks for the UUID module."""
+
+
+from astroid import MANAGER
+from astroid import nodes
+
+
+def _patch_uuid_class(node):
+ # The .int member is patched using __dict__
+ node.locals["int"] = [nodes.Const(0, parent=node)]
+
+
+MANAGER.register_transform(
+ nodes.ClassDef, _patch_uuid_class, lambda node: node.qname() == "uuid.UUID"
+)
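
And a matching sketch for the UUID patch; the exact repr depends on the astroid version, but the attribute should resolve to the injected constant rather than failing:

```python
import astroid

node = astroid.extract_node(
    "import uuid; uuid.UUID('12345678123456781234567812345678').int"
)
print(next(node.infer()))  # typically the Const(0) placeholder injected above
```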
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/builder.py b/basic python programmes/venv/Lib/site-packages/astroid/builder.py
new file mode 100644
index 0000000..ac71093
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/builder.py
@@ -0,0 +1,435 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2013 Phil Schaf
+# Copyright (c) 2014-2018 Claudiu Popa
+# Copyright (c) 2014-2015 Google, Inc.
+# Copyright (c) 2014 Alexander Presnyakov
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2016 Derek Gustafson
+# Copyright (c) 2017 Łukasz Rogalski
+# Copyright (c) 2018 Anthony Sottile
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""The AstroidBuilder makes astroid from living object and / or from _ast
+
+The builder is not thread safe and can't be used to parse different sources
+at the same time.
+"""
+
+import os
+import textwrap
+from tokenize import detect_encoding
+
+from astroid._ast import _parse
+from astroid import bases
+from astroid import exceptions
+from astroid import manager
+from astroid import modutils
+from astroid import raw_building
+from astroid import rebuilder
+from astroid import nodes
+from astroid import util
+
+# The name of the transient function that is used to
+# wrap expressions to be extracted when calling
+# extract_node.
+_TRANSIENT_FUNCTION = "__"
+
+# The comment used to select a statement to be extracted
+# when calling extract_node.
+_STATEMENT_SELECTOR = "#@"
+
+MANAGER = manager.AstroidManager()
+
+
+def open_source_file(filename):
+ with open(filename, "rb") as byte_stream:
+ encoding = detect_encoding(byte_stream.readline)[0]
+ stream = open(filename, "r", newline=None, encoding=encoding)
+ data = stream.read()
+ return stream, encoding, data
+
+
+def _can_assign_attr(node, attrname):
+ try:
+ slots = node.slots()
+ except NotImplementedError:
+ pass
+ else:
+ if slots and attrname not in {slot.value for slot in slots}:
+ return False
+ return True
+
+
+class AstroidBuilder(raw_building.InspectBuilder):
+ """Class for building an astroid tree from source code or from a live module.
+
+ The param *manager* specifies the manager class which should be used.
+ If no manager is given, then the default one will be used. The
+ param *apply_transforms* determines if the transforms should be
+ applied after the tree was built from source or from a live object,
+ by default being True.
+ """
+
+ # pylint: disable=redefined-outer-name
+ def __init__(self, manager=None, apply_transforms=True):
+ super(AstroidBuilder, self).__init__()
+ self._manager = manager or MANAGER
+ self._apply_transforms = apply_transforms
+
+ def module_build(self, module, modname=None):
+ """Build an astroid from a living module instance."""
+ node = None
+ path = getattr(module, "__file__", None)
+ if path is not None:
+ path_, ext = os.path.splitext(modutils._path_from_filename(path))
+ if ext in (".py", ".pyc", ".pyo") and os.path.exists(path_ + ".py"):
+ node = self.file_build(path_ + ".py", modname)
+ if node is None:
+ # this is a built-in module
+ # get a partial representation by introspection
+ node = self.inspect_build(module, modname=modname, path=path)
+ if self._apply_transforms:
+ # We have to handle transformation by ourselves since the
+ # rebuilder isn't called for builtin nodes
+ node = self._manager.visit_transforms(node)
+ return node
+
+ def file_build(self, path, modname=None):
+ """Build astroid from a source code file (i.e. from an ast)
+
+ *path* is expected to be a python source file
+ """
+ try:
+ stream, encoding, data = open_source_file(path)
+ except IOError as exc:
+ raise exceptions.AstroidBuildingError(
+ "Unable to load file {path}:\n{error}",
+ modname=modname,
+ path=path,
+ error=exc,
+ ) from exc
+ except (SyntaxError, LookupError) as exc:
+ raise exceptions.AstroidSyntaxError(
+ "Python 3 encoding specification error or unknown encoding:\n"
+ "{error}",
+ modname=modname,
+ path=path,
+ error=exc,
+ ) from exc
+ except UnicodeError as exc: # wrong encoding
+ # detect_encoding returns utf-8 if no encoding specified
+ raise exceptions.AstroidBuildingError(
+ "Wrong or no encoding specified for {filename}.", filename=path
+ ) from exc
+ with stream:
+ # get module name if necessary
+ if modname is None:
+ try:
+ modname = ".".join(modutils.modpath_from_file(path))
+ except ImportError:
+ modname = os.path.splitext(os.path.basename(path))[0]
+ # build astroid representation
+ module = self._data_build(data, modname, path)
+ return self._post_build(module, encoding)
+
+ def string_build(self, data, modname="", path=None):
+ """Build astroid from source code string."""
+ module = self._data_build(data, modname, path)
+ module.file_bytes = data.encode("utf-8")
+ return self._post_build(module, "utf-8")
+
+ def _post_build(self, module, encoding):
+ """Handles encoding and delayed nodes after a module has been built"""
+ module.file_encoding = encoding
+ self._manager.cache_module(module)
+ # post tree building steps after we stored the module in the cache:
+ for from_node in module._import_from_nodes:
+ if from_node.modname == "__future__":
+ for symbol, _ in from_node.names:
+ module.future_imports.add(symbol)
+ self.add_from_names_to_locals(from_node)
+ # handle delayed assattr nodes
+ for delayed in module._delayed_assattr:
+ self.delayed_assattr(delayed)
+
+ # Visit the transforms
+ if self._apply_transforms:
+ module = self._manager.visit_transforms(module)
+ return module
+
+ def _data_build(self, data, modname, path):
+ """Build tree node from data and add some informations"""
+ try:
+ node = _parse(data + "\n")
+ except (TypeError, ValueError, SyntaxError) as exc:
+ raise exceptions.AstroidSyntaxError(
+ "Parsing Python code failed:\n{error}",
+ source=data,
+ modname=modname,
+ path=path,
+ error=exc,
+ ) from exc
+ if path is not None:
+ node_file = os.path.abspath(path)
+ else:
+ node_file = "<?>"
+ if modname.endswith(".__init__"):
+ modname = modname[:-9]
+ package = True
+ else:
+ package = (
+ path is not None
+ and os.path.splitext(os.path.basename(path))[0] == "__init__"
+ )
+ builder = rebuilder.TreeRebuilder(self._manager)
+ module = builder.visit_module(node, modname, node_file, package)
+ module._import_from_nodes = builder._import_from_nodes
+ module._delayed_assattr = builder._delayed_assattr
+ return module
+
+ def add_from_names_to_locals(self, node):
+ """Store imported names to the locals
+
+ Re-sort the locals if coming from a delayed node
+ """
+ _key_func = lambda node: node.fromlineno
+
+ def sort_locals(my_list):
+ my_list.sort(key=_key_func)
+
+ for (name, asname) in node.names:
+ if name == "*":
+ try:
+ imported = node.do_import_module()
+ except exceptions.AstroidBuildingError:
+ continue
+ for name in imported.public_names():
+ node.parent.set_local(name, node)
+ sort_locals(node.parent.scope().locals[name])
+ else:
+ node.parent.set_local(asname or name, node)
+ sort_locals(node.parent.scope().locals[asname or name])
+
+ def delayed_assattr(self, node):
+ """Visit a AssAttr node
+
+ This adds the name to locals and handles member definitions.
+ """
+ try:
+ frame = node.frame()
+ for inferred in node.expr.infer():
+ if inferred is util.Uninferable:
+ continue
+ try:
+ if inferred.__class__ is bases.Instance:
+ inferred = inferred._proxied
+ iattrs = inferred.instance_attrs
+ if not _can_assign_attr(inferred, node.attrname):
+ continue
+ elif isinstance(inferred, bases.Instance):
+ # Const, Tuple, ... we may be wrong, may be not, but
+ # anyway we don't want to pollute builtin's namespace
+ continue
+ elif inferred.is_function:
+ iattrs = inferred.instance_attrs
+ else:
+ iattrs = inferred.locals
+ except AttributeError:
+ # XXX log error
+ continue
+ values = iattrs.setdefault(node.attrname, [])
+ if node in values:
+ continue
+ # get assign in __init__ first XXX useful ?
+ if (
+ frame.name == "__init__"
+ and values
+ and values[0].frame().name != "__init__"
+ ):
+ values.insert(0, node)
+ else:
+ values.append(node)
+ except exceptions.InferenceError:
+ pass
+
+
+def build_namespace_package_module(name, path):
+ return nodes.Module(name, doc="", path=path, package=True)
+
+
+def parse(code, module_name="", path=None, apply_transforms=True):
+ """Parses a source string in order to obtain an astroid AST from it
+
+ :param str code: The code for the module.
+ :param str module_name: The name for the module, if any
+ :param str path: The path for the module
+ :param bool apply_transforms:
+ Apply the transforms for the given code. Set this to False if you
+ don't want the default transforms to be applied.
+ """
+ code = textwrap.dedent(code)
+ builder = AstroidBuilder(manager=MANAGER, apply_transforms=apply_transforms)
+ return builder.string_build(code, modname=module_name, path=path)
+
+
+def _extract_expressions(node):
+ """Find expressions in a call to _TRANSIENT_FUNCTION and extract them.
+
+ The function walks the AST recursively to search for expressions that
+ are wrapped into a call to _TRANSIENT_FUNCTION. If it finds such an
+ expression, it completely removes the function call node from the tree,
+ replacing it by the wrapped expression inside the parent.
+
+ :param node: An astroid node.
+ :type node: astroid.bases.NodeNG
+ :yields: The sequence of wrapped expressions found in the modified tree.
+ """
+ if (
+ isinstance(node, nodes.Call)
+ and isinstance(node.func, nodes.Name)
+ and node.func.name == _TRANSIENT_FUNCTION
+ ):
+ real_expr = node.args[0]
+ real_expr.parent = node.parent
+ # Search for node in all _astng_fields (the fields checked when
+ # get_children is called) of its parent. Some of those fields may
+ # be lists or tuples, in which case the elements need to be checked.
+ # When we find it, replace it by real_expr, so that the AST looks
+ # like no call to _TRANSIENT_FUNCTION ever took place.
+ for name in node.parent._astroid_fields:
+ child = getattr(node.parent, name)
+ if isinstance(child, (list, tuple)):
+ for idx, compound_child in enumerate(child):
+ if compound_child is node:
+ child[idx] = real_expr
+ elif child is node:
+ setattr(node.parent, name, real_expr)
+ yield real_expr
+ else:
+ for child in node.get_children():
+ yield from _extract_expressions(child)
+
+
+def _find_statement_by_line(node, line):
+ """Extracts the statement on a specific line from an AST.
+
+ If the line number of node matches line, it will be returned;
+ otherwise its children are iterated and the function is called
+ recursively.
+
+ :param node: An astroid node.
+ :type node: astroid.bases.NodeNG
+ :param line: The line number of the statement to extract.
+ :type line: int
+ :returns: The statement on the line, or None if no statement for the line
+ can be found.
+ :rtype: astroid.bases.NodeNG or None
+ """
+ if isinstance(node, (nodes.ClassDef, nodes.FunctionDef)):
+ # This is an inaccuracy in the AST: the nodes that can be
+ # decorated do not carry explicit information on which line
+ # the actual definition (class/def) starts, but .fromlineno seems to
+ # be close enough.
+ node_line = node.fromlineno
+ else:
+ node_line = node.lineno
+
+ if node_line == line:
+ return node
+
+ for child in node.get_children():
+ result = _find_statement_by_line(child, line)
+ if result:
+ return result
+
+ return None
+
+
+def extract_node(code, module_name=""):
+ """Parses some Python code as a module and extracts a designated AST node.
+
+ Statements:
+ To extract one or more statement nodes, append #@ to the end of the line
+
+ Examples:
+ >>> def x():
+ >>> def y():
+ >>> return 1 #@
+
+ The return statement will be extracted.
+
+ >>> class X(object):
+ >>> def meth(self): #@
+ >>> pass
+
+ The function object 'meth' will be extracted.
+
+ Expressions:
+ To extract arbitrary expressions, surround them with the fake
+ function call __(...). After parsing, the surrounded expression
+ will be returned and the whole AST (accessible via the returned
+ node's parent attribute) will look like the function call was
+ never there in the first place.
+
+ Examples:
+ >>> a = __(1)
+
+ The const node will be extracted.
+
+ >>> def x(d=__(foo.bar)): pass
+
+ The node containing the default argument will be extracted.
+
+ >>> def foo(a, b):
+ >>> return 0 < __(len(a)) < b
+
+ The node containing the function call 'len' will be extracted.
+
+ If no statements or expressions are selected, the last toplevel
+ statement will be returned.
+
+ If the selected statement is a discard statement, (i.e. an expression
+ turned into a statement), the wrapped expression is returned instead.
+
+ For convenience, singleton lists are unpacked.
+
+ :param str code: A piece of Python code that is parsed as
+ a module. Will be passed through textwrap.dedent first.
+ :param str module_name: The name of the module.
+ :returns: The designated node from the parse tree, or a list of nodes.
+ :rtype: astroid.bases.NodeNG, or a list of nodes.
+ """
+
+ def _extract(node):
+ if isinstance(node, nodes.Expr):
+ return node.value
+
+ return node
+
+ requested_lines = []
+ for idx, line in enumerate(code.splitlines()):
+ if line.strip().endswith(_STATEMENT_SELECTOR):
+ requested_lines.append(idx + 1)
+
+ tree = parse(code, module_name=module_name)
+ if not tree.body:
+ raise ValueError("Empty tree, cannot extract from it")
+
+ extracted = []
+ if requested_lines:
+ extracted = [_find_statement_by_line(tree, line) for line in requested_lines]
+
+ # Modifies the tree.
+ extracted.extend(_extract_expressions(tree))
+
+ if not extracted:
+ extracted.append(tree.body[-1])
+
+ extracted = [_extract(node) for node in extracted]
+ if len(extracted) == 1:
+ return extracted[0]
+ return extracted
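
A short usage sketch of the two selection mechanisms documented above (the trailing `#@` marker and the `__()` wrapper):

```python
import astroid

# Select a whole statement with the trailing #@ marker.
assign = astroid.extract_node("""
def test():
    x = len("abc")  #@
""")
print(assign.as_string())  # x = len('abc')

# Select an arbitrary sub-expression by wrapping it in __( ... ).
call = astroid.extract_node("value = __(len('abc')) + 1")
print(call.as_string())  # len('abc')
```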
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/context.py b/basic python programmes/venv/Lib/site-packages/astroid/context.py
new file mode 100644
index 0000000..931bbf5
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/context.py
@@ -0,0 +1,173 @@
+# Copyright (c) 2015-2016, 2018 Claudiu Popa
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2018 Bryce Guinta
+# Copyright (c) 2018 Nick Drozd
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Various context related utilities, including inference and call contexts."""
+
+import pprint
+from typing import Optional
+
+
+class InferenceContext:
+ """Provide context for inference
+
+ Store already inferred nodes to save time
+ Account for already visited nodes to stop infinite recursion
+ """
+
+ __slots__ = (
+ "path",
+ "lookupname",
+ "callcontext",
+ "boundnode",
+ "inferred",
+ "extra_context",
+ )
+
+ def __init__(self, path=None, inferred=None):
+ self.path = path or set()
+ """
+ :type: set(tuple(NodeNG, optional(str)))
+
+ Path of visited nodes and their lookupname
+
+ Currently this key is ``(node, context.lookupname)``
+ """
+ self.lookupname = None
+ """
+ :type: optional[str]
+
+ The original name of the node
+
+ e.g.
+ foo = 1
+ The inference of 'foo' is nodes.Const(1) but the lookup name is 'foo'
+ """
+ self.callcontext = None
+ """
+ :type: optional[CallContext]
+
+ The call arguments and keywords for the given context
+ """
+ self.boundnode = None
+ """
+ :type: optional[NodeNG]
+
+ The bound node of the given context
+
+ e.g. the bound node of object.__new__(cls) is the object node
+ """
+ self.inferred = inferred or {}
+ """
+ :type: dict(seq, seq)
+
+ Inferred node contexts to their mapped results
+ Currently the key is ``(node, lookupname, callcontext, boundnode)``
+ and the value is tuple of the inferred results
+ """
+ self.extra_context = {}
+ """
+ :type: dict(NodeNG, Context)
+
+ Context that needs to be passed down through call stacks
+ for call arguments
+ """
+
+ def push(self, node):
+ """Push node into inference path
+
+ :return: True if node is already in context path else False
+ :rtype: bool
+
+ Allows one to see if the given node has already
+ been looked at for this inference context"""
+ name = self.lookupname
+ if (node, name) in self.path:
+ return True
+
+ self.path.add((node, name))
+ return False
+
+ def clone(self):
+ """Clone inference path
+
+ For example, each side of a binary operation (BinOp)
+ starts with the same context, but the contexts diverge as each side
+ is inferred, so the InferenceContext needs to be cloned"""
+ # XXX copy lookupname/callcontext ?
+ clone = InferenceContext(set(self.path), inferred=self.inferred)
+ clone.callcontext = self.callcontext
+ clone.boundnode = self.boundnode
+ clone.extra_context = self.extra_context
+ return clone
+
+ def cache_generator(self, key, generator):
+ """Cache result of generator into dictionary
+
+ Used to cache inference results"""
+ results = []
+ for result in generator:
+ results.append(result)
+ yield result
+
+ self.inferred[key] = tuple(results)
+
+ def __str__(self):
+ state = (
+ "%s=%s"
+ % (field, pprint.pformat(getattr(self, field), width=80 - len(field)))
+ for field in self.__slots__
+ )
+ return "%s(%s)" % (type(self).__name__, ",\n ".join(state))
+
+
+class CallContext:
+ """Holds information for a call site."""
+
+ __slots__ = ("args", "keywords")
+
+ def __init__(self, args, keywords=None):
+ """
+ :param List[NodeNG] args: Call positional arguments
+ :param Union[List[nodes.Keyword], None] keywords: Call keywords
+ """
+ self.args = args
+ if keywords:
+ keywords = [(arg.arg, arg.value) for arg in keywords]
+ else:
+ keywords = []
+ self.keywords = keywords
+
+
+def copy_context(context: Optional[InferenceContext]) -> InferenceContext:
+ """Clone a context if given, or return a fresh contexxt"""
+ if context is not None:
+ return context.clone()
+
+ return InferenceContext()
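+
+
+# Editor's sketch: copy_context is the usual entry point, so callers can pass
+# along whatever context they received and always get a usable one back.
+#
+#     ctx = copy_context(None)     # fresh InferenceContext
+#     ctx2 = copy_context(ctx)     # independent clone of an existing context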
+
+
+def bind_context_to_node(context, node):
+ """Give a context a boundnode
+ to retrieve the correct function name or attribute value
+ from further inference.
+
+ Do not use an existing context since the boundnode could then
+ be incorrectly propagated higher up in the call stack.
+
+ :param context: Context to use
+ :type context: Optional(context)
+
+ :param node: Node to do name lookups from
+ :type node: NodeNG
+
+ :returns: A new context
+ :rtype: InferenceContext
+ """
+ context = copy_context(context)
+ context.boundnode = node
+ return context
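+
+
+# Editor's sketch: binding keeps the caller's context untouched while giving
+# the new one a boundnode to resolve against (instance_node is hypothetical).
+#
+#     bound = bind_context_to_node(ctx, instance_node)
+#     # bound.boundnode is instance_node; ctx.boundnode is unchanged.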
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/decorators.py b/basic python programmes/venv/Lib/site-packages/astroid/decorators.py
new file mode 100644
index 0000000..433c230
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/decorators.py
@@ -0,0 +1,138 @@
+# Copyright (c) 2015-2016, 2018 Claudiu Popa
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2015 Florian Bruhin
+# Copyright (c) 2016 Derek Gustafson
+# Copyright (c) 2018 Nick Drozd
+# Copyright (c) 2018 Ashley Whetter
+# Copyright (c) 2018 HoverHell
+# Copyright (c) 2018 Bryce Guinta
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+""" A few useful function/method decorators."""
+
+import functools
+
+import wrapt
+
+from astroid import context as contextmod
+from astroid import exceptions
+from astroid import util
+
+
+@wrapt.decorator
+def cached(func, instance, args, kwargs):
+ """Simple decorator to cache result of method calls without args."""
+ cache = getattr(instance, "__cache", None)
+ if cache is None:
+ instance.__cache = cache = {}
+ try:
+ return cache[func]
+ except KeyError:
+ cache[func] = result = func(*args, **kwargs)
+ return result
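+
+
+# Editor's sketch: the cache lives on the instance, keyed by the wrapped
+# function, so repeated zero-argument calls reuse the first result.
+#
+#     class Node:
+#         @cached
+#         def ancestors_count(self):
+#             return expensive_walk(self)   # hypothetical expensive call, runs once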
+
+
+class cachedproperty:
+ """ Provides a cached property equivalent to the stacking of
+ @cached and @property, but more efficient.
+
+ After first usage, the <property name> becomes part of the object's
+ __dict__. Doing ``del obj.<property name>`` empties the cache.
+
+ Idea taken from the pyramid_ framework and the mercurial_ project.
+
+ .. _pyramid: http://pypi.python.org/pypi/pyramid
+ .. _mercurial: http://pypi.python.org/pypi/Mercurial
+ """
+
+ __slots__ = ("wrapped",)
+
+ def __init__(self, wrapped):
+ try:
+ wrapped.__name__
+ except AttributeError as exc:
+ raise TypeError("%s must have a __name__ attribute" % wrapped) from exc
+ self.wrapped = wrapped
+
+ @property
+ def __doc__(self):
+ doc = getattr(self.wrapped, "__doc__", None)
+ return "%s" % (
+ "\n%s" % doc if doc else ""
+ )
+
+ def __get__(self, inst, objtype=None):
+ if inst is None:
+ return self
+ val = self.wrapped(inst)
+ setattr(inst, self.wrapped.__name__, val)
+ return val
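+
+
+# Editor's sketch of the intended use:
+#
+#     class Example:
+#         @cachedproperty
+#         def value(self):
+#             return expensive()            # hypothetical; computed once per instance
+#
+# After the first access, `obj.value` is a plain attribute in obj.__dict__,
+# and `del obj.value` re-enables the computation.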
+
+
+def path_wrapper(func):
+ """return the given infer function wrapped to handle the path
+
+ Used to stop inference if the node has already been looked
+ at for a given `InferenceContext` to prevent infinite recursion
+ """
+
+ @functools.wraps(func)
+ def wrapped(node, context=None, _func=func, **kwargs):
+ """wrapper function handling context"""
+ if context is None:
+ context = contextmod.InferenceContext()
+ if context.push(node):
+ return None
+
+ yielded = set()
+ generator = _func(node, context, **kwargs)
+ try:
+ while True:
+ res = next(generator)
+ # unproxy only true instance, not const, tuple, dict...
+ if res.__class__.__name__ == "Instance":
+ ares = res._proxied
+ else:
+ ares = res
+ if ares not in yielded:
+ yield res
+ yielded.add(ares)
+ except StopIteration as error:
+ if error.args:
+ return error.args[0]
+ return None
+
+ return wrapped
+
+
+@wrapt.decorator
+def yes_if_nothing_inferred(func, instance, args, kwargs):
+ inferred = False
+ for node in func(*args, **kwargs):
+ inferred = True
+ yield node
+ if not inferred:
+ yield util.Uninferable
+
+
+@wrapt.decorator
+def raise_if_nothing_inferred(func, instance, args, kwargs):
+ inferred = False
+ try:
+ generator = func(*args, **kwargs)
+ while True:
+ yield next(generator)
+ inferred = True
+ except StopIteration as error:
+ if not inferred:
+ if error.args:
+ # pylint: disable=not-a-mapping
+ raise exceptions.InferenceError(**error.args[0])
+ else:
+ raise exceptions.InferenceError(
+ "StopIteration raised without any error information."
+ )
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/exceptions.py b/basic python programmes/venv/Lib/site-packages/astroid/exceptions.py
new file mode 100644
index 0000000..7e9d655
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/exceptions.py
@@ -0,0 +1,230 @@
+# Copyright (c) 2007, 2009-2010, 2013 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2014 Google, Inc.
+# Copyright (c) 2015-2018 Claudiu Popa
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2016 Derek Gustafson
+# Copyright (c) 2018 Bryce Guinta
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""this module contains exceptions used in the astroid library
+"""
+from astroid import util
+
+
+class AstroidError(Exception):
+ """base exception class for all astroid related exceptions
+
+ AstroidError and its subclasses are structured, intended to hold
+ objects representing state when the exception is thrown. Field
+ values are passed to the constructor as keyword-only arguments.
+ Each subclass has its own set of standard fields, but use your
+ best judgment to decide whether a specific exception instance
+ needs more or fewer fields for debugging. Field values may be
+ used to lazily generate the error message: self.message.format()
+ will be called with the field names and values supplied as keyword
+ arguments.
+ """
+
+ def __init__(self, message="", **kws):
+ super(AstroidError, self).__init__(message)
+ self.message = message
+ for key, value in kws.items():
+ setattr(self, key, value)
+
+ def __str__(self):
+ return self.message.format(**vars(self))
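+
+
+# Editor's illustration of the lazy formatting: extra keyword fields become
+# format arguments when the message is rendered.
+#
+#     err = AstroidError("bad node {node!r} in module {mod}", node="x", mod="m")
+#     str(err)    # -> "bad node 'x' in module m"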
+
+
+class AstroidBuildingError(AstroidError):
+ """exception class when we are unable to build an astroid representation
+
+ Standard attributes:
+ modname: Name of the module that AST construction failed for.
+ error: Exception raised during construction.
+ """
+
+ def __init__(self, message="Failed to import module {modname}.", **kws):
+ super(AstroidBuildingError, self).__init__(message, **kws)
+
+
+class AstroidImportError(AstroidBuildingError):
+ """Exception class used when a module can't be imported by astroid."""
+
+
+class TooManyLevelsError(AstroidImportError):
+ """Exception class which is raised when a relative import was beyond the top-level.
+
+ Standard attributes:
+ level: The level which was attempted.
+ name: the name of the module on which the relative import was attempted.
+ """
+
+ level = None
+ name = None
+
+ def __init__(
+ self,
+ message="Relative import with too many levels " "({level}) for module {name!r}",
+ **kws
+ ):
+ super(TooManyLevelsError, self).__init__(message, **kws)
+
+
+class AstroidSyntaxError(AstroidBuildingError):
+ """Exception class used when a module can't be parsed."""
+
+
+class NoDefault(AstroidError):
+ """raised by function's `default_value` method when an argument has
+ no default value
+
+ Standard attributes:
+ func: Function node.
+ name: Name of argument without a default.
+ """
+
+ func = None
+ name = None
+
+ def __init__(self, message="{func!r} has no default for {name!r}.", **kws):
+ super(NoDefault, self).__init__(message, **kws)
+
+
+class ResolveError(AstroidError):
+ """Base class of astroid resolution/inference error.
+
+ ResolveError is not intended to be raised.
+
+ Standard attributes:
+ context: InferenceContext object.
+ """
+
+ context = None
+
+
+class MroError(ResolveError):
+ """Error raised when there is a problem with method resolution of a class.
+
+ Standard attributes:
+ mros: A sequence of sequences containing ClassDef nodes.
+ cls: ClassDef node whose MRO resolution failed.
+ context: InferenceContext object.
+ """
+
+ mros = ()
+ cls = None
+
+ def __str__(self):
+ mro_names = ", ".join(
+ "({})".format(", ".join(b.name for b in m)) for m in self.mros
+ )
+ return self.message.format(mros=mro_names, cls=self.cls)
+
+
+class DuplicateBasesError(MroError):
+ """Error raised when there are duplicate bases in the same class bases."""
+
+
+class InconsistentMroError(MroError):
+ """Error raised when a class's MRO is inconsistent."""
+
+
+class SuperError(ResolveError):
+ """Error raised when there is a problem with a *super* call.
+
+ Standard attributes:
+ *super_*: The Super instance that raised the exception.
+ context: InferenceContext object.
+ """
+
+ super_ = None
+
+ def __str__(self):
+ return self.message.format(**vars(self.super_))
+
+
+class InferenceError(ResolveError):
+ """raised when we are unable to infer a node
+
+ Standard attributes:
+ node: The node inference was called on.
+ context: InferenceContext object.
+ """
+
+ node = None
+ context = None
+
+ def __init__(self, message="Inference failed for {node!r}.", **kws):
+ super(InferenceError, self).__init__(message, **kws)
+
+
+# Why does this inherit from InferenceError rather than ResolveError?
+# Changing it causes some inference tests to fail.
+class NameInferenceError(InferenceError):
+ """Raised when a name lookup fails, corresponds to NameError.
+
+ Standard attributes:
+ name: The name for which lookup failed, as a string.
+ scope: The node representing the scope in which the lookup occurred.
+ context: InferenceContext object.
+ """
+
+ name = None
+ scope = None
+
+ def __init__(self, message="{name!r} not found in {scope!r}.", **kws):
+ super(NameInferenceError, self).__init__(message, **kws)
+
+
+class AttributeInferenceError(ResolveError):
+ """Raised when an attribute lookup fails, corresponds to AttributeError.
+
+ Standard attributes:
+ target: The node for which lookup failed.
+ attribute: The attribute for which lookup failed, as a string.
+ context: InferenceContext object.
+ """
+
+ target = None
+ attribute = None
+
+ def __init__(self, message="{attribute!r} not found on {target!r}.", **kws):
+ super(AttributeInferenceError, self).__init__(message, **kws)
+
+
+class UseInferenceDefault(Exception):
+ """exception to be raised in custom inference function to indicate that it
+ should go back to the default behaviour
+ """
+
+
+class _NonDeducibleTypeHierarchy(Exception):
+ """Raised when is_subtype / is_supertype can't deduce the relation between two types."""
+
+
+class AstroidIndexError(AstroidError):
+ """Raised when an Indexable / Mapping does not have an index / key."""
+
+
+class AstroidTypeError(AstroidError):
+ """Raised when a TypeError would be expected in Python code."""
+
+
+class InferenceOverwriteError(AstroidError):
+ """Raised when an inference tip is overwritten
+
+ Currently only used for debugging.
+ """
+
+
+# Backwards-compatibility aliases
+OperationError = util.BadOperationMessage
+UnaryOperationError = util.BadUnaryOperationMessage
+BinaryOperationError = util.BadBinaryOperationMessage
+
+SuperArgumentTypeError = SuperError
+UnresolvableName = NameInferenceError
+NotFoundError = AttributeInferenceError
+AstroidBuildingException = AstroidBuildingError
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/helpers.py b/basic python programmes/venv/Lib/site-packages/astroid/helpers.py
new file mode 100644
index 0000000..e58d191
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/helpers.py
@@ -0,0 +1,272 @@
+# Copyright (c) 2015-2018 Claudiu Popa
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2018 Bryce Guinta
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+
+"""
+Various helper utilities.
+"""
+
+import builtins as builtins_mod
+
+from astroid import bases
+from astroid import context as contextmod
+from astroid import exceptions
+from astroid import manager
+from astroid import nodes
+from astroid import raw_building
+from astroid import scoped_nodes
+from astroid import util
+
+
+BUILTINS = builtins_mod.__name__
+
+
+def _build_proxy_class(cls_name, builtins):
+ proxy = raw_building.build_class(cls_name)
+ proxy.parent = builtins
+ return proxy
+
+
+def _function_type(function, builtins):
+ if isinstance(function, scoped_nodes.Lambda):
+ if function.root().name == BUILTINS:
+ cls_name = "builtin_function_or_method"
+ else:
+ cls_name = "function"
+ elif isinstance(function, bases.BoundMethod):
+ cls_name = "method"
+ elif isinstance(function, bases.UnboundMethod):
+ cls_name = "function"
+ return _build_proxy_class(cls_name, builtins)
+
+
+def _object_type(node, context=None):
+ astroid_manager = manager.AstroidManager()
+ builtins = astroid_manager.astroid_cache[BUILTINS]
+ context = context or contextmod.InferenceContext()
+
+ for inferred in node.infer(context=context):
+ if isinstance(inferred, scoped_nodes.ClassDef):
+ if inferred.newstyle:
+ metaclass = inferred.metaclass()
+ if metaclass:
+ yield metaclass
+ continue
+ yield builtins.getattr("type")[0]
+ elif isinstance(inferred, (scoped_nodes.Lambda, bases.UnboundMethod)):
+ yield _function_type(inferred, builtins)
+ elif isinstance(inferred, scoped_nodes.Module):
+ yield _build_proxy_class("module", builtins)
+ else:
+ yield inferred._proxied
+
+
+def object_type(node, context=None):
+ """Obtain the type of the given node
+
+ This is used to implement the ``type`` builtin, which means that it's
+ used for inferring type calls, as well as used in a couple of other places
+ in the inference.
+ The node will be inferred first, so this function can support all
+ sorts of objects, as long as they support inference.
+ """
+
+ try:
+ types = set(_object_type(node, context))
+ except exceptions.InferenceError:
+ return util.Uninferable
+ if len(types) > 1 or not types:
+ return util.Uninferable
+ return list(types)[0]
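+
+
+# Editor's sketch, assuming the top-level `astroid` package API is available:
+#
+#     import astroid
+#     node = astroid.extract_node("42")
+#     object_type(node).name    # -> "int" (the builtins ClassDef for int)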
+
+
+def _object_type_is_subclass(obj_type, class_or_seq, context=None):
+ if not isinstance(class_or_seq, (tuple, list)):
+ class_seq = (class_or_seq,)
+ else:
+ class_seq = class_or_seq
+
+ if obj_type is util.Uninferable:
+ return util.Uninferable
+
+ # Instances are not types
+ class_seq = [
+ item if not isinstance(item, bases.Instance) else util.Uninferable
+ for item in class_seq
+ ]
+ # strict compatibility with issubclass
+ # issubclass(type, (object, 1)) evaluates to true
+ # issubclass(object, (1, type)) raises TypeError
+ for klass in class_seq:
+ if klass is util.Uninferable:
+ raise exceptions.AstroidTypeError("arg 2 must be a type or tuple of types")
+
+ for obj_subclass in obj_type.mro():
+ if obj_subclass == klass:
+ return True
+ return False
+
+
+def object_isinstance(node, class_or_seq, context=None):
+ """Check if a node 'isinstance' any node in class_or_seq
+
+ :param node: A given node
+ :param class_or_seq: Union[nodes.NodeNG, Sequence[nodes.NodeNG]]
+ :rtype: bool
+
+ :raises AstroidTypeError: if the given ``classes_or_seq`` are not types
+ """
+ obj_type = object_type(node, context)
+ if obj_type is util.Uninferable:
+ return util.Uninferable
+ return _object_type_is_subclass(obj_type, class_or_seq, context=context)
+
+
+def object_issubclass(node, class_or_seq, context=None):
+ """Check if a type is a subclass of any node in class_or_seq
+
+ :param node: A given node
+ :param class_or_seq: Union[Nodes.NodeNG, Sequence[nodes.NodeNG]]
+ :rtype: bool
+
+ :raises AstroidTypeError: if the given ``classes_or_seq`` are not types
+ :raises AstroidError: if the type of the given node cannot be inferred
+ or its type's mro doesn't work
+ """
+ if not isinstance(node, nodes.ClassDef):
+ raise TypeError("{node} needs to be a ClassDef node".format(node=node))
+ return _object_type_is_subclass(node, class_or_seq, context=context)
+
+
+def safe_infer(node, context=None):
+ """Return the inferred value for the given node.
+
+ Return None if inference failed or if there is some ambiguity (more than
+ one node has been inferred).
+ """
+ try:
+ inferit = node.infer(context=context)
+ value = next(inferit)
+ except exceptions.InferenceError:
+ return None
+ try:
+ next(inferit)
+ return None # None if there is ambiguity on the inferred node
+ except exceptions.InferenceError:
+ return None # there is some kind of ambiguity
+ except StopIteration:
+ return value
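+
+
+# Editor's sketch: safe_infer is the "one unambiguous answer or None" helper
+# used throughout astroid (assumes the top-level extract_node API).
+#
+#     import astroid
+#     node = astroid.extract_node("1 + 2")
+#     safe_infer(node)    # -> a Const node with value 3; None if ambiguous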
+
+
+def has_known_bases(klass, context=None):
+ """Return true if all base classes of a class could be inferred."""
+ try:
+ return klass._all_bases_known
+ except AttributeError:
+ pass
+ for base in klass.bases:
+ result = safe_infer(base, context=context)
+ # TODO: check for A->B->A->B pattern in class structure too?
+ if (
+ not isinstance(result, scoped_nodes.ClassDef)
+ or result is klass
+ or not has_known_bases(result, context=context)
+ ):
+ klass._all_bases_known = False
+ return False
+ klass._all_bases_known = True
+ return True
+
+
+def _type_check(type1, type2):
+ if not all(map(has_known_bases, (type1, type2))):
+ raise exceptions._NonDeducibleTypeHierarchy
+
+ if not all([type1.newstyle, type2.newstyle]):
+ return False
+ try:
+ return type1 in type2.mro()[:-1]
+ except exceptions.MroError:
+ # The MRO is invalid.
+ raise exceptions._NonDeducibleTypeHierarchy
+
+
+def is_subtype(type1, type2):
+ """Check if *type1* is a subtype of *typ2*."""
+ return _type_check(type2, type1)
+
+
+def is_supertype(type1, type2):
+ """Check if *type2* is a supertype of *type1*."""
+ return _type_check(type1, type2)
+
+
+def class_instance_as_index(node):
+ """Get the value as an index for the given instance.
+
+ If an instance provides an __index__ method, then it can
+ be used in some scenarios where an integer is expected,
+ for instance when multiplying or subscripting a list.
+ """
+ context = contextmod.InferenceContext()
+ context.callcontext = contextmod.CallContext(args=[node])
+
+ try:
+ for inferred in node.igetattr("__index__", context=context):
+ if not isinstance(inferred, bases.BoundMethod):
+ continue
+
+ for result in inferred.infer_call_result(node, context=context):
+ if isinstance(result, nodes.Const) and isinstance(result.value, int):
+ return result
+ except exceptions.InferenceError:
+ pass
+ return None
+
+
+def object_len(node, context=None):
+ """Infer length of given node object
+
+ :param Union[nodes.ClassDef, nodes.Instance] node: Node to infer length of
+
+ :raises AstroidTypeError: If an invalid node is returned
+ from __len__ method or no __len__ method exists
+ :raises InferenceError: If the given node cannot be inferred
+ or if multiple nodes are inferred
+ :returns: Integer length of node
+ :rtype: int
+ """
+ from astroid.objects import FrozenSet
+
+ inferred_node = safe_infer(node, context=context)
+ if inferred_node is None or inferred_node is util.Uninferable:
+ raise exceptions.InferenceError(node=node)
+ if isinstance(inferred_node, nodes.Const) and isinstance(
+ inferred_node.value, (bytes, str)
+ ):
+ return len(inferred_node.value)
+ if isinstance(inferred_node, (nodes.List, nodes.Set, nodes.Tuple, FrozenSet)):
+ return len(inferred_node.elts)
+ if isinstance(inferred_node, nodes.Dict):
+ return len(inferred_node.items)
+ try:
+ node_type = object_type(inferred_node, context=context)
+ len_call = next(node_type.igetattr("__len__", context=context))
+ except exceptions.AttributeInferenceError:
+ raise exceptions.AstroidTypeError(
+ "object of type '{}' has no len()".format(len_call.pytype())
+ )
+
+ result_of_len = next(len_call.infer_call_result(node, context))
+ if (
+ isinstance(result_of_len, nodes.Const)
+ and result_of_len.pytype() == "builtins.int"
+ ):
+ return result_of_len.value
+ raise exceptions.AstroidTypeError(
+ "'{}' object cannot be interpreted as an integer".format(result_of_len)
+ )
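+
+
+# Editor's sketch (assumes the top-level astroid API):
+#
+#     import astroid
+#     object_len(astroid.extract_node("[1, 2, 3]"))    # -> 3, via the List branch above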
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/inference.py b/basic python programmes/venv/Lib/site-packages/astroid/inference.py
new file mode 100644
index 0000000..cb7b004
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/inference.py
@@ -0,0 +1,917 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2012 FELD Boris
+# Copyright (c) 2013-2014 Google, Inc.
+# Copyright (c) 2014-2018 Claudiu Popa
+# Copyright (c) 2014 Eevee (Alex Munroe)
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2015 Dmitry Pribysh
+# Copyright (c) 2016 Jakub Wilk
+# Copyright (c) 2017 Michał Masłowski
+# Copyright (c) 2017 Calen Pennington
+# Copyright (c) 2017 Łukasz Rogalski
+# Copyright (c) 2018 Bryce Guinta
+# Copyright (c) 2018 Nick Drozd
+# Copyright (c) 2018 Ashley Whetter
+# Copyright (c) 2018 HoverHell
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""this module contains a set of functions to handle inference on astroid trees
+"""
+
+import functools
+import itertools
+import operator
+
+from astroid import bases
+from astroid import context as contextmod
+from astroid import exceptions
+from astroid import decorators
+from astroid import helpers
+from astroid import manager
+from astroid import nodes
+from astroid.interpreter import dunder_lookup
+from astroid import protocols
+from astroid import util
+
+
+MANAGER = manager.AstroidManager()
+
+
+# .infer method ###############################################################
+
+
+def infer_end(self, context=None):
+ """inference's end for node such as Module, ClassDef, FunctionDef,
+ Const...
+
+ """
+ yield self
+
+
+nodes.Module._infer = infer_end
+nodes.ClassDef._infer = infer_end
+nodes.FunctionDef._infer = infer_end
+nodes.Lambda._infer = infer_end
+nodes.Const._infer = infer_end
+nodes.Slice._infer = infer_end
+
+
+def _infer_sequence_helper(node, context=None):
+ """Infer all values based on _BaseContainer.elts"""
+ values = []
+
+ for elt in node.elts:
+ if isinstance(elt, nodes.Starred):
+ starred = helpers.safe_infer(elt.value, context)
+ if not starred:
+ raise exceptions.InferenceError(node=node, context=context)
+ if not hasattr(starred, "elts"):
+ raise exceptions.InferenceError(node=node, context=context)
+ values.extend(_infer_sequence_helper(starred))
+ else:
+ values.append(elt)
+ return values
+
+
+@decorators.raise_if_nothing_inferred
+@decorators.path_wrapper
+def infer_sequence(self, context=None):
+ if not any(isinstance(e, nodes.Starred) for e in self.elts):
+ yield self
+ else:
+ values = _infer_sequence_helper(self, context)
+ new_seq = type(self)(
+ lineno=self.lineno, col_offset=self.col_offset, parent=self.parent
+ )
+ new_seq.postinit(values)
+
+ yield new_seq
+
+
+nodes.List._infer = infer_sequence
+nodes.Tuple._infer = infer_sequence
+nodes.Set._infer = infer_sequence
+
+
+def infer_map(self, context=None):
+ if not any(isinstance(k, nodes.DictUnpack) for k, _ in self.items):
+ yield self
+ else:
+ items = _infer_map(self, context)
+ new_seq = type(self)(self.lineno, self.col_offset, self.parent)
+ new_seq.postinit(list(items.items()))
+ yield new_seq
+
+
+def _update_with_replacement(lhs_dict, rhs_dict):
+ """Delete nodes that equate to duplicate keys
+
+ Since an astroid node doesn't 'equal' another node with the same value,
+ this function uses the as_string method to make sure duplicate keys
+ don't get through
+
+ Note that both the key and the value are astroid nodes
+
+ Fixes an issue with DictUnpack causing duplicate keys
+ in inferred Dict items
+
+ :param dict(nodes.NodeNG, nodes.NodeNG) lhs_dict: Dictionary to 'merge' nodes into
+ :param dict(nodes.NodeNG, nodes.NodeNG) rhs_dict: Dictionary with nodes to pull from
+ :return dict(nodes.NodeNG, nodes.NodeNG): merged dictionary of nodes
+ """
+ combined_dict = itertools.chain(lhs_dict.items(), rhs_dict.items())
+ # Overwrite keys which have the same string values
+ string_map = {key.as_string(): (key, value) for key, value in combined_dict}
+ # Return to dictionary
+ return dict(string_map.values())
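+
+
+# Editor's illustration: two distinct Const(1) key nodes are different objects,
+# but their as_string() output collides, so the right-hand entry wins the merge.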
+
+
+def _infer_map(node, context):
+ """Infer all values based on Dict.items"""
+ values = {}
+ for name, value in node.items:
+ if isinstance(name, nodes.DictUnpack):
+ double_starred = helpers.safe_infer(value, context)
+ if not double_starred:
+ raise exceptions.InferenceError
+ if not isinstance(double_starred, nodes.Dict):
+ raise exceptions.InferenceError(node=node, context=context)
+ unpack_items = _infer_map(double_starred, context)
+ values = _update_with_replacement(values, unpack_items)
+ else:
+ key = helpers.safe_infer(name, context=context)
+ value = helpers.safe_infer(value, context=context)
+ if any(not elem for elem in (key, value)):
+ raise exceptions.InferenceError(node=node, context=context)
+ values = _update_with_replacement(values, {key: value})
+ return values
+
+
+nodes.Dict._infer = infer_map
+
+
+def _higher_function_scope(node):
+ """ Search for the first function which encloses the given
+ scope. This can be used for looking up in that function's
+ scope, in case looking up in a lower scope for a particular
+ name fails.
+
+ :param node: A scope node.
+ :returns:
+ ``None``, if no parent function scope was found,
+ otherwise an instance of :class:`astroid.scoped_nodes.Function`,
+ which encloses the given node.
+ """
+ current = node
+ while current.parent and not isinstance(current.parent, nodes.FunctionDef):
+ current = current.parent
+ if current and current.parent:
+ return current.parent
+ return None
+
+
+def infer_name(self, context=None):
+ """infer a Name: use name lookup rules"""
+ frame, stmts = self.lookup(self.name)
+ if not stmts:
+ # Try to see if the name is enclosed in a nested function
+ # and use the higher (first function) scope for searching.
+ parent_function = _higher_function_scope(self.scope())
+ if parent_function:
+ _, stmts = parent_function.lookup(self.name)
+
+ if not stmts:
+ raise exceptions.NameInferenceError(
+ name=self.name, scope=self.scope(), context=context
+ )
+ context = contextmod.copy_context(context)
+ context.lookupname = self.name
+ return bases._infer_stmts(stmts, context, frame)
+
+
+# pylint: disable=no-value-for-parameter
+nodes.Name._infer = decorators.raise_if_nothing_inferred(
+ decorators.path_wrapper(infer_name)
+)
+nodes.AssignName.infer_lhs = infer_name # won't work with a path wrapper
+
+
+@decorators.raise_if_nothing_inferred
+@decorators.path_wrapper
+def infer_call(self, context=None):
+ """infer a Call node by trying to guess what the function returns"""
+ callcontext = contextmod.copy_context(context)
+ callcontext.callcontext = contextmod.CallContext(
+ args=self.args, keywords=self.keywords
+ )
+ callcontext.boundnode = None
+ if context is not None:
+ callcontext.extra_context = _populate_context_lookup(self, context.clone())
+
+ for callee in self.func.infer(context):
+ if callee is util.Uninferable:
+ yield callee
+ continue
+ try:
+ if hasattr(callee, "infer_call_result"):
+ yield from callee.infer_call_result(caller=self, context=callcontext)
+ except exceptions.InferenceError:
+ continue
+ return dict(node=self, context=context)
+
+
+nodes.Call._infer = infer_call
+
+
+@decorators.raise_if_nothing_inferred
+@decorators.path_wrapper
+def infer_import(self, context=None, asname=True):
+ """infer an Import node: return the imported module/object"""
+ name = context.lookupname
+ if name is None:
+ raise exceptions.InferenceError(node=self, context=context)
+
+ try:
+ if asname:
+ yield self.do_import_module(self.real_name(name))
+ else:
+ yield self.do_import_module(name)
+ except exceptions.AstroidBuildingError as exc:
+ raise exceptions.InferenceError(node=self, context=context) from exc
+
+
+nodes.Import._infer = infer_import
+
+
+def infer_name_module(self, name):
+ context = contextmod.InferenceContext()
+ context.lookupname = name
+ return self.infer(context, asname=False)
+
+
+nodes.Import.infer_name_module = infer_name_module
+
+
+@decorators.raise_if_nothing_inferred
+@decorators.path_wrapper
+def infer_import_from(self, context=None, asname=True):
+ """infer a ImportFrom node: return the imported module/object"""
+ name = context.lookupname
+ if name is None:
+ raise exceptions.InferenceError(node=self, context=context)
+ if asname:
+ name = self.real_name(name)
+
+ try:
+ module = self.do_import_module()
+ except exceptions.AstroidBuildingError as exc:
+ raise exceptions.InferenceError(node=self, context=context) from exc
+
+ try:
+ context = contextmod.copy_context(context)
+ context.lookupname = name
+ stmts = module.getattr(name, ignore_locals=module is self.root())
+ return bases._infer_stmts(stmts, context)
+ except exceptions.AttributeInferenceError as error:
+ raise exceptions.InferenceError(
+ error.message, target=self, attribute=name, context=context
+ ) from error
+
+
+nodes.ImportFrom._infer = infer_import_from
+
+
+@decorators.raise_if_nothing_inferred
+def infer_attribute(self, context=None):
+ """infer an Attribute node by using getattr on the associated object"""
+ for owner in self.expr.infer(context):
+ if owner is util.Uninferable:
+ yield owner
+ continue
+
+ if context and context.boundnode:
+ # This handles the situation where the attribute is accessed through a subclass
+ # of a base class and the attribute is defined at the base class's level,
+ # by taking in consideration a redefinition in the subclass.
+ if isinstance(owner, bases.Instance) and isinstance(
+ context.boundnode, bases.Instance
+ ):
+ try:
+ if helpers.is_subtype(
+ helpers.object_type(context.boundnode),
+ helpers.object_type(owner),
+ ):
+ owner = context.boundnode
+ except exceptions._NonDeducibleTypeHierarchy:
+ # Can't determine anything useful.
+ pass
+
+ try:
+ context.boundnode = owner
+ yield from owner.igetattr(self.attrname, context)
+ context.boundnode = None
+ except (exceptions.AttributeInferenceError, exceptions.InferenceError):
+ context.boundnode = None
+ except AttributeError:
+ # XXX method / function
+ context.boundnode = None
+ return dict(node=self, context=context)
+
+
+nodes.Attribute._infer = decorators.path_wrapper(infer_attribute)
+nodes.AssignAttr.infer_lhs = infer_attribute  # won't work with a path wrapper
+
+
+@decorators.raise_if_nothing_inferred
+@decorators.path_wrapper
+def infer_global(self, context=None):
+ if context.lookupname is None:
+ raise exceptions.InferenceError(node=self, context=context)
+ try:
+ return bases._infer_stmts(self.root().getattr(context.lookupname), context)
+ except exceptions.AttributeInferenceError as error:
+ raise exceptions.InferenceError(
+ error.message, target=self, attribute=context.lookupname, context=context
+ ) from error
+
+
+nodes.Global._infer = infer_global
+
+
+_SUBSCRIPT_SENTINEL = object()
+
+
+@decorators.raise_if_nothing_inferred
+def infer_subscript(self, context=None):
+ """Inference for subscripts
+
+ We're understanding if the index is a Const
+ or a slice, passing the result of inference
+ to the value's `getitem` method, which should
+ handle each supported index type accordingly.
+ """
+
+ found_one = False
+ for value in self.value.infer(context):
+ if value is util.Uninferable:
+ yield util.Uninferable
+ return None
+ for index in self.slice.infer(context):
+ if index is util.Uninferable:
+ yield util.Uninferable
+ return None
+
+ # Try to deduce the index value.
+ index_value = _SUBSCRIPT_SENTINEL
+ if value.__class__ == bases.Instance:
+ index_value = index
+ else:
+ if index.__class__ == bases.Instance:
+ instance_as_index = helpers.class_instance_as_index(index)
+ if instance_as_index:
+ index_value = instance_as_index
+ else:
+ index_value = index
+ if index_value is _SUBSCRIPT_SENTINEL:
+ raise exceptions.InferenceError(node=self, context=context)
+
+ try:
+ assigned = value.getitem(index_value, context)
+ except (
+ exceptions.AstroidTypeError,
+ exceptions.AstroidIndexError,
+ exceptions.AttributeInferenceError,
+ AttributeError,
+ ) as exc:
+ raise exceptions.InferenceError(node=self, context=context) from exc
+
+ # Prevent inferring if the inferred subscript
+ # is the same as the original subscripted object.
+ if self is assigned or assigned is util.Uninferable:
+ yield util.Uninferable
+ return None
+ yield from assigned.infer(context)
+ found_one = True
+
+ if found_one:
+ return dict(node=self, context=context)
+ return None
+
+
+nodes.Subscript._infer = decorators.path_wrapper(infer_subscript)
+nodes.Subscript.infer_lhs = infer_subscript
+
+
+@decorators.raise_if_nothing_inferred
+@decorators.path_wrapper
+def _infer_boolop(self, context=None):
+ """Infer a boolean operation (and / or / not).
+
+ The function will calculate the boolean operation
+ for all pairs generated through inference for each component
+ node.
+ """
+ values = self.values
+ if self.op == "or":
+ predicate = operator.truth
+ else:
+ predicate = operator.not_
+
+ try:
+ values = [value.infer(context=context) for value in values]
+ except exceptions.InferenceError:
+ yield util.Uninferable
+ return None
+
+ for pair in itertools.product(*values):
+ if any(item is util.Uninferable for item in pair):
+ # Can't infer the final result, just yield Uninferable.
+ yield util.Uninferable
+ continue
+
+ bool_values = [item.bool_value() for item in pair]
+ if any(item is util.Uninferable for item in bool_values):
+ # Can't infer the final result, just yield Uninferable.
+ yield util.Uninferable
+ continue
+
+ # Since the boolean operations are short circuited operations,
+ # this code yields the first value for which the predicate is True
+ # and if no value respected the predicate, then the last value will
+ # be returned (or Uninferable if there was no last value).
+ # This is conforming to the semantics of `and` and `or`:
+ # 1 and 0 -> 1
+ # 0 and 1 -> 0
+ # 1 or 0 -> 1
+ # 0 or 1 -> 1
+ value = util.Uninferable
+ for value, bool_value in zip(pair, bool_values):
+ if predicate(bool_value):
+ yield value
+ break
+ else:
+ yield value
+
+ return dict(node=self, context=context)
+
+
+nodes.BoolOp._infer = _infer_boolop
+
+
+# UnaryOp, BinOp and AugAssign inferences
+
+
+def _filter_operation_errors(self, infer_callable, context, error):
+ for result in infer_callable(self, context):
+ if isinstance(result, error):
+ # For the sake of .infer(), we don't care about operation
+ # errors, which is the job of pylint. So return something
+ # which shows that we can't infer the result.
+ yield util.Uninferable
+ else:
+ yield result
+
+
+def _infer_unaryop(self, context=None):
+ """Infer what an UnaryOp should return when evaluated."""
+ for operand in self.operand.infer(context):
+ try:
+ yield operand.infer_unary_op(self.op)
+ except TypeError as exc:
+ # The operand doesn't support this operation.
+ yield util.BadUnaryOperationMessage(operand, self.op, exc)
+ except AttributeError as exc:
+ meth = protocols.UNARY_OP_METHOD[self.op]
+ if meth is None:
+ # `not node`. Determine node's boolean
+ # value and negate its result, unless it is
+ # Uninferable, which will be returned as is.
+ bool_value = operand.bool_value()
+ if bool_value is not util.Uninferable:
+ yield nodes.const_factory(not bool_value)
+ else:
+ yield util.Uninferable
+ else:
+ if not isinstance(operand, (bases.Instance, nodes.ClassDef)):
+ # The operation was used on something which
+ # doesn't support it.
+ yield util.BadUnaryOperationMessage(operand, self.op, exc)
+ continue
+
+ try:
+ try:
+ methods = dunder_lookup.lookup(operand, meth)
+ except exceptions.AttributeInferenceError:
+ yield util.BadUnaryOperationMessage(operand, self.op, exc)
+ continue
+
+ meth = methods[0]
+ inferred = next(meth.infer(context=context))
+ if inferred is util.Uninferable or not inferred.callable():
+ continue
+
+ context = contextmod.copy_context(context)
+ context.callcontext = contextmod.CallContext(args=[operand])
+ call_results = inferred.infer_call_result(self, context=context)
+ result = next(call_results, None)
+ if result is None:
+ # Failed to infer, return the same type.
+ yield operand
+ else:
+ yield result
+ except exceptions.AttributeInferenceError as exc:
+ # The unary operation special method was not found.
+ yield util.BadUnaryOperationMessage(operand, self.op, exc)
+ except exceptions.InferenceError:
+ yield util.Uninferable
+
+
+@decorators.raise_if_nothing_inferred
+@decorators.path_wrapper
+def infer_unaryop(self, context=None):
+ """Infer what an UnaryOp should return when evaluated."""
+ yield from _filter_operation_errors(
+ self, _infer_unaryop, context, util.BadUnaryOperationMessage
+ )
+ return dict(node=self, context=context)
+
+
+nodes.UnaryOp._infer_unaryop = _infer_unaryop
+nodes.UnaryOp._infer = infer_unaryop
+
+
+def _is_not_implemented(const):
+ """Check if the given const node is NotImplemented."""
+ return isinstance(const, nodes.Const) and const.value is NotImplemented
+
+
+def _invoke_binop_inference(instance, opnode, op, other, context, method_name):
+ """Invoke binary operation inference on the given instance."""
+ methods = dunder_lookup.lookup(instance, method_name)
+ context = contextmod.bind_context_to_node(context, instance)
+ method = methods[0]
+ inferred = next(method.infer(context=context))
+ if inferred is util.Uninferable:
+ raise exceptions.InferenceError
+ return instance.infer_binary_op(opnode, op, other, context, inferred)
+
+
+def _aug_op(instance, opnode, op, other, context, reverse=False):
+ """Get an inference callable for an augmented binary operation."""
+ method_name = protocols.AUGMENTED_OP_METHOD[op]
+ return functools.partial(
+ _invoke_binop_inference,
+ instance=instance,
+ op=op,
+ opnode=opnode,
+ other=other,
+ context=context,
+ method_name=method_name,
+ )
+
+
+def _bin_op(instance, opnode, op, other, context, reverse=False):
+ """Get an inference callable for a normal binary operation.
+
+ If *reverse* is True, then the reflected method will be used instead.
+ """
+ if reverse:
+ method_name = protocols.REFLECTED_BIN_OP_METHOD[op]
+ else:
+ method_name = protocols.BIN_OP_METHOD[op]
+ return functools.partial(
+ _invoke_binop_inference,
+ instance=instance,
+ op=op,
+ opnode=opnode,
+ other=other,
+ context=context,
+ method_name=method_name,
+ )
+
+
+def _get_binop_contexts(context, left, right):
+ """Get contexts for binary operations.
+
+ This will return two inference contexts, the first one
+ for x.__op__(y), the other one for y.__rop__(x), where
+ only the arguments are swapped.
+ """
+ # The order is important, since the first one should be
+ # left.__op__(right).
+ for arg in (right, left):
+ new_context = context.clone()
+ new_context.callcontext = contextmod.CallContext(args=[arg])
+ new_context.boundnode = None
+ yield new_context
+
+
+def _same_type(type1, type2):
+ """Check if type1 is the same as type2."""
+ return type1.qname() == type2.qname()
+
+
+def _get_binop_flow(
+ left, left_type, binary_opnode, right, right_type, context, reverse_context
+):
+ """Get the flow for binary operations.
+
+ The rules are a bit messy:
+
+ * if left and right have the same type, then only one
+ method will be called, left.__op__(right)
+ * if left and right are unrelated typewise, then first
+ left.__op__(right) is tried and if this does not exist
+ or returns NotImplemented, then right.__rop__(left) is tried.
+ * if left is a subtype of right, then only left.__op__(right)
+ is tried.
+ * if left is a supertype of right, then right.__rop__(left)
+ is first tried and then left.__op__(right)
+ """
+ op = binary_opnode.op
+ if _same_type(left_type, right_type):
+ methods = [_bin_op(left, binary_opnode, op, right, context)]
+ elif helpers.is_subtype(left_type, right_type):
+ methods = [_bin_op(left, binary_opnode, op, right, context)]
+ elif helpers.is_supertype(left_type, right_type):
+ methods = [
+ _bin_op(right, binary_opnode, op, left, reverse_context, reverse=True),
+ _bin_op(left, binary_opnode, op, right, context),
+ ]
+ else:
+ methods = [
+ _bin_op(left, binary_opnode, op, right, context),
+ _bin_op(right, binary_opnode, op, left, reverse_context, reverse=True),
+ ]
+ return methods
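+
+
+# Editor's illustration: for `1 + 2.0` the operand types are unrelated, so the
+# flow above tries int.__add__(float) first and float.__radd__(int) second.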
+
+
+def _get_aug_flow(
+ left, left_type, aug_opnode, right, right_type, context, reverse_context
+):
+ """Get the flow for augmented binary operations.
+
+ The rules are a bit messy:
+
+ * if left and right have the same type, then left.__augop__(right)
+ is first tried and then left.__op__(right).
+ * if left and right are unrelated typewise, then
+ left.__augop__(right) is tried, then left.__op__(right)
+ is tried and then right.__rop__(left) is tried.
+ * if left is a subtype of right, then left.__augop__(right)
+ is tried and then left.__op__(right).
+ * if left is a supertype of right, then left.__augop__(right)
+ is tried, then right.__rop__(left) and then
+ left.__op__(right)
+ """
+ bin_op = aug_opnode.op.strip("=")
+ aug_op = aug_opnode.op
+ if _same_type(left_type, right_type):
+ methods = [
+ _aug_op(left, aug_opnode, aug_op, right, context),
+ _bin_op(left, aug_opnode, bin_op, right, context),
+ ]
+ elif helpers.is_subtype(left_type, right_type):
+ methods = [
+ _aug_op(left, aug_opnode, aug_op, right, context),
+ _bin_op(left, aug_opnode, bin_op, right, context),
+ ]
+ elif helpers.is_supertype(left_type, right_type):
+ methods = [
+ _aug_op(left, aug_opnode, aug_op, right, context),
+ _bin_op(right, aug_opnode, bin_op, left, reverse_context, reverse=True),
+ _bin_op(left, aug_opnode, bin_op, right, context),
+ ]
+ else:
+ methods = [
+ _aug_op(left, aug_opnode, aug_op, right, context),
+ _bin_op(left, aug_opnode, bin_op, right, context),
+ _bin_op(right, aug_opnode, bin_op, left, reverse_context, reverse=True),
+ ]
+ return methods
+
+
+def _infer_binary_operation(left, right, binary_opnode, context, flow_factory):
+ """Infer a binary operation between a left operand and a right operand
+
+ This is used by both normal binary operations and augmented binary
+ operations, the only difference is the flow factory used.
+ """
+
+ context, reverse_context = _get_binop_contexts(context, left, right)
+ left_type = helpers.object_type(left)
+ right_type = helpers.object_type(right)
+ methods = flow_factory(
+ left, left_type, binary_opnode, right, right_type, context, reverse_context
+ )
+ for method in methods:
+ try:
+ results = list(method())
+ except AttributeError:
+ continue
+ except exceptions.AttributeInferenceError:
+ continue
+ except exceptions.InferenceError:
+ yield util.Uninferable
+ return
+ else:
+ if any(result is util.Uninferable for result in results):
+ yield util.Uninferable
+ return
+
+ if all(map(_is_not_implemented, results)):
+ continue
+ not_implemented = sum(
+ 1 for result in results if _is_not_implemented(result)
+ )
+ if not_implemented and not_implemented != len(results):
+ # Can't infer yet what this is.
+ yield util.Uninferable
+ return
+
+ yield from results
+ return
+ # The operation doesn't seem to be supported so let the caller know about it
+ yield util.BadBinaryOperationMessage(left_type, binary_opnode.op, right_type)
+
+
+def _infer_binop(self, context):
+ """Binary operation inference logic."""
+ left = self.left
+ right = self.right
+
+ # we use two separate contexts for evaluating lhs and rhs because
+ # 1. evaluating lhs may leave some undesired entries in context.path
+ # which may prevent us from inferring the correct value of rhs
+ context = context or contextmod.InferenceContext()
+ lhs_context = contextmod.copy_context(context)
+ rhs_context = contextmod.copy_context(context)
+ lhs_iter = left.infer(context=lhs_context)
+ rhs_iter = right.infer(context=rhs_context)
+ for lhs, rhs in itertools.product(lhs_iter, rhs_iter):
+ if any(value is util.Uninferable for value in (rhs, lhs)):
+ # Don't know how to process this.
+ yield util.Uninferable
+ return
+
+ try:
+ yield from _infer_binary_operation(lhs, rhs, self, context, _get_binop_flow)
+ except exceptions._NonDeducibleTypeHierarchy:
+ yield util.Uninferable
+
+
+@decorators.yes_if_nothing_inferred
+@decorators.path_wrapper
+def infer_binop(self, context=None):
+ return _filter_operation_errors(
+ self, _infer_binop, context, util.BadBinaryOperationMessage
+ )
+
+
+nodes.BinOp._infer_binop = _infer_binop
+nodes.BinOp._infer = infer_binop
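+
+
+# Editor's sketch of the end result (assumes the top-level astroid API):
+#
+#     import astroid
+#     node = astroid.extract_node("'a' + 'b'")
+#     next(node.infer()).value    # -> 'ab', produced by the flow machinery above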
+
+
+def _infer_augassign(self, context=None):
+ """Inference logic for augmented binary operations."""
+ if context is None:
+ context = contextmod.InferenceContext()
+
+ rhs_context = context.clone()
+
+ lhs_iter = self.target.infer_lhs(context=context)
+ rhs_iter = self.value.infer(context=rhs_context)
+ for lhs, rhs in itertools.product(lhs_iter, rhs_iter):
+ if any(value is util.Uninferable for value in (rhs, lhs)):
+ # Don't know how to process this.
+ yield util.Uninferable
+ return
+
+ try:
+ yield from _infer_binary_operation(
+ left=lhs,
+ right=rhs,
+ binary_opnode=self,
+ context=context,
+ flow_factory=_get_aug_flow,
+ )
+ except exceptions._NonDeducibleTypeHierarchy:
+ yield util.Uninferable
+
+
+@decorators.raise_if_nothing_inferred
+@decorators.path_wrapper
+def infer_augassign(self, context=None):
+ return _filter_operation_errors(
+ self, _infer_augassign, context, util.BadBinaryOperationMessage
+ )
+
+
+nodes.AugAssign._infer_augassign = _infer_augassign
+nodes.AugAssign._infer = infer_augassign
+
+# End of binary operation inference.
+
+
+@decorators.raise_if_nothing_inferred
+@decorators.path_wrapper
+def infer_arguments(self, context=None):
+ name = context.lookupname
+ if name is None:
+ raise exceptions.InferenceError(node=self, context=context)
+ return protocols._arguments_infer_argname(self, name, context)
+
+
+nodes.Arguments._infer = infer_arguments
+
+
+@decorators.raise_if_nothing_inferred
+@decorators.path_wrapper
+def infer_assign(self, context=None):
+ """infer a AssignName/AssignAttr: need to inspect the RHS part of the
+ assign node
+ """
+ stmt = self.statement()
+ if isinstance(stmt, nodes.AugAssign):
+ return stmt.infer(context)
+
+ stmts = list(self.assigned_stmts(context=context))
+ return bases._infer_stmts(stmts, context)
+
+
+nodes.AssignName._infer = infer_assign
+nodes.AssignAttr._infer = infer_assign
+
+
+@decorators.raise_if_nothing_inferred
+@decorators.path_wrapper
+def infer_empty_node(self, context=None):
+ if not self.has_underlying_object():
+ yield util.Uninferable
+ else:
+ try:
+ yield from MANAGER.infer_ast_from_something(self.object, context=context)
+ except exceptions.AstroidError:
+ yield util.Uninferable
+
+
+nodes.EmptyNode._infer = infer_empty_node
+
+
+@decorators.raise_if_nothing_inferred
+@decorators.path_wrapper
+def infer_index(self, context=None):
+ return self.value.infer(context)
+
+
+nodes.Index._infer = infer_index
+
+# TODO: move directly into bases.Instance once the dependency hell
+# is solved.
+def instance_getitem(self, index, context=None):
+ # Rewrap index to Const for this case
+ new_context = contextmod.bind_context_to_node(context, self)
+ if not context:
+ context = new_context
+
+ # Create a new callcontext for providing index as an argument.
+ new_context.callcontext = contextmod.CallContext(args=[index])
+
+ method = next(self.igetattr("__getitem__", context=context), None)
+ if not isinstance(method, bases.BoundMethod):
+ raise exceptions.InferenceError(
+ "Could not find __getitem__ for {node!r}.", node=self, context=context
+ )
+
+ try:
+ return next(method.infer_call_result(self, new_context))
+ except StopIteration as exc:
+ raise exceptions.InferenceError(
+ message="Inference for {node!r}[{index!s}] failed.",
+ node=self,
+ index=index,
+ context=context,
+ ) from exc
+
+
+bases.Instance.getitem = instance_getitem
+
+
+def _populate_context_lookup(call, context):
+ # Allows context to be saved for later
+ # for inference inside a function
+ context_lookup = {}
+ if context is None:
+ return context_lookup
+ for arg in call.args:
+ if isinstance(arg, nodes.Starred):
+ context_lookup[arg.value] = context
+ else:
+ context_lookup[arg] = context
+ keywords = call.keywords if call.keywords is not None else []
+ for keyword in keywords:
+ context_lookup[keyword.value] = context
+ return context_lookup
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/interpreter/__init__.py b/basic python programmes/venv/Lib/site-packages/astroid/interpreter/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/interpreter/__pycache__/__init__.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/interpreter/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..09092e8
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/interpreter/__pycache__/__init__.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/interpreter/__pycache__/dunder_lookup.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/interpreter/__pycache__/dunder_lookup.cpython-37.pyc
new file mode 100644
index 0000000..2ec807d
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/interpreter/__pycache__/dunder_lookup.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/interpreter/__pycache__/objectmodel.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/interpreter/__pycache__/objectmodel.cpython-37.pyc
new file mode 100644
index 0000000..478c820
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/interpreter/__pycache__/objectmodel.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/interpreter/_import/__init__.py b/basic python programmes/venv/Lib/site-packages/astroid/interpreter/_import/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/__init__.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..a7a1bea
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/__init__.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/spec.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/spec.cpython-37.pyc
new file mode 100644
index 0000000..097e2b7
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/spec.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/util.cpython-37.pyc b/basic python programmes/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/util.cpython-37.pyc
new file mode 100644
index 0000000..11cec29
Binary files /dev/null and b/basic python programmes/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/util.cpython-37.pyc differ
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/interpreter/_import/spec.py b/basic python programmes/venv/Lib/site-packages/astroid/interpreter/_import/spec.py
new file mode 100644
index 0000000..982f9b6
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/interpreter/_import/spec.py
@@ -0,0 +1,337 @@
+# Copyright (c) 2016-2018 Claudiu Popa
+# Copyright (c) 2016 Derek Gustafson
+# Copyright (c) 2017 Chris Philip
+# Copyright (c) 2017 Hugo
+# Copyright (c) 2017 ioanatia
+# Copyright (c) 2017 Calen Pennington
+# Copyright (c) 2018 Nick Drozd
+
+import abc
+import collections
+import enum
+import imp
+import os
+import sys
+import zipimport
+
+try:
+ import importlib.machinery
+
+ _HAS_MACHINERY = True
+except ImportError:
+ _HAS_MACHINERY = False
+
+try:
+ from functools import lru_cache
+except ImportError:
+ from backports.functools_lru_cache import lru_cache
+
+from . import util
+
+ModuleType = enum.Enum(
+ "ModuleType",
+ "C_BUILTIN C_EXTENSION PKG_DIRECTORY "
+ "PY_CODERESOURCE PY_COMPILED PY_FROZEN PY_RESOURCE "
+ "PY_SOURCE PY_ZIPMODULE PY_NAMESPACE",
+)
+_ImpTypes = {
+ imp.C_BUILTIN: ModuleType.C_BUILTIN,
+ imp.C_EXTENSION: ModuleType.C_EXTENSION,
+ imp.PKG_DIRECTORY: ModuleType.PKG_DIRECTORY,
+ imp.PY_COMPILED: ModuleType.PY_COMPILED,
+ imp.PY_FROZEN: ModuleType.PY_FROZEN,
+ imp.PY_SOURCE: ModuleType.PY_SOURCE,
+}
+if hasattr(imp, "PY_RESOURCE"):
+ _ImpTypes[imp.PY_RESOURCE] = ModuleType.PY_RESOURCE
+if hasattr(imp, "PY_CODERESOURCE"):
+ _ImpTypes[imp.PY_CODERESOURCE] = ModuleType.PY_CODERESOURCE
+
+
+def _imp_type_to_module_type(imp_type):
+ return _ImpTypes[imp_type]
+
+
+_ModuleSpec = collections.namedtuple(
+ "_ModuleSpec", "name type location " "origin submodule_search_locations"
+)
+
+
+class ModuleSpec(_ModuleSpec):
+ """Defines a class similar to PEP 420's ModuleSpec
+
+ A module spec defines a name of a module, its type, location
+ and where submodules can be found, if the module is a package.
+ """
+
+ def __new__(
+ cls,
+ name,
+ module_type,
+ location=None,
+ origin=None,
+ submodule_search_locations=None,
+ ):
+ return _ModuleSpec.__new__(
+ cls,
+ name=name,
+ type=module_type,
+ location=location,
+ origin=origin,
+ submodule_search_locations=submodule_search_locations,
+ )
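+
+
+# Editor's sketch: a spec for a plain source module might look like
+#
+#     ModuleSpec(name="example", module_type=ModuleType.PY_SOURCE,
+#                location="/path/to/example.py")    # hypothetical path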
+
+
+class Finder:
+ """A finder is a class which knows how to find a particular module."""
+
+ def __init__(self, path=None):
+ self._path = path or sys.path
+
+ @abc.abstractmethod
+ def find_module(self, modname, module_parts, processed, submodule_path):
+ """Find the given module
+
+ Each finder is responsible for each protocol of finding, as long as
+ they all return a ModuleSpec.
+
+ :param str modname: The module which needs to be searched.
+ :param list module_parts: It should be a list of strings,
+ where each part contributes to the module's
+ namespace.
+ :param list processed: What parts from the module parts were processed
+ so far.
+ :param list submodule_path: A list of paths where the module
+ can be looked into.
+ :returns: A ModuleSpec, describing how and where the module was found,
+ or None otherwise.
+ """
+
+ def contribute_to_path(self, spec, processed):
+ """Get a list of extra paths where this finder can search."""
+
+
+class ImpFinder(Finder):
+ """A finder based on the imp module."""
+
+ def find_module(self, modname, module_parts, processed, submodule_path):
+ if submodule_path is not None:
+ submodule_path = list(submodule_path)
+ try:
+ stream, mp_filename, mp_desc = imp.find_module(modname, submodule_path)
+ except ImportError:
+ return None
+
+ # Close resources.
+ if stream:
+ stream.close()
+
+ return ModuleSpec(
+ name=modname,
+ location=mp_filename,
+ module_type=_imp_type_to_module_type(mp_desc[2]),
+ )
+
+ def contribute_to_path(self, spec, processed):
+ if spec.location is None:
+ # Builtin.
+ return None
+
+ if _is_setuptools_namespace(spec.location):
+ # extend_path is called, search sys.path for module/packages
+ # of this name see pkgutil.extend_path documentation
+ path = [
+ os.path.join(p, *processed)
+ for p in sys.path
+ if os.path.isdir(os.path.join(p, *processed))
+ ]
+ else:
+ path = [spec.location]
+ return path
+
+
+class ExplicitNamespacePackageFinder(ImpFinder):
+ """A finder for the explicit namespace packages, generated through pkg_resources."""
+
+ def find_module(self, modname, module_parts, processed, submodule_path):
+ if processed:
+ modname = ".".join(processed + [modname])
+ if util.is_namespace(modname) and modname in sys.modules:
+ submodule_path = sys.modules[modname].__path__
+ return ModuleSpec(
+ name=modname,
+ location="",
+ origin="namespace",
+ module_type=ModuleType.PY_NAMESPACE,
+ submodule_search_locations=submodule_path,
+ )
+ return None
+
+ def contribute_to_path(self, spec, processed):
+ return spec.submodule_search_locations
+
+
+class ZipFinder(Finder):
+ """Finder that knows how to find a module inside zip files."""
+
+ def __init__(self, path):
+ super(ZipFinder, self).__init__(path)
+ self._zipimporters = _precache_zipimporters(path)
+
+ def find_module(self, modname, module_parts, processed, submodule_path):
+ try:
+ file_type, filename, path = _search_zip(module_parts, self._zipimporters)
+ except ImportError:
+ return None
+
+ return ModuleSpec(
+ name=modname,
+ location=filename,
+ origin="egg",
+ module_type=file_type,
+ submodule_search_locations=path,
+ )
+
+
+class PathSpecFinder(Finder):
+ """Finder based on importlib.machinery.PathFinder."""
+
+ def find_module(self, modname, module_parts, processed, submodule_path):
+ spec = importlib.machinery.PathFinder.find_spec(modname, path=submodule_path)
+ if spec:
+ # origin can be either a string on older Python versions
+ # or None in case it is a namespace package:
+ # https://github.com/python/cpython/pull/5481
+ is_namespace_pkg = spec.origin in ("namespace", None)
+ location = spec.origin if not is_namespace_pkg else None
+ module_type = ModuleType.PY_NAMESPACE if is_namespace_pkg else None
+ spec = ModuleSpec(
+ name=spec.name,
+ location=location,
+ origin=spec.origin,
+ module_type=module_type,
+ submodule_search_locations=list(spec.submodule_search_locations or []),
+ )
+ return spec
+
+ def contribute_to_path(self, spec, processed):
+ if spec.type == ModuleType.PY_NAMESPACE:
+ return spec.submodule_search_locations
+ return None
+
+
+_SPEC_FINDERS = (ImpFinder, ZipFinder)
+if _HAS_MACHINERY and sys.version_info[:2] >= (3, 4):
+ _SPEC_FINDERS += (PathSpecFinder,)
+_SPEC_FINDERS += (ExplicitNamespacePackageFinder,)
+
+
+def _is_setuptools_namespace(location):
+ try:
+ with open(os.path.join(location, "__init__.py"), "rb") as stream:
+ data = stream.read(4096)
+ except IOError:
+ pass
+ else:
+ extend_path = b"pkgutil" in data and b"extend_path" in data
+ declare_namespace = (
+ b"pkg_resources" in data and b"declare_namespace(__name__)" in data
+ )
+ return extend_path or declare_namespace
+
+
+@lru_cache()
+def _cached_set_diff(left, right):
+ result = set(left)
+ result.difference_update(right)
+ return result
+
+
+def _precache_zipimporters(path=None):
+ pic = sys.path_importer_cache
+
+ # When measured, despite having the same complexity (O(n)),
+ # converting to tuples and then caching the conversion to sets
+ # and the set difference is faster than converting to sets
+ # and then only caching the set difference.
+
+ req_paths = tuple(path or sys.path)
+ cached_paths = tuple(pic)
+ new_paths = _cached_set_diff(req_paths, cached_paths)
+ for entry_path in new_paths:
+ try:
+ pic[entry_path] = zipimport.zipimporter(entry_path)
+ except zipimport.ZipImportError:
+ continue
+ return pic
+
+
+def _search_zip(modpath, pic):
+ for filepath, importer in list(pic.items()):
+ if importer is not None:
+ found = importer.find_module(modpath[0])
+ if found:
+ if not importer.find_module(os.path.sep.join(modpath)):
+ raise ImportError(
+ "No module named %s in %s/%s"
+ % (".".join(modpath[1:]), filepath, modpath)
+ )
+ # import code; code.interact(local=locals())
+ return (
+ ModuleType.PY_ZIPMODULE,
+ os.path.abspath(filepath) + os.path.sep + os.path.sep.join(modpath),
+ filepath,
+ )
+ raise ImportError("No module named %s" % ".".join(modpath))
+
+
+def _find_spec_with_path(search_path, modname, module_parts, processed, submodule_path):
+ finders = [finder(search_path) for finder in _SPEC_FINDERS]
+ for finder in finders:
+ spec = finder.find_module(modname, module_parts, processed, submodule_path)
+ if spec is None:
+ continue
+ return finder, spec
+
+ raise ImportError("No module named %s" % ".".join(module_parts))
+
+
+def find_spec(modpath, path=None):
+ """Find a spec for the given module.
+
+ :type modpath: list or tuple
+ :param modpath:
+ split module's name (i.e name of a module or package split
+ on '.'), with leading empty strings for explicit relative import
+
+ :type path: list or None
+ :param path:
+      optional list of paths where the module or package should be
+ searched (use sys.path if nothing or None is given)
+
+ :rtype: ModuleSpec
+ :return: A module spec, which describes how the module was
+ found and where.
+ """
+ _path = path or sys.path
+
+    # Need a copy so we don't mutate the argument.
+ modpath = modpath[:]
+
+ submodule_path = None
+ module_parts = modpath[:]
+ processed = []
+
+ while modpath:
+ modname = modpath.pop(0)
+ finder, spec = _find_spec_with_path(
+ _path, modname, module_parts, processed, submodule_path or path
+ )
+ processed.append(modname)
+ if modpath:
+ submodule_path = finder.contribute_to_path(spec, processed)
+
+ if spec.type == ModuleType.PKG_DIRECTORY:
+ spec = spec._replace(submodule_search_locations=submodule_path)
+
+ return spec
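
As a quick illustration of the resolver defined above: `find_spec` walks the dotted module name part by part, letting each finder contribute search paths for the next part, and returns a `ModuleSpec`. A minimal sketch, assuming the vendored astroid package above is importable and using the standard library's email.mime package as the example name:

    from astroid.interpreter._import import spec

    # find_spec expects the module name already split on '.',
    # e.g. ["email", "mime"] for the email.mime package.
    module_spec = spec.find_spec(["email", "mime"])
    print(module_spec.type)      # a ModuleType enum member, e.g. PKG_DIRECTORY
    print(module_spec.location)  # filesystem location resolved by the finder
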
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/interpreter/_import/util.py b/basic python programmes/venv/Lib/site-packages/astroid/interpreter/_import/util.py
new file mode 100644
index 0000000..a917bd3
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/interpreter/_import/util.py
@@ -0,0 +1,10 @@
+# Copyright (c) 2016, 2018 Claudiu Popa
+
+try:
+ import pkg_resources
+except ImportError:
+ pkg_resources = None
+
+
+def is_namespace(modname):
+ return pkg_resources is not None and modname in pkg_resources._namespace_packages
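
`is_namespace` is a thin wrapper around pkg_resources: it only reports True for old-style namespace packages registered via `declare_namespace`. A minimal sketch, assuming pkg_resources is available in this environment:

    from astroid.interpreter._import import util

    # False for regular modules; True only when pkg_resources has recorded
    # the name in its _namespace_packages registry.
    print(util.is_namespace("collections"))
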
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/interpreter/dunder_lookup.py b/basic python programmes/venv/Lib/site-packages/astroid/interpreter/dunder_lookup.py
new file mode 100644
index 0000000..0ae9bc9
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/interpreter/dunder_lookup.py
@@ -0,0 +1,66 @@
+# Copyright (c) 2016-2018 Claudiu Popa
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Contains logic for retrieving special methods.
+
+This implementation does not rely on the dot attribute access
+logic found in ``.getattr()``. The difference between the two
+is that dunder methods are looked up through the type slots
+(you can find more about these here:
+http://lucumr.pocoo.org/2014/8/16/the-python-i-would-like-to-see/).
+As such, the lookup for special methods is actually simpler than
+the dot attribute access.
+"""
+import itertools
+
+import astroid
+from astroid import exceptions
+
+
+def _lookup_in_mro(node, name):
+ attrs = node.locals.get(name, [])
+
+ nodes = itertools.chain.from_iterable(
+ ancestor.locals.get(name, []) for ancestor in node.ancestors(recurs=True)
+ )
+ values = list(itertools.chain(attrs, nodes))
+ if not values:
+ raise exceptions.AttributeInferenceError(attribute=name, target=node)
+
+ return values
+
+
+def lookup(node, name):
+ """Lookup the given special method name in the given *node*
+
+ If the special method was found, then a list of attributes
+ will be returned. Otherwise, `astroid.AttributeInferenceError`
+ is going to be raised.
+ """
+ if isinstance(
+ node, (astroid.List, astroid.Tuple, astroid.Const, astroid.Dict, astroid.Set)
+ ):
+ return _builtin_lookup(node, name)
+ if isinstance(node, astroid.Instance):
+ return _lookup_in_mro(node, name)
+ if isinstance(node, astroid.ClassDef):
+ return _class_lookup(node, name)
+
+ raise exceptions.AttributeInferenceError(attribute=name, target=node)
+
+
+def _class_lookup(node, name):
+ metaclass = node.metaclass()
+ if metaclass is None:
+ raise exceptions.AttributeInferenceError(attribute=name, target=node)
+
+ return _lookup_in_mro(metaclass, name)
+
+
+def _builtin_lookup(node, name):
+ values = node.locals.get(name, [])
+ if not values:
+ raise exceptions.AttributeInferenceError(attribute=name, target=node)
+
+ return values
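
To see the special-method lookup above in action on a builtin container node, here is a minimal sketch using astroid's public `extract_node` helper (an assumption here: it is not part of this file, but ships with the same vendored package):

    import astroid
    from astroid.interpreter import dunder_lookup

    # A literal list becomes a node_classes.List, so lookup() takes the
    # _builtin_lookup branch and searches the proxied builtin list class.
    node = astroid.extract_node("[1, 2, 3]")
    methods = dunder_lookup.lookup(node, "__len__")
    print(methods)  # the matching method definition(s) from the builtins AST
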
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/interpreter/objectmodel.py b/basic python programmes/venv/Lib/site-packages/astroid/interpreter/objectmodel.py
new file mode 100644
index 0000000..65e9c64
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/interpreter/objectmodel.py
@@ -0,0 +1,673 @@
+# Copyright (c) 2016-2018 Claudiu Popa
+# Copyright (c) 2016 Derek Gustafson
+# Copyright (c) 2017-2018 Bryce Guinta
+# Copyright (c) 2017 Ceridwen
+# Copyright (c) 2017 Calen Pennington
+# Copyright (c) 2018 Nick Drozd
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+"""
+Data object model, as per https://docs.python.org/3/reference/datamodel.html.
+
+This module describes, at least partially, a data object model for some
+of astroid's nodes. The model contains special attributes that nodes such
+as functions, classes, modules etc have, such as __doc__, __class__,
+__module__ etc, being used when doing attribute lookups over nodes.
+
+For instance, inferring `obj.__class__` will first trigger an inference
+of the `obj` variable. If it was successfully inferred, then an attribute
+`__class__` will be looked for in the inferred object. This is where
+the data model comes into play. The model is attached to those nodes
+and the lookup mechanism will check whether attributes such as
+`__class__` are defined by the model or not. If they are defined,
+the model will be requested to return the corresponding value of that
+attribute. Thus the model can be viewed as a special part of the lookup
+mechanism.
+"""
+
+import builtins
+import itertools
+import pprint
+import os
+import types
+from functools import lru_cache
+
+import astroid
+from astroid import context as contextmod
+from astroid import exceptions
+from astroid import node_classes
+
+
+def _dunder_dict(instance, attributes):
+ obj = node_classes.Dict(parent=instance)
+
+ # Convert the keys to node strings
+ keys = [
+ node_classes.Const(value=value, parent=obj) for value in list(attributes.keys())
+ ]
+
+ # The original attribute has a list of elements for each key,
+ # but that is not useful for retrieving the special attribute's value.
+ # In this case, we're picking the last value from each list.
+ values = [elem[-1] for elem in attributes.values()]
+
+ obj.postinit(list(zip(keys, values)))
+ return obj
+
+
+class ObjectModel:
+ def __init__(self):
+ self._instance = None
+
+ def __repr__(self):
+ result = []
+ cname = type(self).__name__
+ string = "%(cname)s(%(fields)s)"
+ alignment = len(cname) + 1
+ for field in sorted(self.attributes()):
+ width = 80 - len(field) - alignment
+ lines = pprint.pformat(field, indent=2, width=width).splitlines(True)
+
+ inner = [lines[0]]
+ for line in lines[1:]:
+ inner.append(" " * alignment + line)
+ result.append(field)
+
+ return string % {
+ "cname": cname,
+ "fields": (",\n" + " " * alignment).join(result),
+ }
+
+ def __call__(self, instance):
+ self._instance = instance
+ return self
+
+ def __get__(self, instance, cls=None):
+ # ObjectModel needs to be a descriptor so that just doing
+ # `special_attributes = SomeObjectModel` should be enough in the body of a node.
+ # But at the same time, node.special_attributes should return an object
+ # which can be used for manipulating the special attributes. That's the reason
+ # we pass the instance through which it got accessed to ObjectModel.__call__,
+ # returning itself afterwards, so we can still have access to the
+ # underlying data model and to the instance for which it got accessed.
+ return self(instance)
+
+ def __contains__(self, name):
+ return name in self.attributes()
+
+ @lru_cache(maxsize=None)
+ def attributes(self):
+ """Get the attributes which are exported by this object model."""
+ return [obj[2:] for obj in dir(self) if obj.startswith("py")]
+
+ def lookup(self, name):
+ """Look up the given *name* in the current model
+
+ It should return an AST or an interpreter object,
+ but if the name is not found, then an AttributeInferenceError will be raised.
+ """
+
+ if name in self.attributes():
+ return getattr(self, "py" + name)
+ raise exceptions.AttributeInferenceError(target=self._instance, attribute=name)
+
+
+class ModuleModel(ObjectModel):
+ def _builtins(self):
+ builtins_ast_module = astroid.MANAGER.astroid_cache[builtins.__name__]
+ return builtins_ast_module.special_attributes.lookup("__dict__")
+
+ @property
+ def pybuiltins(self):
+ return self._builtins()
+
+ # __path__ is a standard attribute on *packages* not
+ # non-package modules. The only mention of it in the
+ # official 2.7 documentation I can find is in the
+ # tutorial.
+
+ @property
+ def py__path__(self):
+ if not self._instance.package:
+ raise exceptions.AttributeInferenceError(
+ target=self._instance, attribute="__path__"
+ )
+
+ path_objs = [
+ node_classes.Const(
+ value=path
+ if not path.endswith("__init__.py")
+ else os.path.dirname(path),
+ parent=self._instance,
+ )
+ for path in self._instance.path
+ ]
+
+ container = node_classes.List(parent=self._instance)
+ container.postinit(path_objs)
+
+ return container
+
+ @property
+ def py__name__(self):
+ return node_classes.Const(value=self._instance.name, parent=self._instance)
+
+ @property
+ def py__doc__(self):
+ return node_classes.Const(value=self._instance.doc, parent=self._instance)
+
+ @property
+ def py__file__(self):
+ return node_classes.Const(value=self._instance.file, parent=self._instance)
+
+ @property
+ def py__dict__(self):
+ return _dunder_dict(self._instance, self._instance.globals)
+
+ # __package__ isn't mentioned anywhere outside a PEP:
+ # https://www.python.org/dev/peps/pep-0366/
+ @property
+ def py__package__(self):
+ if not self._instance.package:
+ value = ""
+ else:
+ value = self._instance.name
+
+ return node_classes.Const(value=value, parent=self._instance)
+
+ # These are related to the Python 3 implementation of the
+ # import system,
+ # https://docs.python.org/3/reference/import.html#import-related-module-attributes
+
+ @property
+ def py__spec__(self):
+ # No handling for now.
+ return node_classes.Unknown()
+
+ @property
+ def py__loader__(self):
+ # No handling for now.
+ return node_classes.Unknown()
+
+ @property
+ def py__cached__(self):
+ # No handling for now.
+ return node_classes.Unknown()
+
+
+class FunctionModel(ObjectModel):
+ @property
+ def py__name__(self):
+ return node_classes.Const(value=self._instance.name, parent=self._instance)
+
+ @property
+ def py__doc__(self):
+ return node_classes.Const(value=self._instance.doc, parent=self._instance)
+
+ @property
+ def py__qualname__(self):
+ return node_classes.Const(value=self._instance.qname(), parent=self._instance)
+
+ @property
+ def py__defaults__(self):
+ func = self._instance
+ if not func.args.defaults:
+ return node_classes.Const(value=None, parent=func)
+
+ defaults_obj = node_classes.Tuple(parent=func)
+ defaults_obj.postinit(func.args.defaults)
+ return defaults_obj
+
+ @property
+ def py__annotations__(self):
+ obj = node_classes.Dict(parent=self._instance)
+
+ if not self._instance.returns:
+ returns = None
+ else:
+ returns = self._instance.returns
+
+ args = self._instance.args
+ pair_annotations = itertools.chain(
+ zip(args.args or [], args.annotations),
+ zip(args.kwonlyargs, args.kwonlyargs_annotations),
+ )
+
+ annotations = {
+ arg.name: annotation for (arg, annotation) in pair_annotations if annotation
+ }
+ if args.varargannotation:
+ annotations[args.vararg] = args.varargannotation
+ if args.kwargannotation:
+ annotations[args.kwarg] = args.kwargannotation
+ if returns:
+ annotations["return"] = returns
+
+ items = [
+ (node_classes.Const(key, parent=obj), value)
+ for (key, value) in annotations.items()
+ ]
+
+ obj.postinit(items)
+ return obj
+
+ @property
+ def py__dict__(self):
+ return node_classes.Dict(parent=self._instance)
+
+ py__globals__ = py__dict__
+
+ @property
+ def py__kwdefaults__(self):
+ def _default_args(args, parent):
+ for arg in args.kwonlyargs:
+ try:
+ default = args.default_value(arg.name)
+ except exceptions.NoDefault:
+ continue
+
+ name = node_classes.Const(arg.name, parent=parent)
+ yield name, default
+
+ args = self._instance.args
+ obj = node_classes.Dict(parent=self._instance)
+ defaults = dict(_default_args(args, obj))
+
+ obj.postinit(list(defaults.items()))
+ return obj
+
+ @property
+ def py__module__(self):
+ return node_classes.Const(self._instance.root().qname())
+
+ @property
+ def py__get__(self):
+ from astroid import bases
+
+ func = self._instance
+
+ class DescriptorBoundMethod(bases.BoundMethod):
+            """Bound method which knows how to handle descriptor binding calls."""
+
+ def implicit_parameters(self):
+ # Different than BoundMethod since the signature
+ # is different.
+ return 0
+
+ def infer_call_result(self, caller, context=None):
+ if len(caller.args) != 2:
+ raise exceptions.InferenceError(
+ "Invalid arguments for descriptor binding",
+ target=self,
+ context=context,
+ )
+
+ context = contextmod.copy_context(context)
+ cls = next(caller.args[0].infer(context=context))
+
+ if cls is astroid.Uninferable:
+ raise exceptions.InferenceError(
+ "Invalid class inferred", target=self, context=context
+ )
+
+ # For some reason func is a Node that the below
+ # code is not expecting
+ if isinstance(func, bases.BoundMethod):
+ yield func
+ return
+
+ # Rebuild the original value, but with the parent set as the
+ # class where it will be bound.
+ new_func = func.__class__(
+ name=func.name,
+ doc=func.doc,
+ lineno=func.lineno,
+ col_offset=func.col_offset,
+ parent=cls,
+ )
+ # pylint: disable=no-member
+ new_func.postinit(func.args, func.body, func.decorators, func.returns)
+
+ # Build a proper bound method that points to our newly built function.
+ proxy = bases.UnboundMethod(new_func)
+ yield bases.BoundMethod(proxy=proxy, bound=cls)
+
+ @property
+ def args(self):
+ """Overwrite the underlying args to match those of the underlying func
+
+ Usually the underlying *func* is a function/method, as in:
+
+ def test(self):
+ pass
+
+ This has only the *self* parameter but when we access test.__get__
+ we get a new object which has two parameters, *self* and *type*.
+ """
+ nonlocal func
+ params = func.args.args.copy()
+ params.append(astroid.AssignName(name="type"))
+ arguments = astroid.Arguments(parent=func.args.parent)
+ arguments.postinit(
+ args=params,
+ defaults=[],
+ kwonlyargs=[],
+ kw_defaults=[],
+ annotations=[],
+ )
+ return arguments
+
+ return DescriptorBoundMethod(proxy=self._instance, bound=self._instance)
+
+    # These are here just for completeness.
+ @property
+ def py__ne__(self):
+ return node_classes.Unknown()
+
+ py__subclasshook__ = py__ne__
+ py__str__ = py__ne__
+ py__sizeof__ = py__ne__
+ py__setattr__ = py__ne__
+ py__repr__ = py__ne__
+ py__reduce__ = py__ne__
+ py__reduce_ex__ = py__ne__
+ py__new__ = py__ne__
+ py__lt__ = py__ne__
+ py__eq__ = py__ne__
+ py__gt__ = py__ne__
+ py__format__ = py__ne__
+ py__delattr__ = py__ne__
+ py__getattribute__ = py__ne__
+ py__hash__ = py__ne__
+ py__init__ = py__ne__
+ py__dir__ = py__ne__
+ py__call__ = py__ne__
+ py__class__ = py__ne__
+ py__closure__ = py__ne__
+ py__code__ = py__ne__
+
+
+class ClassModel(ObjectModel):
+ @property
+ def py__module__(self):
+ return node_classes.Const(self._instance.root().qname())
+
+ @property
+ def py__name__(self):
+ return node_classes.Const(self._instance.name)
+
+ @property
+ def py__qualname__(self):
+ return node_classes.Const(self._instance.qname())
+
+ @property
+ def py__doc__(self):
+ return node_classes.Const(self._instance.doc)
+
+ @property
+ def py__mro__(self):
+ if not self._instance.newstyle:
+ raise exceptions.AttributeInferenceError(
+ target=self._instance, attribute="__mro__"
+ )
+
+ mro = self._instance.mro()
+ obj = node_classes.Tuple(parent=self._instance)
+ obj.postinit(mro)
+ return obj
+
+ @property
+ def pymro(self):
+ if not self._instance.newstyle:
+ raise exceptions.AttributeInferenceError(
+ target=self._instance, attribute="mro"
+ )
+
+ from astroid import bases
+
+ other_self = self
+
+ # Cls.mro is a method and we need to return one in order to have a proper inference.
+ # The method we're returning is capable of inferring the underlying MRO though.
+ class MroBoundMethod(bases.BoundMethod):
+ def infer_call_result(self, caller, context=None):
+ yield other_self.py__mro__
+
+ implicit_metaclass = self._instance.implicit_metaclass()
+ mro_method = implicit_metaclass.locals["mro"][0]
+ return MroBoundMethod(proxy=mro_method, bound=implicit_metaclass)
+
+ @property
+ def py__bases__(self):
+ obj = node_classes.Tuple()
+ context = contextmod.InferenceContext()
+ elts = list(self._instance._inferred_bases(context))
+ obj.postinit(elts=elts)
+ return obj
+
+ @property
+ def py__class__(self):
+ from astroid import helpers
+
+ return helpers.object_type(self._instance)
+
+ @property
+ def py__subclasses__(self):
+ """Get the subclasses of the underlying class
+
+ This looks only in the current module for retrieving the subclasses,
+ thus it might miss a couple of them.
+ """
+ from astroid import bases
+ from astroid import scoped_nodes
+
+ if not self._instance.newstyle:
+ raise exceptions.AttributeInferenceError(
+ target=self._instance, attribute="__subclasses__"
+ )
+
+ qname = self._instance.qname()
+ root = self._instance.root()
+ classes = [
+ cls
+ for cls in root.nodes_of_class(scoped_nodes.ClassDef)
+ if cls != self._instance and cls.is_subtype_of(qname)
+ ]
+
+ obj = node_classes.List(parent=self._instance)
+ obj.postinit(classes)
+
+ class SubclassesBoundMethod(bases.BoundMethod):
+ def infer_call_result(self, caller, context=None):
+ yield obj
+
+ implicit_metaclass = self._instance.implicit_metaclass()
+ subclasses_method = implicit_metaclass.locals["__subclasses__"][0]
+ return SubclassesBoundMethod(proxy=subclasses_method, bound=implicit_metaclass)
+
+ @property
+ def py__dict__(self):
+ return node_classes.Dict(parent=self._instance)
+
+
+class SuperModel(ObjectModel):
+ @property
+ def py__thisclass__(self):
+ return self._instance.mro_pointer
+
+ @property
+ def py__self_class__(self):
+ return self._instance._self_class
+
+ @property
+ def py__self__(self):
+ return self._instance.type
+
+ @property
+ def py__class__(self):
+ return self._instance._proxied
+
+
+class UnboundMethodModel(ObjectModel):
+ @property
+ def py__class__(self):
+ from astroid import helpers
+
+ return helpers.object_type(self._instance)
+
+ @property
+ def py__func__(self):
+ return self._instance._proxied
+
+ @property
+ def py__self__(self):
+ return node_classes.Const(value=None, parent=self._instance)
+
+ pyim_func = py__func__
+ pyim_class = py__class__
+ pyim_self = py__self__
+
+
+class BoundMethodModel(FunctionModel):
+ @property
+ def py__func__(self):
+ return self._instance._proxied._proxied
+
+ @property
+ def py__self__(self):
+ return self._instance.bound
+
+
+class GeneratorModel(FunctionModel):
+ def __new__(cls, *args, **kwargs):
+        # Append the values from the GeneratorType onto this object.
+ ret = super(GeneratorModel, cls).__new__(cls, *args, **kwargs)
+ generator = astroid.MANAGER.astroid_cache[builtins.__name__]["generator"]
+ for name, values in generator.locals.items():
+ method = values[0]
+ patched = lambda cls, meth=method: meth
+
+ setattr(type(ret), "py" + name, property(patched))
+
+ return ret
+
+ @property
+ def py__name__(self):
+ return node_classes.Const(
+ value=self._instance.parent.name, parent=self._instance
+ )
+
+ @property
+ def py__doc__(self):
+ return node_classes.Const(
+ value=self._instance.parent.doc, parent=self._instance
+ )
+
+
+class AsyncGeneratorModel(GeneratorModel):
+ def __new__(cls, *args, **kwargs):
+        # Append the values from the async generator type onto this object.
+ ret = super().__new__(cls, *args, **kwargs)
+ astroid_builtins = astroid.MANAGER.astroid_cache[builtins.__name__]
+ generator = astroid_builtins.get("async_generator")
+ if generator is None:
+ # Make it backward compatible.
+ generator = astroid_builtins.get("generator")
+
+ for name, values in generator.locals.items():
+ method = values[0]
+ patched = lambda cls, meth=method: meth
+
+ setattr(type(ret), "py" + name, property(patched))
+
+ return ret
+
+
+class InstanceModel(ObjectModel):
+ @property
+ def py__class__(self):
+ return self._instance._proxied
+
+ @property
+ def py__module__(self):
+ return node_classes.Const(self._instance.root().qname())
+
+ @property
+ def py__doc__(self):
+ return node_classes.Const(self._instance.doc)
+
+ @property
+ def py__dict__(self):
+ return _dunder_dict(self._instance, self._instance.instance_attrs)
+
+
+class ExceptionInstanceModel(InstanceModel):
+ @property
+ def pyargs(self):
+ message = node_classes.Const("")
+ args = node_classes.Tuple(parent=self._instance)
+ args.postinit((message,))
+ return args
+
+ @property
+ def py__traceback__(self):
+ builtins_ast_module = astroid.MANAGER.astroid_cache[builtins.__name__]
+ traceback_type = builtins_ast_module[types.TracebackType.__name__]
+ return traceback_type.instantiate_class()
+
+
+class DictModel(ObjectModel):
+ @property
+ def py__class__(self):
+ return self._instance._proxied
+
+ def _generic_dict_attribute(self, obj, name):
+ """Generate a bound method that can infer the given *obj*."""
+
+ class DictMethodBoundMethod(astroid.BoundMethod):
+ def infer_call_result(self, caller, context=None):
+ yield obj
+
+ meth = next(self._instance._proxied.igetattr(name))
+ return DictMethodBoundMethod(proxy=meth, bound=self._instance)
+
+ @property
+ def pyitems(self):
+ elems = []
+ obj = node_classes.List(parent=self._instance)
+ for key, value in self._instance.items:
+ elem = node_classes.Tuple(parent=obj)
+ elem.postinit((key, value))
+ elems.append(elem)
+ obj.postinit(elts=elems)
+
+ from astroid import objects
+
+ obj = objects.DictItems(obj)
+
+ return self._generic_dict_attribute(obj, "items")
+
+ @property
+ def pykeys(self):
+ keys = [key for (key, _) in self._instance.items]
+ obj = node_classes.List(parent=self._instance)
+ obj.postinit(elts=keys)
+
+ from astroid import objects
+
+ obj = objects.DictKeys(obj)
+
+ return self._generic_dict_attribute(obj, "keys")
+
+ @property
+ def pyvalues(self):
+
+ values = [value for (_, value) in self._instance.items]
+ obj = node_classes.List(parent=self._instance)
+ obj.postinit(values)
+
+ from astroid import objects
+
+ obj = objects.DictValues(obj)
+
+ return self._generic_dict_attribute(obj, "values")
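
The models above are what answer special attributes such as `__name__`, `__doc__` and `__dict__` during inference; they are served by the `special_attributes` descriptor rather than by a normal scope lookup. A minimal sketch, assuming astroid's public `extract_node` helper is available alongside this vendored file:

    import astroid

    func = astroid.extract_node("def greet(name): return 'hi ' + name")

    # __name__ is not assigned anywhere in the AST; FunctionModel.py__name__
    # synthesizes a Const node holding the function's name.
    inferred = next(func.igetattr("__name__"))
    print(inferred.value)  # greet
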
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/manager.py b/basic python programmes/venv/Lib/site-packages/astroid/manager.py
new file mode 100644
index 0000000..f05588a
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/manager.py
@@ -0,0 +1,319 @@
+# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2014-2018 Claudiu Popa
+# Copyright (c) 2014 BioGeek
+# Copyright (c) 2014 Google, Inc.
+# Copyright (c) 2014 Eevee (Alex Munroe)
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2016 Derek Gustafson
+# Copyright (c) 2017 Iva Miholic
+# Copyright (c) 2018 Bryce Guinta
+# Copyright (c) 2018 Nick Drozd
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""astroid manager: avoid multiple astroid build of a same module when
+possible by providing a class responsible to get astroid representation
+from various source and using a cache of built modules)
+"""
+
+import os
+import zipimport
+
+from astroid import exceptions
+from astroid.interpreter._import import spec
+from astroid import modutils
+from astroid import transforms
+
+
+def safe_repr(obj):
+ try:
+ return repr(obj)
+ except Exception: # pylint: disable=broad-except
+ return "???"
+
+
+class AstroidManager:
+    """the astroid manager, responsible for building astroid trees from
+    files or modules.
+
+    Uses the Borg pattern.
+ """
+
+ name = "astroid loader"
+ brain = {}
+
+ def __init__(self):
+ self.__dict__ = AstroidManager.brain
+ if not self.__dict__:
+ # NOTE: cache entries are added by the [re]builder
+ self.astroid_cache = {}
+ self._mod_file_cache = {}
+ self._failed_import_hooks = []
+ self.always_load_extensions = False
+ self.optimize_ast = False
+ self.extension_package_whitelist = set()
+ self._transform = transforms.TransformVisitor()
+
+ # Export these APIs for convenience
+ self.register_transform = self._transform.register_transform
+ self.unregister_transform = self._transform.unregister_transform
+ self.max_inferable_values = 100
+
+ def visit_transforms(self, node):
+ """Visit the transforms and apply them to the given *node*."""
+ return self._transform.visit(node)
+
+ def ast_from_file(self, filepath, modname=None, fallback=True, source=False):
+        """given a file path, return the astroid object"""
+ try:
+ filepath = modutils.get_source_file(filepath, include_no_ext=True)
+ source = True
+ except modutils.NoSourceFile:
+ pass
+ if modname is None:
+ try:
+ modname = ".".join(modutils.modpath_from_file(filepath))
+ except ImportError:
+ modname = filepath
+ if (
+ modname in self.astroid_cache
+ and self.astroid_cache[modname].file == filepath
+ ):
+ return self.astroid_cache[modname]
+ if source:
+ from astroid.builder import AstroidBuilder
+
+ return AstroidBuilder(self).file_build(filepath, modname)
+ if fallback and modname:
+ return self.ast_from_module_name(modname)
+ raise exceptions.AstroidBuildingError(
+ "Unable to build an AST for {path}.", path=filepath
+ )
+
+ def _build_stub_module(self, modname):
+ from astroid.builder import AstroidBuilder
+
+ return AstroidBuilder(self).string_build("", modname)
+
+ def _build_namespace_module(self, modname, path):
+ from astroid.builder import build_namespace_package_module
+
+ return build_namespace_package_module(modname, path)
+
+ def _can_load_extension(self, modname):
+ if self.always_load_extensions:
+ return True
+ if modutils.is_standard_module(modname):
+ return True
+ parts = modname.split(".")
+ return any(
+ ".".join(parts[:x]) in self.extension_package_whitelist
+ for x in range(1, len(parts) + 1)
+ )
+
+ def ast_from_module_name(self, modname, context_file=None):
+ """given a module name, return the astroid object"""
+ if modname in self.astroid_cache:
+ return self.astroid_cache[modname]
+ if modname == "__main__":
+ return self._build_stub_module(modname)
+ old_cwd = os.getcwd()
+ if context_file:
+ os.chdir(os.path.dirname(context_file))
+ try:
+ found_spec = self.file_from_module_name(modname, context_file)
+ if found_spec.type == spec.ModuleType.PY_ZIPMODULE:
+ module = self.zip_import_data(found_spec.location)
+ if module is not None:
+ return module
+
+ elif found_spec.type in (
+ spec.ModuleType.C_BUILTIN,
+ spec.ModuleType.C_EXTENSION,
+ ):
+ if (
+ found_spec.type == spec.ModuleType.C_EXTENSION
+ and not self._can_load_extension(modname)
+ ):
+ return self._build_stub_module(modname)
+ try:
+ module = modutils.load_module_from_name(modname)
+ except Exception as ex:
+ raise exceptions.AstroidImportError(
+ "Loading {modname} failed with:\n{error}",
+ modname=modname,
+ path=found_spec.location,
+ ) from ex
+ return self.ast_from_module(module, modname)
+
+ elif found_spec.type == spec.ModuleType.PY_COMPILED:
+ raise exceptions.AstroidImportError(
+ "Unable to load compiled module {modname}.",
+ modname=modname,
+ path=found_spec.location,
+ )
+
+ elif found_spec.type == spec.ModuleType.PY_NAMESPACE:
+ return self._build_namespace_module(
+ modname, found_spec.submodule_search_locations
+ )
+
+ if found_spec.location is None:
+ raise exceptions.AstroidImportError(
+ "Can't find a file for module {modname}.", modname=modname
+ )
+
+ return self.ast_from_file(found_spec.location, modname, fallback=False)
+ except exceptions.AstroidBuildingError as e:
+ for hook in self._failed_import_hooks:
+ try:
+ return hook(modname)
+ except exceptions.AstroidBuildingError:
+ pass
+ raise e
+ finally:
+ os.chdir(old_cwd)
+
+ def zip_import_data(self, filepath):
+ if zipimport is None:
+ return None
+ from astroid.builder import AstroidBuilder
+
+ builder = AstroidBuilder(self)
+ for ext in (".zip", ".egg"):
+ try:
+ eggpath, resource = filepath.rsplit(ext + os.path.sep, 1)
+ except ValueError:
+ continue
+ try:
+ importer = zipimport.zipimporter(eggpath + ext)
+ zmodname = resource.replace(os.path.sep, ".")
+ if importer.is_package(resource):
+ zmodname = zmodname + ".__init__"
+ module = builder.string_build(
+ importer.get_source(resource), zmodname, filepath
+ )
+ return module
+ except Exception: # pylint: disable=broad-except
+ continue
+ return None
+
+ def file_from_module_name(self, modname, contextfile):
+ try:
+ value = self._mod_file_cache[(modname, contextfile)]
+ except KeyError:
+ try:
+ value = modutils.file_info_from_modpath(
+ modname.split("."), context_file=contextfile
+ )
+ except ImportError as ex:
+ value = exceptions.AstroidImportError(
+ "Failed to import module {modname} with error:\n{error}.",
+ modname=modname,
+ error=ex,
+ )
+ self._mod_file_cache[(modname, contextfile)] = value
+ if isinstance(value, exceptions.AstroidBuildingError):
+ raise value
+ return value
+
+ def ast_from_module(self, module, modname=None):
+ """given an imported module, return the astroid object"""
+ modname = modname or module.__name__
+ if modname in self.astroid_cache:
+ return self.astroid_cache[modname]
+ try:
+ # some builtin modules don't have __file__ attribute
+ filepath = module.__file__
+ if modutils.is_python_source(filepath):
+ return self.ast_from_file(filepath, modname)
+ except AttributeError:
+ pass
+ from astroid.builder import AstroidBuilder
+
+ return AstroidBuilder(self).module_build(module, modname)
+
+ def ast_from_class(self, klass, modname=None):
+ """get astroid for the given class"""
+ if modname is None:
+ try:
+ modname = klass.__module__
+ except AttributeError as exc:
+ raise exceptions.AstroidBuildingError(
+ "Unable to get module for class {class_name}.",
+ cls=klass,
+ class_repr=safe_repr(klass),
+ modname=modname,
+ ) from exc
+ modastroid = self.ast_from_module_name(modname)
+ return modastroid.getattr(klass.__name__)[0] # XXX
+
+ def infer_ast_from_something(self, obj, context=None):
+ """infer astroid for the given class"""
+ if hasattr(obj, "__class__") and not isinstance(obj, type):
+ klass = obj.__class__
+ else:
+ klass = obj
+ try:
+ modname = klass.__module__
+ except AttributeError as exc:
+ raise exceptions.AstroidBuildingError(
+ "Unable to get module for {class_repr}.",
+ cls=klass,
+ class_repr=safe_repr(klass),
+ ) from exc
+ except Exception as exc:
+ raise exceptions.AstroidImportError(
+ "Unexpected error while retrieving module for {class_repr}:\n"
+ "{error}",
+ cls=klass,
+ class_repr=safe_repr(klass),
+ ) from exc
+ try:
+ name = klass.__name__
+ except AttributeError as exc:
+ raise exceptions.AstroidBuildingError(
+ "Unable to get name for {class_repr}:\n",
+ cls=klass,
+ class_repr=safe_repr(klass),
+ ) from exc
+ except Exception as exc:
+ raise exceptions.AstroidImportError(
+ "Unexpected error while retrieving name for {class_repr}:\n" "{error}",
+ cls=klass,
+ class_repr=safe_repr(klass),
+ ) from exc
+ # take care, on living object __module__ is regularly wrong :(
+ modastroid = self.ast_from_module_name(modname)
+ if klass is obj:
+ for inferred in modastroid.igetattr(name, context):
+ yield inferred
+ else:
+ for inferred in modastroid.igetattr(name, context):
+ yield inferred.instantiate_class()
+
+ def register_failed_import_hook(self, hook):
+ """Registers a hook to resolve imports that cannot be found otherwise.
+
+ `hook` must be a function that accepts a single argument `modname` which
+ contains the name of the module or package that could not be imported.
+        If `hook` can resolve the import, it must return a node of type
+        `astroid.Module`; otherwise, it must raise `AstroidBuildingError`.
+ """
+ self._failed_import_hooks.append(hook)
+
+ def cache_module(self, module):
+ """Cache a module if no module with the same name is known yet."""
+ self.astroid_cache.setdefault(module.name, module)
+
+ def clear_cache(self, astroid_builtin=None):
+ # XXX clear transforms
+ self.astroid_cache.clear()
+        # force bootstrap again, else we may end up with cache inconsistency
+ # between the manager and CONST_PROXY, making
+ # unittest_lookup.LookupTC.test_builtin_lookup fail depending on the
+ # test order
+ import astroid.raw_building
+
+ astroid.raw_building._astroid_bootstrapping(astroid_builtin=astroid_builtin)
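
Because AstroidManager uses the Borg pattern and keeps `astroid_cache`, every instance shares the same state and repeated requests for a module are served from the cache. A minimal sketch, assuming the vendored astroid is importable and exposes the usual `astroid.MANAGER` singleton:

    import astroid

    manager = astroid.MANAGER  # all AstroidManager() instances share this state
    module = manager.ast_from_module_name("collections")
    print(module.name)  # collections

    # The second request is answered from astroid_cache instead of re-parsing.
    assert manager.ast_from_module_name("collections") is module
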
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/mixins.py b/basic python programmes/venv/Lib/site-packages/astroid/mixins.py
new file mode 100644
index 0000000..497a840
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/mixins.py
@@ -0,0 +1,160 @@
+# Copyright (c) 2010-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2014-2016, 2018 Claudiu Popa
+# Copyright (c) 2014 Google, Inc.
+# Copyright (c) 2014 Eevee (Alex Munroe)
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2015 Florian Bruhin
+# Copyright (c) 2016 Jakub Wilk
+# Copyright (c) 2018 Nick Drozd
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""This module contains some mixins for the different nodes.
+"""
+import itertools
+
+from astroid import decorators
+from astroid import exceptions
+
+
+class BlockRangeMixIn:
+ """override block range """
+
+ @decorators.cachedproperty
+ def blockstart_tolineno(self):
+ return self.lineno
+
+ def _elsed_block_range(self, lineno, orelse, last=None):
+ """handle block line numbers range for try/finally, for, if and while
+ statements
+ """
+ if lineno == self.fromlineno:
+ return lineno, lineno
+ if orelse:
+ if lineno >= orelse[0].fromlineno:
+ return lineno, orelse[-1].tolineno
+ return lineno, orelse[0].fromlineno - 1
+ return lineno, last or self.tolineno
+
+
+class FilterStmtsMixin:
+ """Mixin for statement filtering and assignment type"""
+
+ def _get_filtered_stmts(self, _, node, _stmts, mystmt):
+ """method used in _filter_stmts to get statements and trigger break"""
+ if self.statement() is mystmt:
+ # original node's statement is the assignment, only keep
+ # current node (gen exp, list comp)
+ return [node], True
+ return _stmts, False
+
+ def assign_type(self):
+ return self
+
+
+class AssignTypeMixin:
+ def assign_type(self):
+ return self
+
+ def _get_filtered_stmts(self, lookup_node, node, _stmts, mystmt):
+ """method used in filter_stmts"""
+ if self is mystmt:
+ return _stmts, True
+ if self.statement() is mystmt:
+ # original node's statement is the assignment, only keep
+ # current node (gen exp, list comp)
+ return [node], True
+ return _stmts, False
+
+
+class ParentAssignTypeMixin(AssignTypeMixin):
+ def assign_type(self):
+ return self.parent.assign_type()
+
+
+class ImportFromMixin(FilterStmtsMixin):
+ """MixIn for From and Import Nodes"""
+
+ def _infer_name(self, frame, name):
+ return name
+
+ def do_import_module(self, modname=None):
+        """return the ast for a module whose name is imported by this node
+ """
+ # handle special case where we are on a package node importing a module
+ # using the same name as the package, which may end in an infinite loop
+ # on relative imports
+ # XXX: no more needed ?
+ mymodule = self.root()
+ level = getattr(self, "level", None) # Import as no level
+ if modname is None:
+ modname = self.modname
+ # XXX we should investigate deeper if we really want to check
+        # importing itself: modname and mymodule.name may be relative or absolute
+ if mymodule.relative_to_absolute_name(modname, level) == mymodule.name:
+ # FIXME: we used to raise InferenceError here, but why ?
+ return mymodule
+
+ return mymodule.import_module(
+ modname, level=level, relative_only=level and level >= 1
+ )
+
+ def real_name(self, asname):
+ """get name from 'as' name"""
+ for name, _asname in self.names:
+ if name == "*":
+ return asname
+ if not _asname:
+ name = name.split(".", 1)[0]
+ _asname = name
+ if asname == _asname:
+ return name
+ raise exceptions.AttributeInferenceError(
+ "Could not find original name for {attribute} in {target!r}",
+ target=self,
+ attribute=asname,
+ )
+
+
+class MultiLineBlockMixin:
+ """Mixin for nodes with multi-line blocks, e.g. For and FunctionDef.
+ Note that this does not apply to every node with a `body` field.
+ For instance, an If node has a multi-line body, but the body of an
+ IfExpr is not multi-line, and hence cannot contain Return nodes,
+ Assign nodes, etc.
+ """
+
+ @decorators.cachedproperty
+ def _multi_line_blocks(self):
+ return tuple(getattr(self, field) for field in self._multi_line_block_fields)
+
+ def _get_return_nodes_skip_functions(self):
+ for block in self._multi_line_blocks:
+ for child_node in block:
+ if child_node.is_function:
+ continue
+ yield from child_node._get_return_nodes_skip_functions()
+
+ def _get_yield_nodes_skip_lambdas(self):
+ for block in self._multi_line_blocks:
+ for child_node in block:
+ if child_node.is_lambda:
+ continue
+ yield from child_node._get_yield_nodes_skip_lambdas()
+
+ @decorators.cached
+ def _get_assign_nodes(self):
+ children_assign_nodes = (
+ child_node._get_assign_nodes()
+ for block in self._multi_line_blocks
+ for child_node in block
+ )
+ return list(itertools.chain.from_iterable(children_assign_nodes))
+
+
+class NoChildrenMixin:
+ """Mixin for nodes with no children, e.g. Pass."""
+
+ def get_children(self):
+ yield from ()
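
`ImportFromMixin.real_name` is easiest to see on an aliased import node. A minimal sketch using astroid's public `extract_node` helper (assumed to be importable from the same vendored package):

    import astroid

    node = astroid.extract_node("import os.path as osp")
    # real_name maps the local alias back to the name that was imported.
    print(node.real_name("osp"))  # os.path
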
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/modutils.py b/basic python programmes/venv/Lib/site-packages/astroid/modutils.py
new file mode 100644
index 0000000..4e6f918
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/modutils.py
@@ -0,0 +1,704 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2014-2018 Claudiu Popa
+# Copyright (c) 2014 Google, Inc.
+# Copyright (c) 2014 Denis Laxalde
+# Copyright (c) 2014 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2014 Eevee (Alex Munroe)
+# Copyright (c) 2015 Florian Bruhin
+# Copyright (c) 2015 Radosław Ganczarek
+# Copyright (c) 2016 Derek Gustafson
+# Copyright (c) 2016 Jakub Wilk
+# Copyright (c) 2016 Ceridwen
+# Copyright (c) 2018 Mario Corchero
+# Copyright (c) 2018 Mario Corchero
+# Copyright (c) 2018 Anthony Sottile
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Python modules manipulation utility functions.
+
+:type PY_SOURCE_EXTS: tuple(str)
+:var PY_SOURCE_EXTS: tuple of possible python source file extensions
+
+:type STD_LIB_DIRS: set of str
+:var STD_LIB_DIRS: directories where standard modules are located
+
+:type BUILTIN_MODULES: dict
+:var BUILTIN_MODULES: dictionary with builtin module names as keys
+"""
+import imp
+import os
+import platform
+import sys
+import itertools
+from distutils.sysconfig import get_python_lib # pylint: disable=import-error
+
+# pylint: disable=import-error, no-name-in-module
+from distutils.errors import DistutilsPlatformError
+
+# distutils is replaced by virtualenv with a module that does
+# weird path manipulations in order to get to the
+# real distutils module.
+
+from .interpreter._import import spec
+from .interpreter._import import util
+
+if sys.platform.startswith("win"):
+ PY_SOURCE_EXTS = ("py", "pyw")
+ PY_COMPILED_EXTS = ("dll", "pyd")
+else:
+ PY_SOURCE_EXTS = ("py",)
+ PY_COMPILED_EXTS = ("so",)
+
+
+try:
+ # The explicit sys.prefix is to work around a patch in virtualenv that
+ # replaces the 'real' sys.prefix (i.e. the location of the binary)
+ # with the prefix from which the virtualenv was created. This throws
+ # off the detection logic for standard library modules, thus the
+ # workaround.
+ STD_LIB_DIRS = {
+ get_python_lib(standard_lib=True, prefix=sys.prefix),
+ # Take care of installations where exec_prefix != prefix.
+ get_python_lib(standard_lib=True, prefix=sys.exec_prefix),
+ get_python_lib(standard_lib=True),
+ }
+# get_python_lib(standard_lib=1) is not available on pypy; fall back to an
+# empty STD_LIB_DIRS, see https://bugs.pypy.org/issue1164
+except DistutilsPlatformError:
+ STD_LIB_DIRS = set()
+
+if os.name == "nt":
+ STD_LIB_DIRS.add(os.path.join(sys.prefix, "dlls"))
+ try:
+ # real_prefix is defined when running inside virtual environments,
+ # created with the **virtualenv** library.
+ STD_LIB_DIRS.add(os.path.join(sys.real_prefix, "dlls"))
+ except AttributeError:
+ # sys.base_exec_prefix is always defined, but in a virtual environment
+ # created with the stdlib **venv** module, it points to the original
+ # installation, if the virtual env is activated.
+ try:
+ STD_LIB_DIRS.add(os.path.join(sys.base_exec_prefix, "dlls"))
+ except AttributeError:
+ pass
+
+if platform.python_implementation() == "PyPy":
+ _root = os.path.join(sys.prefix, "lib_pypy")
+ STD_LIB_DIRS.add(_root)
+ try:
+ # real_prefix is defined when running inside virtualenv.
+ STD_LIB_DIRS.add(os.path.join(sys.real_prefix, "lib_pypy"))
+ except AttributeError:
+ pass
+ del _root
+if os.name == "posix":
+    # Need the real prefix if we're under a virtualenv, otherwise
+ # the usual one will do.
+ try:
+ prefix = sys.real_prefix
+ except AttributeError:
+ prefix = sys.prefix
+
+ def _posix_path(path):
+ base_python = "python%d.%d" % sys.version_info[:2]
+ return os.path.join(prefix, path, base_python)
+
+ STD_LIB_DIRS.add(_posix_path("lib"))
+ if sys.maxsize > 2 ** 32:
+ # This tries to fix a problem with /usr/lib64 builds,
+ # where systems are running both 32-bit and 64-bit code
+ # on the same machine, which reflects into the places where
+ # standard library could be found. More details can be found
+ # here http://bugs.python.org/issue1294959.
+ # An easy reproducing case would be
+ # https://github.com/PyCQA/pylint/issues/712#issuecomment-163178753
+ STD_LIB_DIRS.add(_posix_path("lib64"))
+
+EXT_LIB_DIR = get_python_lib()
+IS_JYTHON = platform.python_implementation() == "Jython"
+BUILTIN_MODULES = dict.fromkeys(sys.builtin_module_names, True)
+
+
+class NoSourceFile(Exception):
+ """exception raised when we are not able to get a python
+ source file for a precompiled file
+ """
+
+
+def _normalize_path(path):
+ return os.path.normcase(os.path.abspath(path))
+
+
+def _canonicalize_path(path):
+ return os.path.realpath(os.path.expanduser(path))
+
+
+def _path_from_filename(filename, is_jython=IS_JYTHON):
+ if not is_jython:
+ if sys.version_info > (3, 0):
+ return filename
+ if filename.endswith(".pyc"):
+ return filename[:-1]
+ return filename
+ head, has_pyclass, _ = filename.partition("$py.class")
+ if has_pyclass:
+ return head + ".py"
+ return filename
+
+
+def _handle_blacklist(blacklist, dirnames, filenames):
+    """remove files/directories in the blacklist
+
+ dirnames/filenames are usually from os.walk
+ """
+ for norecurs in blacklist:
+ if norecurs in dirnames:
+ dirnames.remove(norecurs)
+ elif norecurs in filenames:
+ filenames.remove(norecurs)
+
+
+_NORM_PATH_CACHE = {}
+
+
+def _cache_normalize_path(path):
+ """abspath with caching"""
+ # _module_file calls abspath on every path in sys.path every time it's
+ # called; on a larger codebase this easily adds up to half a second just
+ # assembling path components. This cache alleviates that.
+ try:
+ return _NORM_PATH_CACHE[path]
+ except KeyError:
+ if not path: # don't cache result for ''
+ return _normalize_path(path)
+ result = _NORM_PATH_CACHE[path] = _normalize_path(path)
+ return result
+
+
+def load_module_from_name(dotted_name, path=None, use_sys=True):
+ """Load a Python module from its name.
+
+ :type dotted_name: str
+ :param dotted_name: python name of a module or package
+
+ :type path: list or None
+ :param path:
+      optional list of paths where the module or package should be
+ searched (use sys.path if nothing or None is given)
+
+ :type use_sys: bool
+ :param use_sys:
+ boolean indicating whether the sys.modules dictionary should be
+ used or not
+
+
+ :raise ImportError: if the module or package is not found
+
+ :rtype: module
+ :return: the loaded module
+ """
+ return load_module_from_modpath(dotted_name.split("."), path, use_sys)
+
+
+def load_module_from_modpath(parts, path=None, use_sys=1):
+ """Load a python module from its split name.
+
+ :type parts: list(str) or tuple(str)
+ :param parts:
+ python name of a module or package split on '.'
+
+ :type path: list or None
+ :param path:
+      optional list of paths where the module or package should be
+ searched (use sys.path if nothing or None is given)
+
+ :type use_sys: bool
+ :param use_sys:
+ boolean indicating whether the sys.modules dictionary should be used or not
+
+ :raise ImportError: if the module or package is not found
+
+ :rtype: module
+ :return: the loaded module
+ """
+ if use_sys:
+ try:
+ return sys.modules[".".join(parts)]
+ except KeyError:
+ pass
+ modpath = []
+ prevmodule = None
+ for part in parts:
+ modpath.append(part)
+ curname = ".".join(modpath)
+ module = None
+ if len(modpath) != len(parts):
+ # even with use_sys=False, should try to get outer packages from sys.modules
+ module = sys.modules.get(curname)
+ elif use_sys:
+ # because it may have been indirectly loaded through a parent
+ module = sys.modules.get(curname)
+ if module is None:
+ mp_file, mp_filename, mp_desc = imp.find_module(part, path)
+ module = imp.load_module(curname, mp_file, mp_filename, mp_desc)
+ # mp_file still needs to be closed.
+ if mp_file:
+ mp_file.close()
+ if prevmodule:
+ setattr(prevmodule, part, module)
+ _file = getattr(module, "__file__", "")
+ prevmodule = module
+ if not _file and util.is_namespace(curname):
+ continue
+ if not _file and len(modpath) != len(parts):
+ raise ImportError("no module in %s" % ".".join(parts[len(modpath) :]))
+ path = [os.path.dirname(_file)]
+ return module
+
+
+def load_module_from_file(filepath, path=None, use_sys=True, extrapath=None):
+    """Load a Python module from its path.
+
+ :type filepath: str
+ :param filepath: path to the python module or package
+
+ :type path: list or None
+ :param path:
+      optional list of paths where the module or package should be
+ searched (use sys.path if nothing or None is given)
+
+ :type use_sys: bool
+ :param use_sys:
+ boolean indicating whether the sys.modules dictionary should be
+ used or not
+
+
+ :raise ImportError: if the module or package is not found
+
+ :rtype: module
+ :return: the loaded module
+ """
+ modpath = modpath_from_file(filepath, extrapath)
+ return load_module_from_modpath(modpath, path, use_sys)
+
+
+def check_modpath_has_init(path, mod_path):
+    """check that there is an __init__.py file all along the way"""
+ modpath = []
+ for part in mod_path:
+ modpath.append(part)
+ path = os.path.join(path, part)
+ if not _has_init(path):
+ old_namespace = util.is_namespace(".".join(modpath))
+ if not old_namespace:
+ return False
+ return True
+
+
+def _get_relative_base_path(filename, path_to_check):
+ """Extracts the relative mod path of the file to import from
+
+ Check if a file is within the passed in path and if so, returns the
+ relative mod path from the one passed in.
+
+    If the filename is not in path_to_check, returns None
+
+    Note this function will look at both the abspath and the realpath of the
+    file; this allows finding the relative base path even if the file is a
+    symlink to a file in the passed-in path
+
+ Examples:
+ _get_relative_base_path("/a/b/c/d.py", "/a/b") -> ["c","d"]
+ _get_relative_base_path("/a/b/c/d.py", "/dev") -> None
+ """
+ importable_path = None
+ path_to_check = os.path.normcase(path_to_check)
+ abs_filename = os.path.abspath(filename)
+ if os.path.normcase(abs_filename).startswith(path_to_check):
+ importable_path = abs_filename
+
+ real_filename = os.path.realpath(filename)
+ if os.path.normcase(real_filename).startswith(path_to_check):
+ importable_path = real_filename
+
+ if importable_path:
+ base_path = os.path.splitext(importable_path)[0]
+ relative_base_path = base_path[len(path_to_check) :]
+ return [pkg for pkg in relative_base_path.split(os.sep) if pkg]
+
+ return None
+
+
+def modpath_from_file_with_callback(filename, extrapath=None, is_package_cb=None):
+ filename = os.path.expanduser(_path_from_filename(filename))
+
+ if extrapath is not None:
+ for path_ in itertools.chain(map(_canonicalize_path, extrapath), extrapath):
+ path = os.path.abspath(path_)
+ if not path:
+ continue
+ submodpath = _get_relative_base_path(filename, path)
+ if not submodpath:
+ continue
+ if is_package_cb(path, submodpath[:-1]):
+ return extrapath[path_].split(".") + submodpath
+
+ for path in itertools.chain(map(_canonicalize_path, sys.path), sys.path):
+ path = _cache_normalize_path(path)
+ if not path:
+ continue
+ modpath = _get_relative_base_path(filename, path)
+ if not modpath:
+ continue
+ if is_package_cb(path, modpath[:-1]):
+ return modpath
+
+ raise ImportError(
+ "Unable to find module for %s in %s" % (filename, ", \n".join(sys.path))
+ )
+
+
+def modpath_from_file(filename, extrapath=None):
+ """given a file path return the corresponding split module's name
+ (i.e name of a module or package split on '.')
+
+ :type filename: str
+ :param filename: file's path for which we want the module's name
+
+ :type extrapath: dict
+ :param extrapath:
+ optional extra search path, with path as key and package name for the path
+        as value. This is usually useful for handling packages split across
+        multiple directories using the __path__ trick.
+
+
+ :raise ImportError:
+ if the corresponding module's name has not been found
+
+ :rtype: list(str)
+ :return: the corresponding split module's name
+ """
+ return modpath_from_file_with_callback(filename, extrapath, check_modpath_has_init)
+
+
+def file_from_modpath(modpath, path=None, context_file=None):
+ return file_info_from_modpath(modpath, path, context_file).location
+
+
+def file_info_from_modpath(modpath, path=None, context_file=None):
+ """given a mod path (i.e. split module / package name), return the
+ corresponding file, giving priority to source file over precompiled
+ file if it exists
+
+ :type modpath: list or tuple
+ :param modpath:
+ split module's name (i.e name of a module or package split
+ on '.')
+ (this means explicit relative imports that start with dots have
+ empty strings in this list!)
+
+ :type path: list or None
+ :param path:
+      optional list of paths where the module or package should be
+ searched (use sys.path if nothing or None is given)
+
+ :type context_file: str or None
+ :param context_file:
+ context file to consider, necessary if the identifier has been
+ introduced using a relative import unresolvable in the actual
+ context (i.e. modutils)
+
+ :raise ImportError: if there is no such module in the directory
+
+ :rtype: (str or None, import type)
+ :return:
+ the path to the module's file or None if it's an integrated
+ builtin module such as 'sys'
+ """
+ if context_file is not None:
+ context = os.path.dirname(context_file)
+ else:
+ context = context_file
+ if modpath[0] == "xml":
+ # handle _xmlplus
+ try:
+ return _spec_from_modpath(["_xmlplus"] + modpath[1:], path, context)
+ except ImportError:
+ return _spec_from_modpath(modpath, path, context)
+ elif modpath == ["os", "path"]:
+ # FIXME: currently ignoring search_path...
+ return spec.ModuleSpec(
+ name="os.path", location=os.path.__file__, module_type=imp.PY_SOURCE
+ )
+ return _spec_from_modpath(modpath, path, context)
+
+
+def get_module_part(dotted_name, context_file=None):
+ """given a dotted name return the module part of the name :
+
+ >>> get_module_part('astroid.as_string.dump')
+ 'astroid.as_string'
+
+ :type dotted_name: str
+ :param dotted_name: full name of the identifier we are interested in
+
+ :type context_file: str or None
+ :param context_file:
+ context file to consider, necessary if the identifier has been
+ introduced using a relative import unresolvable in the actual
+ context (i.e. modutils)
+
+
+ :raise ImportError: if there is no such module in the directory
+
+ :rtype: str or None
+ :return:
+      the module part of the name, or None if we have not been able to
+      import the given name at all
+
+ XXX: deprecated, since it doesn't handle package precedence over module
+ (see #10066)
+ """
+ # os.path trick
+ if dotted_name.startswith("os.path"):
+ return "os.path"
+ parts = dotted_name.split(".")
+ if context_file is not None:
+        # first check for builtin module which won't be considered later
+ # in that case (path != None)
+ if parts[0] in BUILTIN_MODULES:
+ if len(parts) > 2:
+ raise ImportError(dotted_name)
+ return parts[0]
+ # don't use += or insert, we want a new list to be created !
+ path = None
+ starti = 0
+ if parts[0] == "":
+ assert (
+ context_file is not None
+ ), "explicit relative import, but no context_file?"
+ path = [] # prevent resolving the import non-relatively
+ starti = 1
+ while parts[starti] == "": # for all further dots: change context
+ starti += 1
+ context_file = os.path.dirname(context_file)
+ for i in range(starti, len(parts)):
+ try:
+ file_from_modpath(
+ parts[starti : i + 1], path=path, context_file=context_file
+ )
+ except ImportError:
+ if i < max(1, len(parts) - 2):
+ raise
+ return ".".join(parts[:i])
+ return dotted_name
+
+
+def get_module_files(src_directory, blacklist, list_all=False):
+ """given a package directory return a list of all available python
+ module's files in the package and its subpackages
+
+ :type src_directory: str
+ :param src_directory:
+ path of the directory corresponding to the package
+
+ :type blacklist: list or tuple
+    :param blacklist:
+      iterable of file or directory names to ignore.
+
+ :type list_all: bool
+ :param list_all:
+ get files from all paths, including ones without __init__.py
+
+ :rtype: list
+ :return:
+ the list of all available python module's files in the package and
+ its subpackages
+ """
+ files = []
+ for directory, dirnames, filenames in os.walk(src_directory):
+ if directory in blacklist:
+ continue
+ _handle_blacklist(blacklist, dirnames, filenames)
+ # check for __init__.py
+ if not list_all and "__init__.py" not in filenames:
+ dirnames[:] = ()
+ continue
+ for filename in filenames:
+ if _is_python_file(filename):
+ src = os.path.join(directory, filename)
+ files.append(src)
+ return files
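+# Illustrative usage sketch (not from upstream astroid; assumes astroid is
+# installed as a regular package directory):
+#
+#     >>> import astroid, os
+#     >>> pkg_dir = os.path.dirname(astroid.__file__)
+#     >>> files = get_module_files(pkg_dir, blacklist=())
+#     >>> any(f.endswith('modutils.py') for f in files)
+#     True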
+
+
+def get_source_file(filename, include_no_ext=False):
+ """given a python module's file name return the matching source file
+ name (the filename will be returned identically if it's already an
+ absolute path to a python source file...)
+
+ :type filename: str
+ :param filename: python module's file name
+
+
+ :raise NoSourceFile: if no source file exists on the file system
+
+ :rtype: str
+ :return: the absolute path of the source file if it exists
+ """
+ filename = os.path.abspath(_path_from_filename(filename))
+ base, orig_ext = os.path.splitext(filename)
+ for ext in PY_SOURCE_EXTS:
+ source_path = "%s.%s" % (base, ext)
+ if os.path.exists(source_path):
+ return source_path
+ if include_no_ext and not orig_ext and os.path.exists(base):
+ return base
+ raise NoSourceFile(filename)
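+# Illustrative usage sketch (not from upstream astroid; the .pyc path below is
+# hypothetical and only works if the matching .py file exists on disk):
+#
+#     >>> get_source_file('/usr/lib/python3.7/os.pyc')
+#     '/usr/lib/python3.7/os.py'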
+
+
+def is_python_source(filename):
+ """
+ rtype: bool
+ return: True if the filename is a python source file
+ """
+ return os.path.splitext(filename)[1][1:] in PY_SOURCE_EXTS
+
+
+def is_standard_module(modname, std_path=None):
+ """try to guess if a module is a standard python module (by default,
+ see `std_path` parameter's description)
+
+ :type modname: str
+ :param modname: name of the module we are interested in
+
+ :type std_path: list(str) or tuple(str)
+    :param std_path: list of paths considered as standard
+
+
+ :rtype: bool
+ :return:
+ true if the module:
+        - is located in one of the directories listed in `std_path`
+ - is a built-in module
+ """
+ modname = modname.split(".")[0]
+ try:
+ filename = file_from_modpath([modname])
+ except ImportError:
+ # import failed, i'm probably not so wrong by supposing it's
+ # not standard...
+ return False
+ # modules which are not living in a file are considered standard
+ # (sys and __builtin__ for instance)
+ if filename is None:
+ # we assume there are no namespaces in stdlib
+ return not util.is_namespace(modname)
+ filename = _normalize_path(filename)
+ if filename.startswith(_cache_normalize_path(EXT_LIB_DIR)):
+ return False
+ if std_path is None:
+ std_path = STD_LIB_DIRS
+ for path in std_path:
+ if filename.startswith(_cache_normalize_path(path)):
+ return True
+ return False
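+# Illustrative usage sketch (not from upstream astroid; assumes astroid is
+# installed under site-packages, i.e. under EXT_LIB_DIR):
+#
+#     >>> is_standard_module('collections')
+#     True
+#     >>> is_standard_module('astroid')
+#     False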
+
+
+def is_relative(modname, from_file):
+ """return true if the given module name is relative to the given
+ file name
+
+ :type modname: str
+ :param modname: name of the module we are interested in
+
+ :type from_file: str
+ :param from_file:
+ path of the module from which modname has been imported
+
+ :rtype: bool
+ :return:
+ true if the module has been imported relatively to `from_file`
+ """
+ if not os.path.isdir(from_file):
+ from_file = os.path.dirname(from_file)
+ if from_file in sys.path:
+ return False
+ try:
+ stream, _, _ = imp.find_module(modname.split(".")[0], [from_file])
+
+ # Close the stream to avoid ResourceWarnings.
+ if stream:
+ stream.close()
+ return True
+ except ImportError:
+ return False
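+# Illustrative usage sketch (not from upstream astroid; assumes the stdlib
+# 'email' package directory is not itself on sys.path):
+#
+#     >>> import email
+#     >>> is_relative('utils', email.__file__)
+#     True
+#     >>> is_relative('astroid', email.__file__)
+#     False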
+
+
+# internal only functions #####################################################
+
+
+def _spec_from_modpath(modpath, path=None, context=None):
+ """given a mod path (i.e. split module / package name), return the
+ corresponding spec
+
+ this function is used internally, see `file_from_modpath`'s
+ documentation for more information
+ """
+ assert modpath
+ location = None
+ if context is not None:
+ try:
+ found_spec = spec.find_spec(modpath, [context])
+ location = found_spec.location
+ except ImportError:
+ found_spec = spec.find_spec(modpath, path)
+ location = found_spec.location
+ else:
+ found_spec = spec.find_spec(modpath, path)
+ if found_spec.type == spec.ModuleType.PY_COMPILED:
+ try:
+ location = get_source_file(found_spec.location)
+ return found_spec._replace(
+ location=location, type=spec.ModuleType.PY_SOURCE
+ )
+ except NoSourceFile:
+ return found_spec._replace(location=location)
+ elif found_spec.type == spec.ModuleType.C_BUILTIN:
+ # integrated builtin module
+ return found_spec._replace(location=None)
+ elif found_spec.type == spec.ModuleType.PKG_DIRECTORY:
+ location = _has_init(found_spec.location)
+ return found_spec._replace(location=location, type=spec.ModuleType.PY_SOURCE)
+ return found_spec
+
+
+def _is_python_file(filename):
+ """return true if the given filename should be considered as a python file
+
+ .pyc and .pyo are ignored
+ """
+ for ext in (".py", ".so", ".pyd", ".pyw"):
+ if filename.endswith(ext):
+ return True
+ return False
+
+
+def _has_init(directory):
+ """if the given directory has a valid __init__ file, return its path,
+ else return None
+ """
+ mod_or_pack = os.path.join(directory, "__init__")
+ for ext in PY_SOURCE_EXTS + ("pyc", "pyo"):
+ if os.path.exists(mod_or_pack + "." + ext):
+ return mod_or_pack + "." + ext
+ return None
+
+
+def is_namespace(specobj):
+ return specobj.type == spec.ModuleType.PY_NAMESPACE
+
+
+def is_directory(specobj):
+ return specobj.type == spec.ModuleType.PKG_DIRECTORY
diff --git a/basic python programmes/venv/Lib/site-packages/astroid/node_classes.py b/basic python programmes/venv/Lib/site-packages/astroid/node_classes.py
new file mode 100644
index 0000000..7ef6b0a
--- /dev/null
+++ b/basic python programmes/venv/Lib/site-packages/astroid/node_classes.py
@@ -0,0 +1,4676 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2010 Daniel Harding
+# Copyright (c) 2012 FELD Boris
+# Copyright (c) 2013-2014 Google, Inc.
+# Copyright (c) 2014-2018 Claudiu Popa
+# Copyright (c) 2014 Eevee (Alex Munroe)
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2015 Florian Bruhin
+# Copyright (c) 2016-2017 Derek Gustafson
+# Copyright (c) 2016 Jared Garst
+# Copyright (c) 2016 Jakub Wilk
+# Copyright (c) 2016 Dave Baum
+# Copyright (c) 2017-2018 Ashley Whetter
+# Copyright (c) 2017 Łukasz Rogalski
+# Copyright (c) 2017 rr-
+# Copyright (c) 2018 Bryce Guinta
+# Copyright (c) 2018 brendanator
+# Copyright (c) 2018 Nick Drozd
+# Copyright (c) 2018 HoverHell
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+# pylint: disable=too-many-lines; https://github.com/PyCQA/astroid/issues/465
+
+"""Module for some node classes. More nodes in scoped_nodes.py
+"""
+
+import abc
+import builtins as builtins_mod
+import itertools
+import pprint
+from functools import lru_cache
+from functools import singledispatch as _singledispatch
+
+from astroid import as_string
+from astroid import bases
+from astroid import context as contextmod
+from astroid import decorators
+from astroid import exceptions
+from astroid import manager
+from astroid import mixins
+from astroid import util
+
+
+BUILTINS = builtins_mod.__name__
+MANAGER = manager.AstroidManager()
+
+
+@decorators.raise_if_nothing_inferred
+def unpack_infer(stmt, context=None):
+ """recursively generate nodes inferred by the given statement.
+ If the inferred value is a list or a tuple, recurse on the elements
+ """
+ if isinstance(stmt, (List, Tuple)):
+ for elt in stmt.elts:
+ if elt is util.Uninferable:
+ yield elt
+ continue
+ yield from unpack_infer(elt, context)
+ return dict(node=stmt, context=context)
+ # if inferred is a final node, return it and stop
+ inferred = next(stmt.infer(context))
+ if inferred is stmt:
+ yield inferred
+ return dict(node=stmt, context=context)
+ # else, infer recursively, except Uninferable object that should be returned as is
+ for inferred in stmt.infer(context):
+ if inferred is util.Uninferable:
+ yield inferred
+ else:
+ yield from unpack_infer(inferred, context)
+
+ return dict(node=stmt, context=context)
+
+
+def are_exclusive(
+ stmt1, stmt2, exceptions=None
+): # pylint: disable=redefined-outer-name
+ """return true if the two given statements are mutually exclusive
+
+ `exceptions` may be a list of exception names. If specified, discard If
+    branches and check whether one of the statements is in an exception handler
+    catching one of the given exceptions.
+
+    algorithm:
+     1) index stmt1's parents
+     2) climb among stmt2's parents until we find a common parent
+     3) if the common parent is an If or TryExcept statement, check whether the
+        nodes are in exclusive branches
+ """
+ # index stmt1's parents
+ stmt1_parents = {}
+ children = {}
+ node = stmt1.parent
+ previous = stmt1
+ while node:
+ stmt1_parents[node] = 1
+ children[node] = previous
+ previous = node
+ node = node.parent
+ # climb among stmt2's parents until we find a common parent
+ node = stmt2.parent
+ previous = stmt2
+ while node:
+ if node in stmt1_parents:
+ # if the common parent is a If or TryExcept statement, look if
+ # nodes are in exclusive branches
+ if isinstance(node, If) and exceptions is None:
+ if (
+ node.locate_child(previous)[1]
+ is not node.locate_child(children[node])[1]
+ ):
+ return True
+ elif isinstance(node, TryExcept):
+ c2attr, c2node = node.locate_child(previous)
+ c1attr, c1node = node.locate_child(children[node])
+ if c1node is not c2node:
+ first_in_body_caught_by_handlers = (
+ c2attr == "handlers"
+ and c1attr == "body"
+ and previous.catch(exceptions)
+ )
+ second_in_body_caught_by_handlers = (
+ c2attr == "body"
+ and c1attr == "handlers"
+ and children[node].catch(exceptions)
+ )
+ first_in_else_other_in_handlers = (
+ c2attr == "handlers" and c1attr == "orelse"
+ )
+ second_in_else_other_in_handlers = (
+ c2attr == "orelse" and c1attr == "handlers"
+ )
+ if any(
+ (
+ first_in_body_caught_by_handlers,
+ second_in_body_caught_by_handlers,
+ first_in_else_other_in_handlers,
+ second_in_else_other_in_handlers,
+ )
+ ):
+ return True
+ elif c2attr == "handlers" and c1attr == "handlers":
+ return previous is not children[node]
+ return False
+ previous = node
+ node = node.parent
+ return False
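+# Illustrative usage sketch (not from upstream astroid): the two assignments
+# below live in mutually exclusive branches of the same If statement.
+#
+#     >>> import astroid
+#     >>> first, second = astroid.extract_node('''
+#     ... if test:
+#     ...     a = 1  #@
+#     ... else:
+#     ...     a = 2  #@
+#     ... ''')
+#     >>> are_exclusive(first, second)
+#     True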
+
+
+# getitem() helpers.
+
+_SLICE_SENTINEL = object()
+
+
+def _slice_value(index, context=None):
+ """Get the value of the given slice index."""
+
+ if isinstance(index, Const):
+ if isinstance(index.value, (int, type(None))):
+ return index.value
+ elif index is None:
+ return None
+ else:
+ # Try to infer what the index actually is.
+ # Since we can't return all the possible values,
+ # we'll stop at the first possible value.
+ try:
+ inferred = next(index.infer(context=context))
+ except exceptions.InferenceError:
+ pass
+ else:
+ if isinstance(inferred, Const):
+ if isinstance(inferred.value, (int, type(None))):
+ return inferred.value
+
+ # Use a sentinel, because None can be a valid
+ # value that this function can return,
+ # as it is the case for unspecified bounds.
+ return _SLICE_SENTINEL
+
+
+def _infer_slice(node, context=None):
+ lower = _slice_value(node.lower, context)
+ upper = _slice_value(node.upper, context)
+ step = _slice_value(node.step, context)
+ if all(elem is not _SLICE_SENTINEL for elem in (lower, upper, step)):
+ return slice(lower, upper, step)
+
+ raise exceptions.AstroidTypeError(
+ message="Could not infer slice used in subscript",
+ node=node,
+ index=node.parent,
+ context=context,
+ )
+
+
+def _container_getitem(instance, elts, index, context=None):
+ """Get a slice or an item, using the given *index*, for the given sequence."""
+ try:
+ if isinstance(index, Slice):
+ index_slice = _infer_slice(index, context=context)
+ new_cls = instance.__class__()
+ new_cls.elts = elts[index_slice]
+ new_cls.parent = instance.parent
+ return new_cls
+ if isinstance(index, Const):
+ return elts[index.value]
+ except IndexError as exc:
+ raise exceptions.AstroidIndexError(
+ message="Index {index!s} out of range",
+ node=instance,
+ index=index,
+ context=context,
+ ) from exc
+ except TypeError as exc:
+ raise exceptions.AstroidTypeError(
+ message="Type error {error!r}", node=instance, index=index, context=context
+ ) from exc
+
+ raise exceptions.AstroidTypeError("Could not use %s as subscript index" % index)
+
+
+OP_PRECEDENCE = {
+ op: precedence
+ for precedence, ops in enumerate(
+ [
+ ["Lambda"], # lambda x: x + 1
+ ["IfExp"], # 1 if True else 2
+ ["or"],
+ ["and"],
+ ["not"],
+ ["Compare"], # in, not in, is, is not, <, <=, >, >=, !=, ==
+ ["|"],
+ ["^"],
+ ["&"],
+ ["<<", ">>"],
+ ["+", "-"],
+ ["*", "@", "/", "//", "%"],
+ ["UnaryOp"], # +, -, ~
+ ["**"],
+ ["Await"],
+ ]
+ )
+ for op in ops
+}
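+# For example, OP_PRECEDENCE["or"] == 2, OP_PRECEDENCE["and"] == 3 and
+# OP_PRECEDENCE["*"] == 11, so tighter-binding operators map to larger values;
+# op_precedence() below falls back to the highest precedence for node types
+# that are not listed here.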
+
+
+class NodeNG:
+ """ A node of the new Abstract Syntax Tree (AST).
+
+ This is the base class for all Astroid node classes.
+ """
+
+ is_statement = False
+ """Whether this node indicates a statement.
+
+ :type: bool
+ """
+ optional_assign = False # True for For (and for Comprehension if py <3.0)
+ """Whether this node optionally assigns a variable.
+
+    This applies to loop assignments because a loop won't necessarily perform an
+    assignment if it has no iterations.
+    This is also the case for comprehensions in Python 2.
+
+ :type: bool
+ """
+ is_function = False # True for FunctionDef nodes
+ """Whether this node indicates a function.
+
+ :type: bool
+ """
+ is_lambda = False
+ # Attributes below are set by the builder module or by raw factories
+ lineno = None
+ """The line that this node appears on in the source code.
+
+ :type: int or None
+ """
+ col_offset = None
+ """The column that this node appears on in the source code.
+
+ :type: int or None
+ """
+ parent = None
+ """The parent node in the syntax tree.
+
+ :type: NodeNG or None
+ """
+ _astroid_fields = ()
+ """Node attributes that contain child nodes.
+
+ This is redefined in most concrete classes.
+
+ :type: tuple(str)
+ """
+ _other_fields = ()
+ """Node attributes that do not contain child nodes.
+
+ :type: tuple(str)
+ """
+ _other_other_fields = ()
+ """Attributes that contain AST-dependent fields.
+
+ :type: tuple(str)
+ """
+ # instance specific inference function infer(node, context)
+ _explicit_inference = None
+
+ def __init__(self, lineno=None, col_offset=None, parent=None):
+ """
+ :param lineno: The line that this node appears on in the source code.
+ :type lineno: int or None
+
+ :param col_offset: The column that this node appears on in the
+ source code.
+ :type col_offset: int or None
+
+ :param parent: The parent node in the syntax tree.
+ :type parent: NodeNG or None
+ """
+ self.lineno = lineno
+ self.col_offset = col_offset
+ self.parent = parent
+
+ def infer(self, context=None, **kwargs):
+ """Get a generator of the inferred values.
+
+ This is the main entry point to the inference system.
+
+ .. seealso:: :ref:`inference`
+
+ If the instance has some explicit inference function set, it will be
+ called instead of the default interface.
+
+ :returns: The inferred values.
+ :rtype: iterable
+ """
+ if context is not None:
+ context = context.extra_context.get(self, context)
+ if self._explicit_inference is not None:
+ # explicit_inference is not bound, give it self explicitly
+ try:
+ # pylint: disable=not-callable
+ return self._explicit_inference(self, context, **kwargs)
+ except exceptions.UseInferenceDefault:
+ pass
+
+ if not context:
+ return self._infer(context, **kwargs)
+
+ key = (self, context.lookupname, context.callcontext, context.boundnode)
+ if key in context.inferred:
+ return iter(context.inferred[key])
+
+ gen = context.cache_generator(key, self._infer(context, **kwargs))
+ return util.limit_inference(gen, MANAGER.max_inferable_values)
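+    # Illustrative usage sketch (not from upstream astroid):
+    #
+    #     >>> import astroid
+    #     >>> node = astroid.extract_node('1 + 2')
+    #     >>> next(node.infer()).value
+    #     3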
+
+ def _repr_name(self):
+ """Get a name for nice representation.
+
+ This is either :attr:`name`, :attr:`attrname`, or the empty string.
+
+ :returns: The nice name.
+ :rtype: str
+ """
+ names = {"name", "attrname"}
+ if all(name not in self._astroid_fields for name in names):
+ return getattr(self, "name", getattr(self, "attrname", ""))
+ return ""
+
+ def __str__(self):
+ rname = self._repr_name()
+ cname = type(self).__name__
+ if rname:
+ string = "%(cname)s.%(rname)s(%(fields)s)"
+ alignment = len(cname) + len(rname) + 2
+ else:
+ string = "%(cname)s(%(fields)s)"
+ alignment = len(cname) + 1
+ result = []
+ for field in self._other_fields + self._astroid_fields:
+ value = getattr(self, field)
+ width = 80 - len(field) - alignment
+ lines = pprint.pformat(value, indent=2, width=width).splitlines(True)
+
+ inner = [lines[0]]
+ for line in lines[1:]:
+ inner.append(" " * alignment + line)
+ result.append("%s=%s" % (field, "".join(inner)))
+
+ return string % {
+ "cname": cname,
+ "rname": rname,
+ "fields": (",\n" + " " * alignment).join(result),
+ }
+
+ def __repr__(self):
+ rname = self._repr_name()
+ if rname:
+ string = "<%(cname)s.%(rname)s l.%(lineno)s at 0x%(id)x>"
+ else:
+ string = "<%(cname)s l.%(lineno)s at 0x%(id)x>"
+ return string % {
+ "cname": type(self).__name__,
+ "rname": rname,
+ "lineno": self.fromlineno,
+ "id": id(self),
+ }
+
+ def accept(self, visitor):
+ """Visit this node using the given visitor."""
+ func = getattr(visitor, "visit_" + self.__class__.__name__.lower())
+ return func(self)
+
+ def get_children(self):
+ """Get the child nodes below this node.
+
+ :returns: The children.
+ :rtype: iterable(NodeNG)
+ """
+ for field in self._astroid_fields:
+ attr = getattr(self, field)
+ if attr is None:
+ continue
+ if isinstance(attr, (list, tuple)):
+ yield from attr
+ else:
+ yield attr
+
+ def last_child(self):
+ """An optimized version of list(get_children())[-1]
+
+ :returns: The last child, or None if no children exist.
+ :rtype: NodeNG or None
+ """
+ for field in self._astroid_fields[::-1]:
+ attr = getattr(self, field)
+            if not attr:  # None or empty list / tuple
+ continue
+ if isinstance(attr, (list, tuple)):
+ return attr[-1]
+
+ return attr
+ return None
+
+ def parent_of(self, node):
+ """Check if this node is the parent of the given node.
+
+ :param node: The node to check if it is the child.
+ :type node: NodeNG
+
+ :returns: True if this node is the parent of the given node,
+ False otherwise.
+ :rtype: bool
+ """
+ parent = node.parent
+ while parent is not None:
+ if self is parent:
+ return True
+ parent = parent.parent
+ return False
+
+ def statement(self):
+ """The first parent node, including self, marked as statement node.
+
+ :returns: The first parent statement.
+ :rtype: NodeNG
+ """
+ if self.is_statement:
+ return self
+ return self.parent.statement()
+
+ def frame(self):
+ """The first parent frame node.
+
+ A frame node is a :class:`Module`, :class:`FunctionDef`,
+ or :class:`ClassDef`.
+
+ :returns: The first parent frame node.
+ :rtype: Module or FunctionDef or ClassDef
+ """
+ return self.parent.frame()
+
+ def scope(self):
+ """The first parent node defining a new scope.
+
+ :returns: The first parent scope node.
+ :rtype: Module or FunctionDef or ClassDef or Lambda or GenExpr
+ """
+ return self.parent.scope()
+
+ def root(self):
+ """Return the root node of the syntax tree.
+
+ :returns: The root node.
+ :rtype: Module
+ """
+ if self.parent:
+ return self.parent.root()
+ return self
+
+ def child_sequence(self, child):
+ """Search for the sequence that contains this child.
+
+ :param child: The child node to search sequences for.
+ :type child: NodeNG
+
+ :returns: The sequence containing the given child node.
+ :rtype: iterable(NodeNG)
+
+ :raises AstroidError: If no sequence could be found that contains
+ the given child.
+ """
+ for field in self._astroid_fields:
+ node_or_sequence = getattr(self, field)
+ if node_or_sequence is child:
+ return [node_or_sequence]
+ # /!\ compiler.ast Nodes have an __iter__ walking over child nodes
+ if (
+ isinstance(node_or_sequence, (tuple, list))
+ and child in node_or_sequence
+ ):
+ return node_or_sequence
+
+ msg = "Could not find %s in %s's children"
+ raise exceptions.AstroidError(msg % (repr(child), repr(self)))
+
+ def locate_child(self, child):
+ """Find the field of this node that contains the given child.
+
+ :param child: The child node to search fields for.
+ :type child: NodeNG
+
+ :returns: A tuple of the name of the field that contains the child,
+ and the sequence or node that contains the child node.
+ :rtype: tuple(str, iterable(NodeNG) or NodeNG)
+
+ :raises AstroidError: If no field could be found that contains
+ the given child.
+ """
+ for field in self._astroid_fields:
+ node_or_sequence = getattr(self, field)
+ # /!\ compiler.ast Nodes have an __iter__ walking over child nodes
+ if child is node_or_sequence:
+ return field, child
+ if (
+ isinstance(node_or_sequence, (tuple, list))
+ and child in node_or_sequence
+ ):
+ return field, node_or_sequence
+ msg = "Could not find %s in %s's children"
+ raise exceptions.AstroidError(msg % (repr(child), repr(self)))
+
+ # FIXME : should we merge child_sequence and locate_child ? locate_child
+ # is only used in are_exclusive, child_sequence one time in pylint.
+
+ def next_sibling(self):
+ """The next sibling statement node.
+
+ :returns: The next sibling statement node.
+ :rtype: NodeNG or None
+ """
+ return self.parent.next_sibling()
+
+ def previous_sibling(self):
+ """The previous sibling statement.
+
+ :returns: The previous sibling statement node.
+ :rtype: NodeNG or None
+ """
+ return self.parent.previous_sibling()
+
+ def nearest(self, nodes):
+ """Get the node closest to this one from the given list of nodes.
+
+ :param nodes: The list of nodes to search. All of these nodes must
+ belong to the same module as this one. The list should be
+ sorted by the line number of the nodes, smallest first.
+ :type nodes: iterable(NodeNG)
+
+ :returns: The node closest to this one in the source code,
+ or None if one could not be found.
+ :rtype: NodeNG or None
+ """
+ myroot = self.root()
+ mylineno = self.fromlineno
+ nearest = None, 0
+ for node in nodes:
+ assert node.root() is myroot, (
+ "nodes %s and %s are not from the same module" % (self, node)
+ )
+ lineno = node.fromlineno
+ if node.fromlineno > mylineno:
+ break
+ if lineno > nearest[1]:
+ nearest = node, lineno
+ # FIXME: raise an exception if nearest is None ?
+ return nearest[0]
+
+ # these are lazy because they're relatively expensive to compute for every
+ # single node, and they rarely get looked at
+
+ @decorators.cachedproperty
+ def fromlineno(self):
+ """The first line that this node appears on in the source code.
+
+ :type: int or None
+ """
+ if self.lineno is None:
+ return self._fixed_source_line()
+
+ return self.lineno
+
+ @decorators.cachedproperty
+ def tolineno(self):
+ """The last line that this node appears on in the source code.
+
+ :type: int or None
+ """
+ if not self._astroid_fields:
+ # can't have children
+ lastchild = None
+ else:
+ lastchild = self.last_child()
+ if lastchild is None:
+ return self.fromlineno
+
+ return lastchild.tolineno
+
+ def _fixed_source_line(self):
+ """Attempt to find the line that this node appears on.
+
+ We need this method since not all nodes have :attr:`lineno` set.
+
+ :returns: The line number of this node,
+ or None if this could not be determined.
+ :rtype: int or None
+ """
+ line = self.lineno
+ _node = self
+ try:
+ while line is None:
+ _node = next(_node.get_children())
+ line = _node.lineno
+ except StopIteration:
+ _node = self.parent
+ while _node and line is None:
+ line = _node.lineno
+ _node = _node.parent
+ return line
+
+ def block_range(self, lineno):
+ """Get a range from the given line number to where this node ends.
+
+ :param lineno: The line number to start the range at.
+ :type lineno: int
+
+ :returns: The range of line numbers that this node belongs to,
+ starting at the given line number.
+ :rtype: tuple(int, int or None)
+ """
+ return lineno, self.tolineno
+
+ def set_local(self, name, stmt):
+ """Define that the given name is declared in the given statement node.
+
+ This definition is stored on the parent scope node.
+
+ .. seealso:: :meth:`scope`
+
+ :param name: The name that is being defined.
+ :type name: str
+
+ :param stmt: The statement that defines the given name.
+ :type stmt: NodeNG
+ """
+ self.parent.set_local(name, stmt)
+
+ def nodes_of_class(self, klass, skip_klass=None):
+ """Get the nodes (including this one or below) of the given type.
+
+ :param klass: The type of node to search for.
+ :type klass: builtins.type
+
+ :param skip_klass: A type of node to ignore. This is useful to ignore
+ subclasses of :attr:`klass`.
+ :type skip_klass: builtins.type
+
+        :returns: The nodes of the given type.
+ :rtype: iterable(NodeNG)
+ """
+ if isinstance(self, klass):
+ yield self
+
+ if skip_klass is None:
+ for child_node in self.get_children():
+ yield from child_node.nodes_of_class(klass, skip_klass)
+
+ return
+
+ for child_node in self.get_children():
+ if isinstance(child_node, skip_klass):
+ continue
+ yield from child_node.nodes_of_class(klass, skip_klass)
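+    # Illustrative usage sketch (not from upstream astroid):
+    #
+    #     >>> import astroid
+    #     >>> module = astroid.parse('x = 1\ny = x + 1')
+    #     >>> [n.name for n in module.nodes_of_class(astroid.AssignName)]
+    #     ['x', 'y']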
+
+ @decorators.cached
+ def _get_assign_nodes(self):
+ return []
+
+ def _get_name_nodes(self):
+ for child_node in self.get_children():
+ yield from child_node._get_name_nodes()
+
+ def _get_return_nodes_skip_functions(self):
+ yield from ()
+
+ def _get_yield_nodes_skip_lambdas(self):
+ yield from ()
+
+ def _infer_name(self, frame, name):
+ # overridden for ImportFrom, Import, Global, TryExcept and Arguments
+ pass
+
+ def _infer(self, context=None):
+ """we don't know how to resolve a statement by default"""
+ # this method is overridden by most concrete classes
+ raise exceptions.InferenceError(
+ "No inference function for {node!r}.", node=self, context=context
+ )
+
+ def inferred(self):
+ """Get a list of the inferred values.
+
+ .. seealso:: :ref:`inference`
+
+ :returns: The inferred values.
+ :rtype: list
+ """
+ return list(self.infer())
+
+ def instantiate_class(self):
+ """Instantiate an instance of the defined class.
+
+ .. note::
+
+ On anything other than a :class:`ClassDef` this will return self.
+
+ :returns: An instance of the defined class.
+ :rtype: object
+ """
+ return self
+
+ def has_base(self, node):
+ """Check if this node inherits from the given type.
+
+ :param node: The node defining the base to look for.
+ Usually this is a :class:`Name` node.
+ :type node: NodeNG
+ """
+ return False
+
+ def callable(self):
+ """Whether this node defines something that is callable.
+
+ :returns: True if this defines something that is callable,
+ False otherwise.
+ :rtype: bool
+ """
+ return False
+
+ def eq(self, value):
+ return False
+
+ def as_string(self):
+ """Get the source code that this node represents.
+
+ :returns: The source code.
+ :rtype: str
+ """
+ return as_string.to_code(self)
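+    # Illustrative usage sketch (not from upstream astroid): the result is
+    # regenerated from the AST, so whitespace is normalised.
+    #
+    #     >>> import astroid
+    #     >>> astroid.extract_node('x   +   1').as_string()
+    #     'x + 1'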
+
+ def repr_tree(
+ self,
+ ids=False,
+ include_linenos=False,
+ ast_state=False,
+ indent=" ",
+ max_depth=0,
+ max_width=80,
+ ):
+ """Get a string representation of the AST from this node.
+
+ :param ids: If true, includes the ids with the node type names.
+ :type ids: bool
+
+ :param include_linenos: If true, includes the line numbers and
+ column offsets.
+ :type include_linenos: bool
+
+ :param ast_state: If true, includes information derived from
+ the whole AST like local and global variables.
+ :type ast_state: bool
+
+ :param indent: A string to use to indent the output string.
+ :type indent: str
+
+ :param max_depth: If set to a positive integer, won't return
+ nodes deeper than max_depth in the string.
+ :type max_depth: int
+
+ :param max_width: Attempt to format the output string to stay
+ within this number of characters, but can exceed it under some
+            circumstances. Only positive integer values are valid; the default is 80.
+ :type max_width: int
+
+ :returns: The string representation of the AST.
+ :rtype: str
+ """
+ # pylint: disable=too-many-statements
+ @_singledispatch
+ def _repr_tree(node, result, done, cur_indent="", depth=1):
+ """Outputs a representation of a non-tuple/list, non-node that's
+ contained within an AST, including strings.
+ """
+ lines = pprint.pformat(
+ node, width=max(max_width - len(cur_indent), 1)
+ ).splitlines(True)
+ result.append(lines[0])
+ result.extend([cur_indent + line for line in lines[1:]])
+ return len(lines) != 1
+
+ # pylint: disable=unused-variable; doesn't understand singledispatch
+ @_repr_tree.register(tuple)
+ @_repr_tree.register(list)
+ def _repr_seq(node, result, done, cur_indent="", depth=1):
+ """Outputs a representation of a sequence that's contained within an AST."""
+ cur_indent += indent
+ result.append("[")
+ if not node:
+ broken = False
+ elif len(node) == 1:
+ broken = _repr_tree(node[0], result, done, cur_indent, depth)
+ elif len(node) == 2:
+ broken = _repr_tree(node[0], result, done, cur_indent, depth)
+ if not broken:
+ result.append(", ")
+ else:
+ result.append(",\n")
+ result.append(cur_indent)
+ broken = _repr_tree(node[1], result, done, cur_indent, depth) or broken
+ else:
+ result.append("\n")
+ result.append(cur_indent)
+ for child in node[:-1]:
+ _repr_tree(child, result, done, cur_indent, depth)
+ result.append(",\n")
+ result.append(cur_indent)
+ _repr_tree(node[-1], result, done, cur_indent, depth)
+ broken = True
+ result.append("]")
+ return broken
+
+ # pylint: disable=unused-variable; doesn't understand singledispatch
+ @_repr_tree.register(NodeNG)
+ def _repr_node(node, result, done, cur_indent="", depth=1):
+ """Outputs a strings representation of an astroid node."""
+ if node in done:
+ result.append(
+ indent
+ + " max_depth:
+ result.append("...")
+ return False
+ depth += 1
+ cur_indent += indent
+ if ids:
+ result.append("%s<0x%x>(\n" % (type(node).__name__, id(node)))
+ else:
+ result.append("%s(" % type(node).__name__)
+ fields = []
+ if include_linenos:
+ fields.extend(("lineno", "col_offset"))
+ fields.extend(node._other_fields)
+ fields.extend(node._astroid_fields)
+ if ast_state:
+ fields.extend(node._other_other_fields)
+ if not fields:
+ broken = False
+ elif len(fields) == 1:
+ result.append("%s=" % fields[0])
+ broken = _repr_tree(
+ getattr(node, fields[0]), result, done, cur_indent, depth
+ )
+ else:
+ result.append("\n")
+ result.append(cur_indent)
+ for field in fields[:-1]:
+ result.append("%s=" % field)
+ _repr_tree(getattr(node, field), result, done, cur_indent, depth)
+ result.append(",\n")
+ result.append(cur_indent)
+ result.append("%s=" % fields[-1])
+ _repr_tree(getattr(node, fields[-1]), result, done, cur_indent, depth)
+ broken = True
+ result.append(")")
+ return broken
+
+ result = []
+ _repr_tree(self, result, set())
+ return "".join(result)
+
+ def bool_value(self):
+ """Determine the boolean value of this node.
+
+ The boolean value of a node can have three
+ possible values:
+
+ * False: For instance, empty data structures,
+ False, empty strings, instances which return
+ explicitly False from the __nonzero__ / __bool__
+ method.
+        * True: Most constructs are True by default:
+          classes, functions, modules, etc.
+ * Uninferable: The inference engine is uncertain of the
+ node's value.
+
+ :returns: The boolean value of this node.
+ :rtype: bool or Uninferable
+ """
+ return util.Uninferable
+
+ def op_precedence(self):
+ # Look up by class name or default to highest precedence
+ return OP_PRECEDENCE.get(self.__class__.__name__, len(OP_PRECEDENCE))
+
+ def op_left_associative(self):
+ # Everything is left associative except `**` and IfExp
+ return True
+
+
+class Statement(NodeNG):
+ """Statement node adding a few attributes"""
+
+ is_statement = True
+ """Whether this node indicates a statement.
+
+ :type: bool
+ """
+
+ def next_sibling(self):
+ """The next sibling statement node.
+
+ :returns: The next sibling statement node.
+ :rtype: NodeNG or None
+ """
+ stmts = self.parent.child_sequence(self)
+ index = stmts.index(self)
+ try:
+ return stmts[index + 1]
+ except IndexError:
+ pass
+
+ def previous_sibling(self):
+ """The previous sibling statement.
+
+ :returns: The previous sibling statement node.
+ :rtype: NodeNG or None
+ """
+ stmts = self.parent.child_sequence(self)
+ index = stmts.index(self)
+ if index >= 1:
+ return stmts[index - 1]
+ return None
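+    # Illustrative usage sketch (not from upstream astroid):
+    #
+    #     >>> import astroid
+    #     >>> first, second = astroid.extract_node('a = 1 #@\nb = 2 #@')
+    #     >>> first.next_sibling() is second
+    #     True
+    #     >>> second.previous_sibling() is first
+    #     True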
+
+
+class _BaseContainer(
+ mixins.ParentAssignTypeMixin, NodeNG, bases.Instance, metaclass=abc.ABCMeta
+):
+ """Base class for Set, FrozenSet, Tuple and List."""
+
+ _astroid_fields = ("elts",)
+
+ def __init__(self, lineno=None, col_offset=None, parent=None):
+ """
+ :param lineno: The line that this node appears on in the source code.
+ :type lineno: int or None
+
+ :param col_offset: The column that this node appears on in the
+ source code.
+ :type col_offset: int or None
+
+ :param parent: The parent node in the syntax tree.
+ :type parent: NodeNG or None
+ """
+ self.elts = []
+ """The elements in the node.
+
+ :type: list(NodeNG)
+ """
+
+ super(_BaseContainer, self).__init__(lineno, col_offset, parent)
+
+ def postinit(self, elts):
+ """Do some setup after initialisation.
+
+        :param elts: The list of elements that the node contains.
+ :type elts: list(NodeNG)
+ """
+ self.elts = elts
+
+ @classmethod
+ def from_constants(cls, elts=None):
+ """Create a node of this type from the given list of elements.
+
+ :param elts: The list of elements that the node should contain.
+ :type elts: list(NodeNG)
+
+ :returns: A new node containing the given elements.
+ :rtype: NodeNG
+ """
+ node = cls()
+ if elts is None:
+ node.elts = []
+ else:
+ node.elts = [const_factory(e) for e in elts]
+ return node
+
+ def itered(self):
+ """An iterator over the elements this node contains.
+
+ :returns: The contents of this node.
+ :rtype: iterable(NodeNG)
+ """
+ return self.elts
+
+ def bool_value(self):
+ """Determine the boolean value of this node.
+
+ :returns: The boolean value of this node.
+ :rtype: bool or Uninferable
+ """
+ return bool(self.elts)
+
+ @abc.abstractmethod
+ def pytype(self):
+ """Get the name of the type that this node represents.
+
+ :returns: The name of the type.
+ :rtype: str
+ """
+
+ def get_children(self):
+ yield from self.elts
+
+
+class LookupMixIn:
+ """Mixin to look up a name in the right scope."""
+
+ @lru_cache(maxsize=None)
+ def lookup(self, name):
+ """Lookup where the given variable is assigned.
+
+ The lookup starts from self's scope. If self is not a frame itself
+ and the name is found in the inner frame locals, statements will be
+ filtered to remove ignorable statements according to self's location.
+
+ :param name: The name of the variable to find assignments for.
+ :type name: str
+
+ :returns: The scope node and the list of assignments associated to the
+ given name according to the scope where it has been found (locals,
+ globals or builtin).
+ :rtype: tuple(str, list(NodeNG))
+ """
+ return self.scope().scope_lookup(self, name)
+
+ def ilookup(self, name):
+ """Lookup the inferred values of the given variable.
+
+ :param name: The variable name to find values for.
+ :type name: str
+
+ :returns: The inferred values of the statements returned from
+ :meth:`lookup`.
+ :rtype: iterable
+ """
+ frame, stmts = self.lookup(name)
+ context = contextmod.InferenceContext()
+ return bases._infer_stmts(stmts, context, frame)
+
+ def _filter_stmts(self, stmts, frame, offset):
+ """Filter the given list of statements to remove ignorable statements.
+
+ If self is not a frame itself and the name is found in the inner
+ frame locals, statements will be filtered to remove ignorable
+ statements according to self's location.
+
+ :param stmts: The statements to filter.
+ :type stmts: list(NodeNG)
+
+ :param frame: The frame that all of the given statements belong to.
+ :type frame: NodeNG
+
+ :param offset: The line offset to filter statements up to.
+ :type offset: int
+
+ :returns: The filtered statements.
+ :rtype: list(NodeNG)
+ """
+ # if offset == -1, my actual frame is not the inner frame but its parent
+ #
+ # class A(B): pass
+ #
+ # we need this to resolve B correctly
+ if offset == -1:
+ myframe = self.frame().parent.frame()
+ else:
+ myframe = self.frame()
+ # If the frame of this node is the same as the statement
+ # of this node, then the node is part of a class or
+ # a function definition and the frame of this node should be the
+        # upper frame, not the frame of the definition.
+ # For more information why this is important,
+ # see Pylint issue #295.
+ # For example, for 'b', the statement is the same
+ # as the frame / scope:
+ #
+ # def test(b=1):
+ # ...
+
+ if self.statement() is myframe and myframe.parent:
+ myframe = myframe.parent.frame()
+ mystmt = self.statement()
+ # line filtering if we are in the same frame
+ #
+ # take care node may be missing lineno information (this is the case for
+ # nodes inserted for living objects)
+ if myframe is frame and mystmt.fromlineno is not None:
+ assert mystmt.fromlineno is not None, mystmt
+ mylineno = mystmt.fromlineno + offset
+ else:
+ # disabling lineno filtering
+ mylineno = 0
+ _stmts = []
+ _stmt_parents = []
+ for node in stmts:
+ stmt = node.statement()
+ # line filtering is on and we have reached our location, break
+ if stmt.fromlineno > mylineno > 0:
+ break
+ # Ignore decorators with the same name as the
+ # decorated function
+ # Fixes issue #375
+ if mystmt is stmt and is_from_decorator(self):
+ continue
+ assert hasattr(node, "assign_type"), (
+ node,
+ node.scope(),
+ node.scope().locals,
+ )
+ assign_type = node.assign_type()
+ if node.has_base(self):
+ break
+
+ _stmts, done = assign_type._get_filtered_stmts(self, node, _stmts, mystmt)
+ if done:
+ break
+
+ optional_assign = assign_type.optional_assign
+ if optional_assign and assign_type.parent_of(self):
+ # we are inside a loop, loop var assignment is hiding previous
+ # assignment
+ _stmts = [node]
+ _stmt_parents = [stmt.parent]
+ continue
+
+ # XXX comment various branches below!!!
+ try:
+ pindex = _stmt_parents.index(stmt.parent)
+ except ValueError:
+ pass
+ else:
+ # we got a parent index, this means the currently visited node
+ # is at the same block level as a previously visited node
+ if _stmts[pindex].assign_type().parent_of(assign_type):
+ # both statements are not at the same block level
+ continue
+ # if currently visited node is following previously considered
+ # assignment and both are not exclusive, we can drop the
+ # previous one. For instance in the following code ::
+ #
+ # if a:
+ # x = 1
+ # else:
+ # x = 2
+ # print x
+ #
+                #  we can remove neither x = 1 nor x = 2 when looking for 'x'
+ # of 'print x'; while in the following ::
+ #
+ # x = 1
+ # x = 2
+ # print x
+ #
+ # we can remove x = 1 when we see x = 2
+ #
+ # moreover, on loop assignment types, assignment won't
+ # necessarily be done if the loop has no iteration, so we don't
+ # want to clear previous assignments if any (hence the test on
+ # optional_assign)
+ if not (optional_assign or are_exclusive(_stmts[pindex], node)):
+ del _stmt_parents[pindex]
+ del _stmts[pindex]
+ if isinstance(node, AssignName):
+ if not optional_assign and stmt.parent is mystmt.parent:
+ _stmts = []
+ _stmt_parents = []
+ elif isinstance(node, DelName):
+ _stmts = []
+ _stmt_parents = []
+ continue
+ if not are_exclusive(self, node):
+ _stmts.append(node)
+ _stmt_parents.append(stmt.parent)
+ return _stmts
+
+
+# Name classes
+
+
+class AssignName(
+ mixins.NoChildrenMixin, LookupMixIn, mixins.ParentAssignTypeMixin, NodeNG
+):
+ """Variation of :class:`ast.Assign` representing assignment to a name.
+
+ An :class:`AssignName` is the name of something that is assigned to.
+ This includes variables defined in a function signature or in a loop.
+
+ >>> node = astroid.extract_node('variable = range(10)')
+ >>> node
+    <Assign l.1 at 0x...>
+    >>> list(node.get_children())
+    [<AssignName.variable l.1 at 0x...>, <Call l.1 at 0x...>]
+ >>> list(node.get_children())[0].as_string()
+ 'variable'
+ """
+
+ _other_fields = ("name",)
+
+ def __init__(self, name=None, lineno=None, col_offset=None, parent=None):
+ """
+ :param name: The name that is assigned to.
+ :type name: str or None
+
+ :param lineno: The line that this node appears on in the source code.
+ :type lineno: int or None
+
+ :param col_offset: The column that this node appears on in the
+ source code.
+ :type col_offset: int or None
+
+ :param parent: The parent node in the syntax tree.
+ :type parent: NodeNG or None
+ """
+ self.name = name
+ """The name that is assigned to.
+
+ :type: str or None
+ """
+
+ super(AssignName, self).__init__(lineno, col_offset, parent)
+
+
+class DelName(
+ mixins.NoChildrenMixin, LookupMixIn, mixins.ParentAssignTypeMixin, NodeNG
+):
+ """Variation of :class:`ast.Delete` representing deletion of a name.
+
+ A :class:`DelName` is the name of something that is deleted.
+
+ >>> node = astroid.extract_node("del variable #@")
+ >>> list(node.get_children())
+    [<DelName.variable l.1 at 0x...>]
+ >>> list(node.get_children())[0].as_string()
+ 'variable'
+ """
+
+ _other_fields = ("name",)
+
+ def __init__(self, name=None, lineno=None, col_offset=None, parent=None):
+ """
+ :param name: The name that is being deleted.
+ :type name: str or None
+
+ :param lineno: The line that this node appears on in the source code.
+ :type lineno: int or None
+
+ :param col_offset: The column that this node appears on in the
+ source code.
+ :type col_offset: int or None
+
+ :param parent: The parent node in the syntax tree.
+ :type parent: NodeNG or None
+ """
+ self.name = name
+ """The name that is being deleted.
+
+ :type: str or None
+ """
+
+ super(DelName, self).__init__(lineno, col_offset, parent)
+
+
+class Name(mixins.NoChildrenMixin, LookupMixIn, NodeNG):
+ """Class representing an :class:`ast.Name` node.
+
+ A :class:`Name` node is something that is named, but not covered by
+ :class:`AssignName` or :class:`DelName`.
+
+ >>> node = astroid.extract_node('range(10)')
+ >>> node
+    <Call l.1 at 0x...>
+    >>> list(node.get_children())
+    [<Name.range l.1 at 0x...>, <Const.int l.1 at 0x...>]
+ >>> list(node.get_children())[0].as_string()
+ 'range'
+ """
+
+ _other_fields = ("name",)
+
+ def __init__(self, name=None, lineno=None, col_offset=None, parent=None):
+ """
+ :param name: The name that this node refers to.
+ :type name: str or None
+
+ :param lineno: The line that this node appears on in the source code.
+ :type lineno: int or None
+
+ :param col_offset: The column that this node appears on in the
+ source code.
+ :type col_offset: int or None
+
+ :param parent: The parent node in the syntax tree.
+ :type parent: NodeNG or None
+ """
+ self.name = name
+ """The name that this node refers to.
+
+ :type: str or None
+ """
+
+ super(Name, self).__init__(lineno, col_offset, parent)
+
+ def _get_name_nodes(self):
+ yield self
+
+ for child_node in self.get_children():
+ yield from child_node._get_name_nodes()
+
+
+class Arguments(mixins.AssignTypeMixin, NodeNG):
+ """Class representing an :class:`ast.arguments` node.
+
+    An :class:`Arguments` node represents the arguments in a
+ function definition.
+
+ >>> node = astroid.extract_node('def foo(bar): pass')
+ >>> node
+    <FunctionDef.foo l.1 at 0x...>
+    >>> node.args
+    <Arguments l.1 at 0x...>
+ """
+
+ # Python 3.4+ uses a different approach regarding annotations,
+ # each argument is a new class, _ast.arg, which exposes an
+ # 'annotation' attribute. In astroid though, arguments are exposed
+ # as is in the Arguments node and the only way to expose annotations
+    # is by using something similar to Python 3.3:
+    # - we expose 'varargannotation' and 'kwargannotation', the annotations
+    #   of varargs and kwargs.
+    # - we expose 'annotation', a list with annotations
+ # for each normal argument. If an argument doesn't have an
+ # annotation, its value will be None.
+
+ _astroid_fields = (
+ "args",
+ "defaults",
+ "kwonlyargs",
+ "kw_defaults",
+ "annotations",
+ "varargannotation",
+ "kwargannotation",
+ "kwonlyargs_annotations",
+ )
+ varargannotation = None
+ """The type annotation for the variable length arguments.
+
+ :type: NodeNG
+ """
+ kwargannotation = None
+ """The type annotation for the variable length keyword arguments.
+
+ :type: NodeNG
+ """
+
+ _other_fields = ("vararg", "kwarg")
+
+ def __init__(self, vararg=None, kwarg=None, parent=None):
+ """
+ :param vararg: The name of the variable length arguments.
+ :type vararg: str or None
+
+ :param kwarg: The name of the variable length keyword arguments.
+ :type kwarg: str or None
+
+ :param parent: The parent node in the syntax tree.
+ :type parent: NodeNG or None
+ """
+ super(Arguments, self).__init__(parent=parent)
+ self.vararg = vararg
+ """The name of the variable length arguments.
+
+ :type: str or None
+ """
+
+ self.kwarg = kwarg
+ """The name of the variable length keyword arguments.
+
+ :type: str or None
+ """
+
+ self.args = []
+ """The names of the required arguments.
+
+ :type: list(AssignName)
+ """
+
+ self.defaults = []
+ """The default values for arguments that can be passed positionally.
+
+ :type: list(NodeNG)
+ """
+
+ self.kwonlyargs = []
+ """The keyword arguments that cannot be passed positionally.
+
+ :type: list(AssignName)
+ """
+
+ self.kw_defaults = []
+ """The default values for keyword arguments that cannot be passed positionally.
+
+ :type: list(NodeNG)
+ """
+
+ self.annotations = []
+ """The type annotations of arguments that can be passed positionally.
+
+ :type: list(NodeNG)
+ """
+
+ self.kwonlyargs_annotations = []
+ """The type annotations of arguments that cannot be passed positionally.
+
+ :type: list(NodeNG)
+ """
+
+ def postinit(
+ self,
+ args,
+ defaults,
+ kwonlyargs,
+ kw_defaults,
+ annotations,
+ kwonlyargs_annotations=None,
+ varargannotation=None,
+ kwargannotation=None,
+ ):
+ """Do some setup after initialisation.
+
+ :param args: The names of the required arguments.
+ :type args: list(AssignName)
+
+ :param defaults: The default values for arguments that can be passed
+ positionally.
+ :type defaults: list(NodeNG)
+
+ :param kwonlyargs: The keyword arguments that cannot be passed
+ positionally.
+ :type kwonlyargs: list(AssignName)
+
+ :param kw_defaults: The default values for keyword arguments that
+ cannot be passed positionally.
+ :type kw_defaults: list(NodeNG)
+
+ :param annotations: The type annotations of arguments that can be
+ passed positionally.
+ :type annotations: list(NodeNG)
+
+ :param kwonlyargs_annotations: The type annotations of arguments that
+ cannot be passed positionally. This should always be passed in
+ Python 3.
+ :type kwonlyargs_annotations: list(NodeNG)
+
+ :param varargannotation: The type annotation for the variable length
+ arguments.
+ :type varargannotation: NodeNG
+
+ :param kwargannotation: The type annotation for the variable length
+ keyword arguments.
+ :type kwargannotation: NodeNG
+ """
+ self.args = args
+ self.defaults = defaults
+ self.kwonlyargs = kwonlyargs
+ self.kw_defaults = kw_defaults
+ self.annotations = annotations
+ self.kwonlyargs_annotations = kwonlyargs_annotations
+ self.varargannotation = varargannotation
+ self.kwargannotation = kwargannotation
+
+ def _infer_name(self, frame, name):
+ if self.parent is frame:
+ return name
+ return None
+
+ @decorators.cachedproperty
+ def fromlineno(self):
+ """The first line that this node appears on in the source code.
+
+ :type: int or None
+ """
+ lineno = super(Arguments, self).fromlineno
+ return max(lineno, self.parent.fromlineno or 0)
+
+ def format_args(self):
+ """Get the arguments formatted as string.
+
+ :returns: The formatted arguments.
+ :rtype: str
+ """
+ result = []
+ if self.args:
+ result.append(
+ _format_args(
+ self.args, self.defaults, getattr(self, "annotations", None)
+ )
+ )
+ if self.vararg:
+ result.append("*%s" % self.vararg)
+ if self.kwonlyargs:
+ if not self.vararg:
+ result.append("*")
+ result.append(
+ _format_args(
+ self.kwonlyargs, self.kw_defaults, self.kwonlyargs_annotations
+ )
+ )
+ if self.kwarg:
+ result.append("**%s" % self.kwarg)
+ return ", ".join(result)
+
+ def default_value(self, argname):
+ """Get the default value for an argument.
+
+ :param argname: The name of the argument to get the default value for.
+ :type argname: str
+
+ :raises NoDefault: If there is no default value defined for the
+ given argument.
+ """
+ i = _find_arg(argname, self.args)[0]
+ if i is not None:
+ idx = i - (len(self.args) - len(self.defaults))
+ if idx >= 0:
+ return self.defaults[idx]
+ i = _find_arg(argname, self.kwonlyargs)[0]
+ if i is not None and self.kw_defaults[i] is not None:
+ return self.kw_defaults[i]
+ raise exceptions.NoDefault(func=self.parent, name=argname)
+
+ def is_argument(self, name):
+ """Check if the given name is defined in the arguments.
+
+ :param name: The name to check for.
+ :type name: str
+
+ :returns: True if the given name is defined in the arguments,
+ False otherwise.
+ :rtype: bool
+ """
+ if name == self.vararg:
+ return True
+ if name == self.kwarg:
+ return True
+ return (
+ self.find_argname(name, True)[1] is not None
+ or self.kwonlyargs
+ and _find_arg(name, self.kwonlyargs, True)[1] is not None
+ )
+
+ def find_argname(self, argname, rec=False):
+ """Get the index and :class:`AssignName` node for given name.
+
+ :param argname: The name of the argument to search for.
+ :type argname: str
+
+ :param rec: Whether or not to include arguments in unpacked tuples
+ in the search.
+ :type rec: bool
+
+ :returns: The index and node for the argument.
+ :rtype: tuple(str or None, AssignName or None)
+ """
+ if self.args: # self.args may be None in some cases (builtin function)
+ return _find_arg(argname, self.args, rec)
+ return None, None
+
+ def get_children(self):
+ yield from self.args or ()
+
+ yield from self.defaults
+ yield from self.kwonlyargs
+
+ for elt in self.kw_defaults:
+ if elt is not None:
+ yield elt
+
+ for elt in self.annotations:
+ if elt is not None:
+ yield elt
+
+ if self.varargannotation is not None:
+ yield self.varargannotation
+
+ if self.kwargannotation is not None:
+ yield self.kwargannotation
+
+ for elt in self.kwonlyargs_annotations:
+ if elt is not None:
+ yield elt
+
+
+def _find_arg(argname, args, rec=False):
+ for i, arg in enumerate(args):
+ if isinstance(arg, Tuple):
+ if rec:
+ found = _find_arg(argname, arg.elts)
+ if found[0] is not None:
+ return found
+ elif arg.name == argname:
+ return i, arg
+ return None, None
+
+
+def _format_args(args, defaults=None, annotations=None):
+ values = []
+ if args is None:
+ return ""
+ if annotations is None:
+ annotations = []
+ if defaults is not None:
+ default_offset = len(args) - len(defaults)
+ packed = itertools.zip_longest(args, annotations)
+ for i, (arg, annotation) in enumerate(packed):
+ if isinstance(arg, Tuple):
+ values.append("(%s)" % _format_args(arg.elts))
+ else:
+ argname = arg.name
+ if annotation is not None:
+ argname += ":" + annotation.as_string()
+ values.append(argname)
+
+ if defaults is not None and i >= default_offset:
+ if defaults[i - default_offset] is not None:
+ values[-1] += "=" + defaults[i - default_offset].as_string()
+ return ", ".join(values)
+
+
+class AssignAttr(mixins.ParentAssignTypeMixin, NodeNG):
+ """Variation of :class:`ast.Assign` representing assignment to an attribute.
+
+ >>> node = astroid.extract_node('self.attribute = range(10)')
+ >>> node
+    <Assign l.1 at 0x...>
+    >>> list(node.get_children())
+    [<AssignAttr.attribute l.1 at 0x...>, <Call l.1 at 0x...>]
+ >>> list(node.get_children())[0].as_string()
+ 'self.attribute'
+ """
+
+ _astroid_fields = ("expr",)
+ _other_fields = ("attrname",)
+ expr = None
+ """What has the attribute that is being assigned to.
+
+ :type: NodeNG or None
+ """
+
+ def __init__(self, attrname=None, lineno=None, col_offset=None, parent=None):
+ """
+ :param attrname: The name of the attribute being assigned to.
+ :type attrname: str or None
+
+ :param lineno: The line that this node appears on in the source code.
+ :type lineno: int or None
+
+ :param col_offset: The column that this node appears on in the
+ source code.
+ :type col_offset: int or None
+
+ :param parent: The parent node in the syntax tree.
+ :type parent: NodeNG or None
+ """
+ self.attrname = attrname
+ """The name of the attribute being assigned to.
+
+ :type: str or None
+ """
+
+ super(AssignAttr, self).__init__(lineno, col_offset, parent)
+
+ def postinit(self, expr=None):
+ """Do some setup after initialisation.
+
+ :param expr: What has the attribute that is being assigned to.
+ :type expr: NodeNG or None
+ """
+ self.expr = expr
+
+ def get_children(self):
+ yield self.expr
+
+
+class Assert(Statement):
+ """Class representing an :class:`ast.Assert` node.
+
+ An :class:`Assert` node represents an assert statement.
+
+ >>> node = astroid.extract_node('assert len(things) == 10, "Not enough things"')
+ >>> node
+    <Assert l.1 at 0x...>
+ """
+
+ _astroid_fields = ("test", "fail")
+ test = None
+ """The test that passes or fails the assertion.
+
+ :type: NodeNG or None
+ """
+ fail = None
+ """The message shown when the assertion fails.
+
+ :type: NodeNG or None
+ """
+
+ def postinit(self, test=None, fail=None):
+ """Do some setup after initialisation.
+
+ :param test: The test that passes or fails the assertion.
+ :type test: NodeNG or None
+
+ :param fail: The message shown when the assertion fails.
+ :type fail: NodeNG or None
+ """
+ self.fail = fail
+ self.test = test
+
+ def get_children(self):
+ yield self.test
+
+ if self.fail is not None:
+ yield self.fail
+
+
+class Assign(mixins.AssignTypeMixin, Statement):
+ """Class representing an :class:`ast.Assign` node.
+
+ An :class:`Assign` is a statement where something is explicitly
+    assigned to.
+
+ >>> node = astroid.extract_node('variable = range(10)')
+ >>> node
+    <Assign l.1 at 0x...>
+ """
+
+ _astroid_fields = ("targets", "value")
+ _other_other_fields = ("type_annotation",)
+ targets = None
+ """What is being assigned to.
+
+ :type: list(NodeNG) or None
+ """
+ value = None
+ """The value being assigned to the variables.
+
+ :type: NodeNG or None
+ """
+ type_annotation = None
+ """If present, this will contain the type annotation passed by a type comment
+
+ :type: NodeNG or None
+ """
+
+ def postinit(self, targets=None, value=None, type_annotation=None):
+ """Do some setup after initialisation.
+
+ :param targets: What is being assigned to.
+ :type targets: list(NodeNG) or None
+
+ :param value: The value being assigned to the variables.
+ :type: NodeNG or None
+ """
+ self.targets = targets
+ self.value = value
+ self.type_annotation = type_annotation
+
+ def get_children(self):
+ yield from self.targets
+
+ yield self.value
+
+ @decorators.cached
+ def _get_assign_nodes(self):
+ return [self] + list(self.value._get_assign_nodes())
+
+ def _get_yield_nodes_skip_lambdas(self):
+ yield from self.value._get_yield_nodes_skip_lambdas()
+
+
+class AnnAssign(mixins.AssignTypeMixin, Statement):
+ """Class representing an :class:`ast.AnnAssign` node.
+
+ An :class:`AnnAssign` is an assignment with a type annotation.
+
+ >>> node = astroid.extract_node('variable: List[int] = range(10)')
+ >>> node
+    <AnnAssign l.1 at 0x...>
+ """
+
+ _astroid_fields = ("target", "annotation", "value")
+ _other_fields = ("simple",)
+ target = None
+ """What is being assigned to.
+
+ :type: NodeNG or None
+ """
+ annotation = None
+ """The type annotation of what is being assigned to.
+
+ :type: NodeNG
+ """
+ value = None
+ """The value being assigned to the variables.
+
+ :type: NodeNG or None
+ """
+ simple = None
+ """Whether :attr:`target` is a pure name or a complex statement.
+
+ :type: int
+ """
+
+ def postinit(self, target, annotation, simple, value=None):
+ """Do some setup after initialisation.
+
+ :param target: What is being assigned to.
+ :type target: NodeNG
+
+ :param annotation: The type annotation of what is being assigned to.
+ :type annotation: NodeNG
+
+ :param simple: Whether :attr:`target` is a pure name
+ or a complex statement.
+ :type simple: int
+
+ :param value: The value being assigned to the variables.
+ :type value: NodeNG or None
+ """
+ self.target = target
+ self.annotation = annotation
+ self.value = value
+ self.simple = simple
+
+ def get_children(self):
+ yield self.target
+ yield self.annotation
+
+ if self.value is not None:
+ yield self.value
+
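+# A minimal usage sketch, assuming the attributes documented above; ``simple``
+# mirrors the ast field and is 1 when the target is a bare name.
+# >>> import astroid
+# >>> node = astroid.extract_node('variable: List[int] = range(10)')
+# >>> node.target.name, node.simple
+# ('variable', 1)
+# >>> node.annotation.as_string()
+# 'List[int]'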
+
+class AugAssign(mixins.AssignTypeMixin, Statement):
+ """Class representing an :class:`ast.AugAssign` node.
+
+ An :class:`AugAssign` is an assignment paired with an operator.
+
+ >>> node = astroid.extract_node('variable += 1')
+ >>> node
+
+ """
+
+ _astroid_fields = ("target", "value")
+ _other_fields = ("op",)
+ target = None
+ """What is being assigned to.
+
+ :type: NodeNG or None
+ """
+ value = None
+ """The value being assigned to the variable.
+
+ :type: NodeNG or None
+ """
+
+ def __init__(self, op=None, lineno=None, col_offset=None, parent=None):
+ """
+ :param op: The operator that is being combined with the assignment.
+ This includes the equals sign.
+ :type op: str or None
+
+ :param lineno: The line that this node appears on in the source code.
+ :type lineno: int or None
+
+ :param col_offset: The column that this node appears on in the
+ source code.
+ :type col_offset: int or None
+
+ :param parent: The parent node in the syntax tree.
+ :type parent: NodeNG or None
+ """
+ self.op = op
+ """The operator that is being combined with the assignment.
+
+ This includes the equals sign.
+
+ :type: str or None
+ """
+
+ super(AugAssign, self).__init__(lineno, col_offset, parent)
+
+ def postinit(self, target=None, value=None):
+ """Do some setup after initialisation.
+
+ :param target: What is being assigned to.
+ :type target: NodeNG or None
+
+ :param value: The value being assigned to the variable.
+ :type value: NodeNG or None
+ """
+ self.target = target
+ self.value = value
+
+ # This is set by inference.py
+ def _infer_augassign(self, context=None):
+ raise NotImplementedError
+
+ def type_errors(self, context=None):
+ """Get a list of type errors which can occur during inference.
+
+ Each TypeError is represented by a :class:`BadBinaryOperationMessage`,
+ which holds the original exception.
+
+ :returns: The list of possible type errors.
+ :rtype: list(BadBinaryOperationMessage)
+ """
+ try:
+ results = self._infer_augassign(context=context)
+ return [
+ result
+ for result in results
+ if isinstance(result, util.BadBinaryOperationMessage)
+ ]
+ except exceptions.InferenceError:
+ return []
+
+ def get_children(self):
+ yield self.target
+ yield self.value
+
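+# A minimal usage sketch, assuming the attributes documented above; the
+# operator keeps its trailing equals sign, and type_errors() returns a list of
+# BadBinaryOperationMessage objects (often empty) found during inference.
+# >>> import astroid
+# >>> node = astroid.extract_node('count += 1')
+# >>> node.op
+# '+='
+# >>> node.target.name, node.value.value
+# ('count', 1)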
+
+class Repr(NodeNG):
+ """Class representing an :class:`ast.Repr` node.
+
+ A :class:`Repr` node represents the backtick syntax,
+ which is a deprecated alias for :func:`repr` removed in Python 3.
+
+ >>> node = astroid.extract_node('`variable`')
+ >>> node
+
+ """
+
+ _astroid_fields = ("value",)
+ value = None
+ """What is having :func:`repr` called on it.
+
+ :type: NodeNG or None
+ """
+
+ def postinit(self, value=None):
+ """Do some setup after initialisation.
+
+ :param value: What is having :func:`repr` called on it.
+ :type value: NodeNG or None
+ """
+ self.value = value
+
+
+class BinOp(NodeNG):
+ """Class representing an :class:`ast.BinOp` node.
+
+ A :class:`BinOp` node is an application of a binary operator.
+
+ >>> node = astroid.extract_node('a + b')
+ >>> node
+
+ """
+
+ _astroid_fields = ("left", "right")
+ _other_fields = ("op",)
+ left = None
+ """What is being applied to the operator on the left side.
+
+ :type: NodeNG or None
+ """
+ right = None
+ """What is being applied to the operator on the right side.
+
+ :type: NodeNG or None
+ """
+
+ def __init__(self, op=None, lineno=None, col_offset=None, parent=None):
+ """
+ :param op: The operator.
+ :type: str or None
+
+ :param lineno: The line that this node appears on in the source code.
+ :type lineno: int or None
+
+ :param col_offset: The column that this node appears on in the
+ source code.
+ :type col_offset: int or None
+
+ :param parent: The parent node in the syntax tree.
+ :type parent: NodeNG or None
+ """
+ self.op = op
+ """The operator.
+
+ :type: str or None
+ """
+
+ super(BinOp, self).__init__(lineno, col_offset, parent)
+
+ def postinit(self, left=None, right=None):
+ """Do some setup after initialisation.
+
+ :param left: What is being applied to the operator on the left side.
+ :type left: NodeNG or None
+
+ :param right: What is being applied to the operator on the right side.
+ :type right: NodeNG or None
+ """
+ self.left = left
+ self.right = right
+
+ # This is set by inference.py
+ def _infer_binop(self, context=None):
+ raise NotImplementedError
+
+ def type_errors(self, context=None):
+ """Get a list of type errors which can occur during inference.
+
+ Each TypeError is represented by a :class:`BadBinaryOperationMessage`,
+ which holds the original exception.
+
+ :returns: The list of possible type errors.
+ :rtype: list(BadBinaryOperationMessage)
+ """
+ try:
+ results = self._infer_binop(context=context)
+ return [
+ result
+ for result in results
+ if isinstance(result, util.BadBinaryOperationMessage)
+ ]
+ except exceptions.InferenceError:
+ return []
+
+ def get_children(self):
+ yield self.left
+ yield self.right
+
+ def op_precedence(self):
+ return OP_PRECEDENCE[self.op]
+
+ def op_left_associative(self):
+ # 2**3**4 == 2**(3**4)
+ return self.op != "**"
+
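+# A minimal usage sketch, assuming the attributes documented above;
+# exponentiation is the one right-associative binary operator.
+# >>> import astroid
+# >>> node = astroid.extract_node('a + b')
+# >>> node.op, node.left.name, node.right.name
+# ('+', 'a', 'b')
+# >>> astroid.extract_node('2 ** 3 ** 4').op_left_associative()
+# False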
+
+class BoolOp(NodeNG):
+ """Class representing an :class:`ast.BoolOp` node.
+
+ A :class:`BoolOp` is an application of a boolean operator.
+
+ >>> node = astroid.extract_node('a and b')
+ >>> node
+
+ """
+
+ _astroid_fields = ("values",)
+ _other_fields = ("op",)
+ values = None
+ """The values being applied to the operator.
+
+ :type: list(NodeNG) or None
+ """
+
+ def __init__(self, op=None, lineno=None, col_offset=None, parent=None):
+ """
+ :param op: The operator.
+ :type: str or None
+
+ :param lineno: The line that this node appears on in the source code.
+ :type lineno: int or None
+
+ :param col_offset: The column that this node appears on in the
+ source code.
+ :type col_offset: int or None
+
+ :param parent: The parent node in the syntax tree.
+ :type parent: NodeNG or None
+ """
+ self.op = op
+ """The operator.
+
+ :type: str or None
+ """
+
+ super(BoolOp, self).__init__(lineno, col_offset, parent)
+
+ def postinit(self, values=None):
+ """Do some setup after initialisation.
+
+ :param values: The values being applied to the operator.
+ :type values: list(NodeNG) or None
+ """
+ self.values = values
+
+ def get_children(self):
+ yield from self.values
+
+ def op_precedence(self):
+ return OP_PRECEDENCE[self.op]
+
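+# A minimal usage sketch, assuming the attributes documented above; a chained
+# boolean expression keeps all of its operands in a single ``values`` list.
+# >>> import astroid
+# >>> node = astroid.extract_node('a and b and c')
+# >>> node.op
+# 'and'
+# >>> [value.name for value in node.values]
+# ['a', 'b', 'c']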
+
+class Break(mixins.NoChildrenMixin, Statement):
+ """Class representing an :class:`ast.Break` node.
+
+ >>> node = astroid.extract_node('break')
+ >>> node
+
+ """
+
+
+class Call(NodeNG):
+ """Class representing an :class:`ast.Call` node.
+
+ A :class:`Call` node is a call to a function, method, etc.
+
+ >>> node = astroid.extract_node('function()')
+ >>> node
+
+ """
+
+ _astroid_fields = ("func", "args", "keywords")
+ func = None
+ """What is being called.
+
+ :type: NodeNG or None
+ """
+ args = None
+ """The positional arguments being given to the call.
+
+ :type: list(NodeNG) or None
+ """
+ keywords = None
+ """The keyword arguments being given to the call.
+
+ :type: list(NodeNG) or None
+ """
+
+ def postinit(self, func=None, args=None, keywords=None):
+ """Do some setup after initialisation.
+
+ :param func: What is being called.
+ :type func: NodeNG or None
+
+ :param args: The positional arguments being given to the call.
+ :type args: list(NodeNG) or None
+
+ :param keywords: The keyword arguments being given to the call.
+ :type keywords: list(NodeNG) or None
+ """
+ self.func = func
+ self.args = args
+ self.keywords = keywords
+
+ @property
+ def starargs(self):
+ """The positional arguments that unpack something.
+
+ :type: list(Starred)
+ """
+ args = self.args or []
+ return [arg for arg in args if isinstance(arg, Starred)]
+
+ @property
+ def kwargs(self):
+ """The keyword arguments that unpack something.
+
+ :type: list(Keyword)
+ """
+ keywords = self.keywords or []
+ return [keyword for keyword in keywords if keyword.arg is None]
+
+ def get_children(self):
+ yield self.func
+
+ yield from self.args
+
+ yield from self.keywords or ()
+
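+# A minimal usage sketch, assuming the properties documented above; starargs
+# and kwargs filter the unpacking arguments out of args and keywords.
+# >>> import astroid
+# >>> node = astroid.extract_node('function(a, *rest, key=1, **extra)')
+# >>> [arg.value.name for arg in node.starargs]
+# ['rest']
+# >>> [kw.value.name for kw in node.kwargs]
+# ['extra']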
+
+class Compare(NodeNG):
+ """Class representing an :class:`ast.Compare` node.
+
+ A :class:`Compare` node indicates a comparison.
+
+ >>> node = astroid.extract_node('a <= b <= c')
+ >>> node
+
+ >>> node.ops
+ [('<=', ), ('<=',